diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..fe1152b --- /dev/null +++ b/.dockerignore @@ -0,0 +1,30 @@ +**/.classpath +**/.dockerignore +**/.env +**/.git +**/.gitignore +**/.project +**/.settings +**/.toolstarget +**/.vs +**/.vscode +**/*.*proj.user +**/*.dbmdl +**/*.jfm +**/azds.yaml +**/bin +**/charts +**/docker-compose* +**/Dockerfile* +**/node_modules +**/npm-debug.log +**/obj +**/secrets.dev.yaml +**/values.dev.yaml +LICENSE +README.md +!**/.gitignore +!.git/HEAD +!.git/config +!.git/packed-refs +!.git/refs/heads/** \ No newline at end of file diff --git a/.github/workflows/dotnet.yml b/.github/workflows/dotnet.yml new file mode 100644 index 0000000..dcddade --- /dev/null +++ b/.github/workflows/dotnet.yml @@ -0,0 +1,31 @@ +# This workflow will build a .NET project +# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-net + +name: .NET + +on: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + +jobs: + build: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + - name: Setup .NET + uses: actions/setup-dotnet@v4 + with: + dotnet-version: 8.0.x + - name: Restore dependencies + run: dotnet restore + working-directory: ./src + - name: Build + run: dotnet build --no-restore + working-directory: ./src + - name: Test + run: dotnet test --no-build --verbosity normal + working-directory: ./src diff --git a/.gitignore b/.gitignore index a4fe18b..44ce8a3 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,402 @@ -## Ignore Visual Studio temporary files, build results, and -## files generated by popular Visual Studio add-ons. 
+dist/ +.vscode/## +## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore + +# User-specific files +*.rsuser +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Mono auto generated files +mono_crash.* + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +[Ww][Ii][Nn]32/ +[Aa][Rr][Mm]/ +[Aa][Rr][Mm]64/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ +[Ll]ogs/ + +# Visual Studio 2015/2017 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# Visual Studio 2017 auto generated files +Generated\ Files/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUnit +*.VisualState.xml +TestResult.xml +nunit-*.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# Benchmark Results +BenchmarkDotNet.Artifacts/ + +# .NET Core +project.lock.json +project.fragment.lock.json +artifacts/ + +# ASP.NET Scaffolding +ScaffoldingReadMe.txt + +# StyleCop +StyleCopReport.xml + +# Files built by Visual Studio +*_i.c +*_p.c +*_h.h +*.ilk +*.meta +*.obj +*.iobj +*.pch +*.pdb +*.ipdb +*.pgc +*.pgd +*.rsp +# but not Directory.Build.rsp, as it configures directory-level build defaults +!Directory.Build.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*_wpftmp.csproj +*.log +*.tlog +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# Visual Studio Trace Files +*.e2e + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# AxoCover is a 
Code Coverage Tool +.axoCover/* +!.axoCover/settings.json + +# Coverlet is a free, cross platform Code Coverage Tool +coverage*.json +coverage*.xml +coverage*.info + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# Note: Comment the next line if you want to checkin your web deploy settings, +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# NuGet Symbol Packages +*.snupkg +# The packages folder can be ignored because of Package Restore +**/[Pp]ackages/* +# except build/, which is used as an MSBuild target. 
+!**/[Pp]ackages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/[Pp]ackages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt +*.appx +*.appxbundle +*.appxupload + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!?*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +orleans.codegen.cs + +# Including strong name files can present a security risk +# (https://github.com/github/gitignore/pull/2483#issue-259490424) +#*.snk + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm +ServiceFabricBackup/ +*.rptproj.bak + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings +*.rptproj.rsuser +*- [Bb]ackup.rdl +*- [Bb]ackup ([0-9]).rdl +*- [Bb]ackup ([0-9][0-9]).rdl + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat +node_modules/ + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
+*.vbw + +# Visual Studio 6 auto-generated project file (contains which files were open etc.) +*.vbp + +# Visual Studio 6 workspace and project file (working project files containing files to include in project) +*.dsw +*.dsp + +# Visual Studio 6 technical files +*.ncb +*.aps + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# CodeRush personal settings +.cr/personal + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Tabs Studio +*.tss + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs + +# OpenCover UI analysis results +OpenCover/ + +# Azure Stream Analytics local run output +ASALocalRun/ + +# MSBuild Binary and Structured Log +*.binlog + +# NVidia Nsight GPU debugger configuration file +*.nvuser + +# MFractors (Xamarin productivity tool) working folder +.mfractor/ + +# Local History for Visual Studio +.localhistory/ + +# Visual Studio History (VSHistory) files +.vshistory/ + +# BeatPulse healthcheck temp database +healthchecksdb + +# Backup folder for Package Reference Convert tool in Visual Studio 2017 +MigrationBackup/ + +# Ionide (cross platform F# VS Code tools) working folder +.ionide/ + +# Fody - auto-generated XML schema +FodyWeavers.xsd + +# VS Code files for those working on multiple tools +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +*.code-workspace + +# Local History for Visual Studio Code +.history/ + +# Windows Installer files from build outputs +*.cab +*.msi +*.msix +*.msm +*.msp + +# JetBrains Rider +*.sln.iml ## ## Get latest from 
https://github.com/github/gitignore/blob/main/VisualStudio.gitignore diff --git a/src/.gitignore b/src/.gitignore new file mode 100644 index 0000000..a4fe18b --- /dev/null +++ b/src/.gitignore @@ -0,0 +1,400 @@ +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. +## +## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore + +# User-specific files +*.rsuser +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Mono auto generated files +mono_crash.* + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +[Ww][Ii][Nn]32/ +[Aa][Rr][Mm]/ +[Aa][Rr][Mm]64/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ +[Ll]ogs/ + +# Visual Studio 2015/2017 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# Visual Studio 2017 auto generated files +Generated\ Files/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUnit +*.VisualState.xml +TestResult.xml +nunit-*.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# Benchmark Results +BenchmarkDotNet.Artifacts/ + +# .NET Core +project.lock.json +project.fragment.lock.json +artifacts/ + +# ASP.NET Scaffolding +ScaffoldingReadMe.txt + +# StyleCop +StyleCopReport.xml + +# Files built by Visual Studio +*_i.c +*_p.c +*_h.h +*.ilk +*.meta +*.obj +*.iobj +*.pch +*.pdb +*.ipdb +*.pgc +*.pgd +*.rsp +# but not Directory.Build.rsp, as it configures directory-level build defaults +!Directory.Build.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*_wpftmp.csproj +*.log +*.tlog +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# Visual 
Studio Trace Files +*.e2e + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# AxoCover is a Code Coverage Tool +.axoCover/* +!.axoCover/settings.json + +# Coverlet is a free, cross platform Code Coverage Tool +coverage*.json +coverage*.xml +coverage*.info + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# Note: Comment the next line if you want to checkin your web deploy settings, +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# NuGet Symbol Packages +*.snupkg +# The packages folder can be ignored because of Package Restore +**/[Pp]ackages/* +# except build/, which is used as an MSBuild target. 
+!**/[Pp]ackages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/[Pp]ackages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt +*.appx +*.appxbundle +*.appxupload + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!?*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +orleans.codegen.cs + +# Including strong name files can present a security risk +# (https://github.com/github/gitignore/pull/2483#issue-259490424) +#*.snk + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm +ServiceFabricBackup/ +*.rptproj.bak + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings +*.rptproj.rsuser +*- [Bb]ackup.rdl +*- [Bb]ackup ([0-9]).rdl +*- [Bb]ackup ([0-9][0-9]).rdl + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat +node_modules/ + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
+*.vbw + +# Visual Studio 6 auto-generated project file (contains which files were open etc.) +*.vbp + +# Visual Studio 6 workspace and project file (working project files containing files to include in project) +*.dsw +*.dsp + +# Visual Studio 6 technical files +*.ncb +*.aps + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# CodeRush personal settings +.cr/personal + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Tabs Studio +*.tss + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs + +# OpenCover UI analysis results +OpenCover/ + +# Azure Stream Analytics local run output +ASALocalRun/ + +# MSBuild Binary and Structured Log +*.binlog + +# NVidia Nsight GPU debugger configuration file +*.nvuser + +# MFractors (Xamarin productivity tool) working folder +.mfractor/ + +# Local History for Visual Studio +.localhistory/ + +# Visual Studio History (VSHistory) files +.vshistory/ + +# BeatPulse healthcheck temp database +healthchecksdb + +# Backup folder for Package Reference Convert tool in Visual Studio 2017 +MigrationBackup/ + +# Ionide (cross platform F# VS Code tools) working folder +.ionide/ + +# Fody - auto-generated XML schema +FodyWeavers.xsd + +# VS Code files for those working on multiple tools +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +*.code-workspace + +# Local History for Visual Studio Code +.history/ + +# Windows Installer files from build outputs +*.cab +*.msi +*.msix +*.msm +*.msp + +# JetBrains Rider +*.sln.iml diff --git a/src/Common.Properties.xml 
b/src/Common.Properties.xml new file mode 100644 index 0000000..e382396 --- /dev/null +++ b/src/Common.Properties.xml @@ -0,0 +1,22 @@ + + + + net8.0 + enable + Sa + abt + dundich + Copyright © 2024 + See https://github.com/dundich/Sa/releases + https://github.com/dundich/Sa + https://github.com/dundich/Sa + Apache-2.0 + true + true + enable + + + + + + \ No newline at end of file diff --git a/src/Sa.Data.Cache/Sa.Data.Cache.csproj b/src/Sa.Data.Cache/Sa.Data.Cache.csproj new file mode 100644 index 0000000..90fa666 --- /dev/null +++ b/src/Sa.Data.Cache/Sa.Data.Cache.csproj @@ -0,0 +1,13 @@ + + + + net8.0 + enable + enable + + + + + + + diff --git a/src/Sa.Data.Cache/Setup.cs b/src/Sa.Data.Cache/Setup.cs new file mode 100644 index 0000000..f740dd6 --- /dev/null +++ b/src/Sa.Data.Cache/Setup.cs @@ -0,0 +1,32 @@ +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.DependencyInjection; +using ZiggyCreatures.Caching.Fusion; + +namespace Sa.Data.Cache; + + +public static class Setup +{ + public static IServiceCollection AddFusionCacheEx(this IServiceCollection services, string cacheName, Action? 
configure = null) + { + services.AddFusionCacheSystemTextJsonSerializer(); + + // https://github.com/ZiggyCreatures/FusionCache + services + .AddFusionCache(cacheName) + .WithPostSetup((sp, c) => + { + FusionCacheEntryOptions ops = c.DefaultEntryOptions; + + ops.Duration = TimeSpan.FromMinutes(2); + ops.FactorySoftTimeout = TimeSpan.FromMilliseconds(100); + ops.FailSafeMaxDuration = TimeSpan.FromHours(2); + ops.FailSafeThrottleDuration = TimeSpan.FromSeconds(30); + ops.Priority = CacheItemPriority.Low; + configure?.Invoke(sp, ops); + }) + .WithoutLogger(); + + return services; + } +} \ No newline at end of file diff --git a/src/Sa.Data.PostgreSql.Migration/ITodo.cs b/src/Sa.Data.PostgreSql.Migration/ITodo.cs new file mode 100644 index 0000000..2064e3c --- /dev/null +++ b/src/Sa.Data.PostgreSql.Migration/ITodo.cs @@ -0,0 +1,7 @@ +namespace Sa.Data.PostgreSql.Migration +{ + public interface ITodo + { + // todos + } +} diff --git a/src/Sa.Data.PostgreSql.Migration/Sa.Data.PostgreSql.Migration.csproj b/src/Sa.Data.PostgreSql.Migration/Sa.Data.PostgreSql.Migration.csproj new file mode 100644 index 0000000..660d21e --- /dev/null +++ b/src/Sa.Data.PostgreSql.Migration/Sa.Data.PostgreSql.Migration.csproj @@ -0,0 +1,13 @@ + + + + net8.0 + enable + enable + + + + + + + diff --git a/src/Sa.Data.PostgreSql/Configuration/IPgDataSourceSettingsBuilder.cs b/src/Sa.Data.PostgreSql/Configuration/IPgDataSourceSettingsBuilder.cs new file mode 100644 index 0000000..1c8c3ff --- /dev/null +++ b/src/Sa.Data.PostgreSql/Configuration/IPgDataSourceSettingsBuilder.cs @@ -0,0 +1,10 @@ + +namespace Sa.Data.PostgreSql; + +public interface IPgDataSourceSettingsBuilder +{ + void WithConnectionString(string connectionString); + void WithConnectionString(Func implementationFactory); + void WithSettings(PgDataSourceSettings settings); + void WithSettings(Func implementationFactory); +} \ No newline at end of file diff --git a/src/Sa.Data.PostgreSql/Configuration/PgDataSourceSettings.cs 
b/src/Sa.Data.PostgreSql/Configuration/PgDataSourceSettings.cs new file mode 100644 index 0000000..29c4800 --- /dev/null +++ b/src/Sa.Data.PostgreSql/Configuration/PgDataSourceSettings.cs @@ -0,0 +1,6 @@ +namespace Sa.Data.PostgreSql; + +public class PgDataSourceSettings(string connectionString) +{ + public string ConnectionString { get; } = connectionString; +} diff --git a/src/Sa.Data.PostgreSql/Configuration/PgDataSourceSettingsBuilder.cs b/src/Sa.Data.PostgreSql/Configuration/PgDataSourceSettingsBuilder.cs new file mode 100644 index 0000000..1e40a96 --- /dev/null +++ b/src/Sa.Data.PostgreSql/Configuration/PgDataSourceSettingsBuilder.cs @@ -0,0 +1,28 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; + +namespace Sa.Data.PostgreSql.Configuration; + +internal class PgDataSourceSettingsBuilder(IServiceCollection services) : IPgDataSourceSettingsBuilder +{ + + public void WithConnectionString(string connectionString) + { + services.TryAddSingleton(new PgDataSourceSettings(connectionString)); + } + + public void WithConnectionString(Func implementationFactory) + { + services.TryAddSingleton(sp => new PgDataSourceSettings(implementationFactory(sp))); + } + + public void WithSettings(Func implementationFactory) + { + services.TryAddSingleton(implementationFactory); + } + + public void WithSettings(PgDataSourceSettings settings) + { + services.TryAddSingleton(settings); + } +} diff --git a/src/Sa.Data.PostgreSql/GlobalSuppressions.cs b/src/Sa.Data.PostgreSql/GlobalSuppressions.cs new file mode 100644 index 0000000..9f39147 --- /dev/null +++ b/src/Sa.Data.PostgreSql/GlobalSuppressions.cs @@ -0,0 +1,8 @@ +// This file is used by Code Analysis to maintain SuppressMessage +// attributes that are applied to this project. +// Project-level suppressions either have no target or are given +// a specific target and scoped to a namespace, type, member, etc. 
+ +using System.Diagnostics.CodeAnalysis; + +[assembly: SuppressMessage("Style", "IDE0130:Namespace does not match folder structure", Justification = "", Scope = "namespace", Target = "~N:Sa.Data.PostgreSql")] diff --git a/src/Sa.Data.PostgreSql/IPgDataSource.cs b/src/Sa.Data.PostgreSql/IPgDataSource.cs new file mode 100644 index 0000000..0f504f7 --- /dev/null +++ b/src/Sa.Data.PostgreSql/IPgDataSource.cs @@ -0,0 +1,83 @@ +using Npgsql; + +namespace Sa.Data.PostgreSql; + +public interface IPgDataSource +{ + public static IPgDataSource Create(string connectionString) => new PgDataSource(new PgDataSourceSettings(connectionString)); + + + // ExecuteNonQuery + + Task ExecuteNonQuery(string sql, NpgsqlParameter[] parameters, CancellationToken cancellationToken = default); + + async Task ExecuteNonQuery(string sql, CancellationToken cancellationToken = default) + => await ExecuteNonQuery(sql, [], cancellationToken); + + + // ExecuteReader + + Task ExecuteReader(string sql, Action read, NpgsqlParameter[] parameters, CancellationToken cancellationToken = default); + + async Task ExecuteReader(string sql, Action read, CancellationToken cancellationToken = default) + => await ExecuteReader(sql, read, [], cancellationToken); + + + // ExecuteReaderList + + + async Task> ExecuteReaderList(string sql, Func read, CancellationToken cancellationToken = default) + { + List list = []; + await ExecuteReader(sql, (reader, _) => list.Add(read(reader)), cancellationToken); + return list; + } + + async Task> ExecuteReaderList(string sql, Func read, NpgsqlParameter[] parameters, CancellationToken cancellationToken = default) + { + List list = []; + await ExecuteReader(sql, (reader, _) => list.Add(read(reader)), parameters, cancellationToken); + return list; + } + + + // ExecuteReaderFirst + + + Task ExecuteReaderFirst(string sql, CancellationToken cancellationToken = default) + { + return ExecuteReaderFirst(sql, [], cancellationToken); + } + + async Task ExecuteReaderFirst(string sql, 
NpgsqlParameter[] parameters, CancellationToken cancellationToken = default) + { + T value = default!; + + await ExecuteReader(sql, (reader, _) => + { + value = Type.GetTypeCode(typeof(T)) switch + { + TypeCode.Char => (T)(object)reader.GetChar(0), + TypeCode.Int64 => (T)(object)reader.GetInt64(0), + TypeCode.Int32 => (T)(object)reader.GetInt32(0), + TypeCode.String => (T)(object)reader.GetString(0), + TypeCode.Boolean => (T)(object)reader.GetBoolean(0), + TypeCode.Double => (T)(object)reader.GetDouble(0), + TypeCode.DateTime => (T)(object)reader.GetDateTime(0), + TypeCode.Decimal => (T)(object)reader.GetDecimal(0), + TypeCode.DBNull => value, + _ => throw new InvalidOperationException($"Unsupported type: {typeof(T)}"), + }; + } + , parameters + , cancellationToken); + + return value; + } + + + // BeginBinaryImport + + + ValueTask BeginBinaryImport(string sql, Func> write, CancellationToken cancellationToken = default); +} diff --git a/src/Sa.Data.PostgreSql/IPgDistributedLock.cs b/src/Sa.Data.PostgreSql/IPgDistributedLock.cs new file mode 100644 index 0000000..5ebf898 --- /dev/null +++ b/src/Sa.Data.PostgreSql/IPgDistributedLock.cs @@ -0,0 +1,6 @@ +namespace Sa.Data.PostgreSql; + +public interface IPgDistributedLock +{ + Task TryExecuteInDistributedLock(long lockId, Func exclusiveLockTask, CancellationToken cancellationToken); +} diff --git a/src/Sa.Data.PostgreSql/PgDataSource.cs b/src/Sa.Data.PostgreSql/PgDataSource.cs new file mode 100644 index 0000000..d175bf5 --- /dev/null +++ b/src/Sa.Data.PostgreSql/PgDataSource.cs @@ -0,0 +1,71 @@ +using Npgsql; + +namespace Sa.Data.PostgreSql; + +/// +/// NpgsqlDataSource lite +/// +/// connection string +internal sealed class PgDataSource(PgDataSourceSettings settings) : IPgDataSource, IDisposable, IAsyncDisposable +{ + private readonly Lazy _dataSource = new(() => NpgsqlDataSource.Create(settings.ConnectionString)); + + public ValueTask OpenDbConnection(CancellationToken cancellationToken) => 
_dataSource.Value.OpenConnectionAsync(cancellationToken); + + public void Dispose() + { + if (_dataSource.IsValueCreated) + { + _dataSource.Value.Dispose(); + } + } + + public async ValueTask DisposeAsync() + { + if (_dataSource.IsValueCreated) + { + await _dataSource.Value.DisposeAsync(); + } + } + + public async ValueTask BeginBinaryImport(string sql, Func> write, CancellationToken cancellationToken = default) + { + using NpgsqlConnection db = await OpenDbConnection(cancellationToken); + using NpgsqlBinaryImporter writer = await db.BeginBinaryImportAsync(sql, cancellationToken); + ulong result = await write(writer, cancellationToken); + return result; + } + + public async Task ExecuteNonQuery(string sql, NpgsqlParameter[] parameters, CancellationToken cancellationToken = default) + { + using NpgsqlConnection connection = await OpenDbConnection(cancellationToken); + using NpgsqlCommand cmd = new(sql, connection); + AddParameters(cmd, parameters); + return await cmd.ExecuteNonQueryAsync(cancellationToken); + } + + public async Task ExecuteReader(string sql, Action read, NpgsqlParameter[] parameters, CancellationToken cancellationToken = default) + { + int rowCount = 0; + + using NpgsqlConnection connection = await OpenDbConnection(cancellationToken); + using NpgsqlCommand cmd = new(sql, connection); + AddParameters(cmd, parameters); + using NpgsqlDataReader reader = await cmd.ExecuteReaderAsync(cancellationToken); + while (await reader.ReadAsync(cancellationToken) && !cancellationToken.IsCancellationRequested) + { + read(reader, rowCount); + rowCount++; + } + return rowCount; + } + + + static void AddParameters(NpgsqlCommand cmd, NpgsqlParameter[] parameters) + { + if (parameters != null && parameters.Length > 0) + { + cmd.Parameters.AddRange(parameters); + } + } +} diff --git a/src/Sa.Data.PostgreSql/PgDistributedLock.cs b/src/Sa.Data.PostgreSql/PgDistributedLock.cs new file mode 100644 index 0000000..f22b1b5 --- /dev/null +++ 
b/src/Sa.Data.PostgreSql/PgDistributedLock.cs @@ -0,0 +1,63 @@ +using Microsoft.Extensions.Logging; +using Npgsql; + +namespace Sa.Data.PostgreSql; + +/// +/// +/// +/// +internal sealed class PgDistributedLock(PgDataSourceSettings settings, ILogger? logger = null) : IPgDistributedLock +{ + private readonly NpgsqlConnectionStringBuilder builder = new(settings.ConnectionString); + + public async Task TryExecuteInDistributedLock(long lockId, Func exclusiveLockTask, CancellationToken cancellationToken) + { + logger?.LogInformation("Trying to acquire session lock for Lock Id {@LockId}", lockId); + + using var connection = new NpgsqlConnection(builder.ToString()); + await connection.OpenAsync(cancellationToken); + + bool hasLockedAcquired = await TryAcquireLockAsync(lockId, connection, cancellationToken); + + if (!hasLockedAcquired) + { + logger?.LogInformation("Lock {@LockId} rejected", lockId); + return false; + } + + logger?.LogInformation("Lock {@LockId} acquired", lockId); + try + { + if (await TryAcquireLockAsync(lockId, connection, cancellationToken)) + { + await exclusiveLockTask(cancellationToken); + } + } + finally + { + logger?.LogInformation("Releasing session lock for {@LockId}", lockId); + await ReleaseLock(lockId, connection, cancellationToken); + } + return true; + } + + private static async Task TryAcquireLockAsync(long lockId, NpgsqlConnection connection, CancellationToken cancellationToken) + { + string sessionLockCommand = $"SELECT pg_try_advisory_lock({lockId})"; + using var commandQuery = new NpgsqlCommand(sessionLockCommand, connection); + object? 
result = await commandQuery.ExecuteScalarAsync(cancellationToken); + if (result != null && bool.TryParse(result.ToString(), out var lockAcquired) && lockAcquired) + { + return true; + } + return false; + } + + private static async Task ReleaseLock(long lockId, NpgsqlConnection connection, CancellationToken cancellationToke) + { + string transactionLockCommand = $"SELECT pg_advisory_unlock({lockId})"; + using var commandQuery = new NpgsqlCommand(transactionLockCommand, connection); + await commandQuery.ExecuteScalarAsync(cancellationToke); + } +} \ No newline at end of file diff --git a/src/Sa.Data.PostgreSql/Readme.md b/src/Sa.Data.PostgreSql/Readme.md new file mode 100644 index 0000000..5846932 --- /dev/null +++ b/src/Sa.Data.PostgreSql/Readme.md @@ -0,0 +1,87 @@ +# IPgDataSource + +Предоставляет облегченный (минимальный) вариант абстракции для работы с базой данных PostgreSQL в .NET-приложениях. + +## ExecuteNonQuery + +Выполняет SQL-запрос, который не возвращает данные (например, INSERT, UPDATE, DELETE), и возвращает количество затронутых строк. 
+ +```csharp +var dataSource = new PgDataSource(new PgDataSourceSettings("YourConnectionString")); +int affectedRows = await dataSource.ExecuteNonQuery("SELECT 2"); +Console.WriteLine($"Affected Rows: {affectedRows}"); + +var parameters = new[] +{ + new NpgsqlParameter("p1", "Tom"), + new NpgsqlParameter("p2", 18) +}; + +int affectedRows = await dataSource.ExecuteNonQuery(""" + CREATE TABLE IF NOT EXISTS users ( + name text, + age int + ); + + INSERT INTO users (name, age) VALUES (@p1, @p2); + """, parameters); + +Console.WriteLine($"Affected Rows: {affectedRows}"); +``` + +## ExecuteReader + +Чтение данных + +```csharp +int actual = 0; +await dataSource.ExecuteReader("SELECT 1", (reader, i) => actual = reader.GetInt32(0)); +Console.WriteLine($"Value from Database: {actual}"); + +// get first value +int errCount = await fixture.DataSource.ExecuteReaderFirst("select count(error_id) from outbox__$error"); + +``` + +## BeginBinaryImport + +Бинарный импорт + +```csharp +public async ValueTask BulkWrite(ReadOnlyMemory> messages CancellationToken cancellationToken){ + // Начинаем бинарный импорт + ulong result = await dataSource.BeginBinaryImport(sqlTemplate, async (writer, t) => + { + // Записываем строки в импорт + WriteRows(writer, typeCode, messages); + return await writer.CompleteAsync(t); + }, cancellationToken); + + return result; +} + +private void WriteRows(NpgsqlBinaryImporter writer, ReadOnlyMemory> messages) +{ + foreach (OutboxMessage message in messages.Span) + { + // Генерируем уникальный идентификатор для сообщения + string id = idGenerator.GenId(message.PartInfo.CreatedAt); + + // Начинаем новую строку для записи + writer.StartRow(); + + // Записываем данные в строку + writer.Write(id, NpgsqlDbType.Char); // id + writer.Write(message.PartInfo.TenantId, NpgsqlDbType.Integer); // tenant + writer.Write(message.PartInfo.Part, NpgsqlDbType.Text); // part + + // Сериализуем и записываем полезную нагрузку + using RecyclableMemoryStream stream = 
streamManager.GetStream(); + serializer.Serialize(stream, message.Payload); + stream.Position = 0; + writer.Write(stream, NpgsqlDbType.Bytea); // payload + writer.Write(stream.Length, NpgsqlDbType.Integer); // payload_size + writer.Write(message.PartInfo.CreatedAt.ToUnixTimeSeconds(), NpgsqlDbType.Bigint); // created_at + } +} +``` \ No newline at end of file diff --git a/src/Sa.Data.PostgreSql/Sa.Data.PostgreSql.csproj b/src/Sa.Data.PostgreSql/Sa.Data.PostgreSql.csproj new file mode 100644 index 0000000..f43529b --- /dev/null +++ b/src/Sa.Data.PostgreSql/Sa.Data.PostgreSql.csproj @@ -0,0 +1,13 @@ + + + + net8.0 + enable + enable + + + + + + + diff --git a/src/Sa.Data.PostgreSql/Setup.cs b/src/Sa.Data.PostgreSql/Setup.cs new file mode 100644 index 0000000..9a122a0 --- /dev/null +++ b/src/Sa.Data.PostgreSql/Setup.cs @@ -0,0 +1,17 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Sa.Data.PostgreSql.Configuration; + +namespace Sa.Data.PostgreSql; + +public static class Setup +{ + public static IServiceCollection AddPgDataSource(this IServiceCollection services, Action? configure = null) + { + PgDataSourceSettingsBuilder builder = new(services); + configure?.Invoke(builder); + services.TryAddSingleton(); + services.TryAddSingleton(); + return services; + } +} diff --git a/src/Sa.Media/Sa.Media.csproj b/src/Sa.Media/Sa.Media.csproj new file mode 100644 index 0000000..fa71b7a --- /dev/null +++ b/src/Sa.Media/Sa.Media.csproj @@ -0,0 +1,9 @@ + + + + net8.0 + enable + enable + + + diff --git a/src/Sa.Media/Wav/WavFile.cs b/src/Sa.Media/Wav/WavFile.cs new file mode 100644 index 0000000..a333d13 --- /dev/null +++ b/src/Sa.Media/Wav/WavFile.cs @@ -0,0 +1,404 @@ +using System.ComponentModel.DataAnnotations; +using System.Diagnostics.CodeAnalysis; + +namespace Sa.Media.Wav; + +/// +/// Структура, описывающая заголовок WAV файла. 
+/// +/// +/// +/// + +public sealed class WavFile : IDisposable +{ + static class Env + { + public const UInt32 ChunkId = 0x46464952; + public const UInt32 WaveFormat = 0x45564157; + public const UInt16 WaveFormatPcm = 0x0001; + public const UInt32 Subchunk1IdJunk = 0x4B4E554A; + } + + private BinaryReader? _reader; + + + /// + /// Содержит символы "RIFF" в ASCII кодировке + /// + public UInt32 ChunkId { get; private set; } + + /// + /// Это оставшийся размер цепочки, начиная с этой позиции. + /// Иначе говоря, это размер файла - 8, то есть, исключены поля chunkId и chunkSize. + /// + public UInt32 ChunkSize { get; private set; } + + /// + /// Содержит символы "WAVE" + /// + public UInt32 Format { get; private set; } + + /// + /// Содержит символы "fmt " + /// + public UInt32 Subchunk1Id { get; private set; } + + /// + /// 16 для формата PCM. (or 18) + /// Это оставшийся размер подцепочки, начиная с этой позиции. + /// + public UInt32 Subchunk1Size { get; private set; } + + /// + /// Аудио формат + /// Для PCM = 1 (то есть, Линейное квантование). + /// Значения, отличающиеся от 1, обозначают некоторый формат сжатия. + /// + /// + public UInt16 AudioFormat { get; private set; } + + /// + /// Количество каналов. Моно = 1, Стерео = 2 и т.д. + /// + public UInt16 NumChannels { get; private set; } + + /// + /// Частота дискретизации. 8000 Гц, 44100 Гц и т.д. + /// + public UInt32 SampleRate { get; private set; } + + /// + /// sampleRate * numChannels * bitsPerSample/8 + /// + public UInt32 ByteRate { get; private set; } + + /// + /// numChannels * bitsPerSample/8 + /// Количество байт для одного сэмпла, включая все каналы. + /// + public UInt16 BlockAlign { get; private set; } + + /// + /// Так называемая "глубиная" или точность звучания. 8 бит, 16 бит и т.д. + /// + public UInt16 BitsPerSample { get; private set; } + + // Подцепочка "data" содержит аудио-данные и их размер. 
+ + /// + /// Содержит символы "data" + /// + public UInt32 Subchunk2Id { get; private set; } + + /// + /// numSamples * numChannels * bitsPerSample/8 + /// Количество байт в области данных. + /// + public int Subchunk2Size { get; private set; } + + /// + /// Смещение к области данных + /// + public long DataOffset { get; private set; } + + /// + /// the number of samples per channel + /// + public int SamplesPerChannel { get; private set; } + + /// + /// Из файла + /// + public string? FileName { get; private set; } + + + public bool IsWave => IsLoaded() && ChunkId == Env.ChunkId + && Format == Env.WaveFormat; + + public bool IsPcmWave => IsWave + && (Subchunk1Size == 16 || Subchunk2Size == 18) + && AudioFormat == Env.WaveFormatPcm; + + public bool IsLoaded() => DataOffset > 0; + + + public WavFile ReadHeader(bool suppressErrors = true) + { + if (IsLoaded()) return this; + + BinaryReader reader = OpenReader(); + + // chunk 0 + ChunkId = reader.ReadUInt32(); + ChunkSize = reader.ReadUInt32(); + Format = reader.ReadUInt32(); + + // chunk 1 + Subchunk1Id = reader.ReadUInt32(); + + // chunk 1 + // Содержит символы "fmt " + // (0x666d7420 в big-endian представлении) + while (Subchunk1Id == Env.Subchunk1IdJunk) //JUNK + { + //skip JUNK chunks: https://www.daubnet.com/en/file-format-riff + UInt32 JunkSubchunk1Size = reader.ReadUInt32(); // bytes for this chunk + if (JunkSubchunk1Size % 2 == 1) + { + ++JunkSubchunk1Size; //When writing RIFFs, JUNK chunks should not have odd number as Size. + } + reader.ReadBytes((int)JunkSubchunk1Size); + Subchunk1Id = reader.ReadUInt32(); //read next subchunk + } + + + Subchunk1Size = reader.ReadUInt32(); // bytes for this chunk (expect 16 or 18) + + // 16 bytes coming... 
+ AudioFormat = reader.ReadUInt16(); + NumChannels = reader.ReadUInt16(); + SampleRate = reader.ReadUInt32(); + ByteRate = reader.ReadUInt32(); + BlockAlign = reader.ReadUInt16(); + BitsPerSample = reader.ReadUInt16(); + + + if (Subchunk1Size == 18) + { + // Read any extra values + int fmtExtraSize = reader.ReadInt16(); + reader.ReadBytes(fmtExtraSize); + } + + // chunk 2 + + + while (true) + { + Subchunk2Id = reader.ReadUInt32(); + Subchunk2Size = reader.ReadInt32(); + + if (Subchunk2Id == 0x5453494c) + { + //just skip LIST subchunk + reader.ReadBytes(Subchunk2Size); + continue; + } + if (Subchunk2Id == 0x524c4c46) + { + //just skip FLLR subchunk https://stackoverflow.com/questions/6284651/avaudiorecorder-doesnt-write-out-proper-wav-file-header + reader.ReadBytes(Subchunk2Size); + continue; + } + + if (Subchunk2Id != 0x61746164) + { + if (suppressErrors) return this; + throw new NotImplementedException($"Bad Subchunk2Id: 0x{Subchunk2Id:x8}"); + } + break; + } + + if (Subchunk2Size == 0x7FFFFFFF) + { + //size does not set!! + //hack to support custom file length calculation + //this does not check if there are otehr subchunks after "data" in thefile + long sizeInBytesLong = (reader.BaseStream.Length - reader.BaseStream.Position); + if (sizeInBytesLong > Int32.MaxValue) + { + if (suppressErrors) return this; + throw new ArgumentNullException("Too long wave! " + sizeInBytesLong); + } + + Subchunk2Size = (int)sizeInBytesLong; + } + + // Calculate the number of samples per channel + SamplesPerChannel = Subchunk2Size / (BlockAlign * NumChannels); + + // save start data offset + DataOffset = reader.BaseStream.Position; + + return this; + } + + + public WavFile WithFileName(string filename) + { + if (filename != FileName) + { + Close(); + FileName = filename; + } + + return this; + } + + + public IEnumerable<(int channelId, byte[] sample)> ReadWave(float? cutFromSeconds = null, float? 
cutToSeconds = null) + { + ReadHeader(); + + BinaryReader reader = OpenReader(); + + // Calculate the byte offset for the start of the data + long dataOffset = DataOffset; + + // Calculate the byte offset for the end of the data + long dataEndOffset = dataOffset + Subchunk2Size; + + // Calculate the byte offset for the start of the cut + long cutFromOffset = dataOffset; + if (cutFromSeconds != null) + { + cutFromOffset += (long)(cutFromSeconds.Value * SampleRate * BlockAlign); + } + + // Calculate the byte offset for the end of the cut + long cutToOffset = dataEndOffset; + if (cutToSeconds != null) + { + cutToOffset = dataOffset + (long)(cutToSeconds.Value * SampleRate * BlockAlign); + } + + if (reader.BaseStream.CanSeek) + { + reader.BaseStream.Position = cutFromOffset; + } + + // Read samples from the current channel + for (long i = cutFromOffset; i < cutToOffset; i += BlockAlign) + { + for (int channelId = 0; channelId < NumChannels; channelId++) + { + // Read the sample from the stream + byte[] sample = reader.ReadBytes(BlockAlign / NumChannels); + yield return (channelId, sample); + } + } + } + + /// + /// Convert and return audio data in double format + /// + /// + /// + /// + public IEnumerable<(int channelId, double[] sample)> ReadDoubleWave(float? cutFromSeconds = null, float? cutToSeconds = null) + => ReadWave(cutFromSeconds, cutToSeconds) + .Select(c => (c.channelId, ConvertToDouble(BitsPerSample, c.sample))); + + + + /// + /// для распознавалок + /// + public IEnumerable<(int channelId, byte[] sample)> ReadDoubleWaveAsByte(float? cutFromSeconds = null, float? cutToSeconds = null) + => ReadDoubleWave(cutFromSeconds, cutToSeconds) + .Select(c => (c.channelId, ConvertToByte(c.sample))); + + public double GetLengthSeconds() + => IsLoaded() && SampleRate != 0 + ? 
SamplesPerChannel / SampleRate + : 0; + + public TimeSpan GetLength() => TimeSpan.FromSeconds(GetLengthSeconds()); + + public long WriteChannel([NotNull] string fileName, [Range(0, 10)] int indexChannel) + { + using FileStream fs = File.Open(fileName ?? throw new ArgumentNullException(nameof(fileName)), FileMode.OpenOrCreate); + return WriteChannel(fs, indexChannel); + } + + public long WriteChannel(FileStream fs, [Range(0, 10)] int indexChannel) + { + ReadHeader(); + + if (!IsLoaded()) throw new NotSupportedException(); + + if (indexChannel >= NumChannels || indexChannel < 0) throw new ArgumentOutOfRangeException(nameof(indexChannel)); + + using var writer = new BinaryWriter(fs); + writer.Write(ChunkId); + writer.Write(ChunkSize); + writer.Write(Format); + writer.Write(Subchunk1Id); + writer.Write(Subchunk1Size); + writer.Write(AudioFormat); + writer.Write((UInt16)1); //NumChannels + writer.Write(SampleRate); + writer.Write(ByteRate); + writer.Write((UInt16)(BlockAlign / NumChannels)); + writer.Write(BitsPerSample); + writer.Write(Subchunk2Id); + writer.Write(Subchunk2Size / NumChannels); + + + foreach (var (_, sample) in ReadWave().Where(c => c.channelId == indexChannel)) + { + writer.Write(sample); + } + + writer.Flush(); + return fs.Length; + } + + public void Close() + { + _reader?.Dispose(); + _reader = null; + DataOffset = 0; + } + + public void Dispose() => Close(); + + private BinaryReader OpenReader() + { + if (_reader == null) + { + FileStream fs = File.Open(FileName ?? 
throw new ArgumentException(nameof(FileName)), FileMode.Open); + _reader = new BinaryReader(fs); + } + else + { + _reader.BaseStream.Position = 0; + } + return _reader; + } + + private static byte[] ConvertToByte(double[] data) + { + short[] array = Array.ConvertAll(data, (double e) => (short)(e * 32767.0)); + byte[] array2 = new byte[array.Length * 2]; + Buffer.BlockCopy(array, 0, array2, 0, array2.Length); + return array2; + } + + private static double[] ConvertToDouble(ushort bitsPerSample, byte[] data) + { + int len = data.Length; + double[] sample; + switch (bitsPerSample) + { + case 64: + sample = new double[len / sizeof(double)]; + Buffer.BlockCopy(data, 0, sample, 0, len); + break; + case 32: + float[] asFloat = new float[len / sizeof(float)]; + Buffer.BlockCopy(data, 0, asFloat, 0, len); + sample = Array.ConvertAll(asFloat, e => (double)e); + break; + case 16: + Int16[] asInt16 = new Int16[len / sizeof(Int16)]; + Buffer.BlockCopy(data, 0, asInt16, 0, len); + sample = Array.ConvertAll(asInt16, e => e / -(double)Int16.MinValue); + break; + default: throw new ArgumentException("Bad BitsPerSample: " + bitsPerSample); + } + + return sample; + } +} diff --git a/src/Sa.Outbox.Attributes/IOutboxPayloadMessage.cs b/src/Sa.Outbox.Attributes/IOutboxPayloadMessage.cs new file mode 100644 index 0000000..7305f3f --- /dev/null +++ b/src/Sa.Outbox.Attributes/IOutboxPayloadMessage.cs @@ -0,0 +1,18 @@ +namespace Sa.Outbox.Support; + +/// +/// Represents a message payload in the Outbox system. +/// This interface defines the properties that any Outbox payload message must implement. +/// +public interface IOutboxPayloadMessage +{ + /// + /// Gets the unique identifier for the payload. + /// + string PayloadId { get; } + + /// + /// Gets the identifier for the tenant associated with the payload. 
+ /// + public int TenantId { get; } +} diff --git a/src/Sa.Outbox.Attributes/OutboxMessageAttribute.cs b/src/Sa.Outbox.Attributes/OutboxMessageAttribute.cs new file mode 100644 index 0000000..dcc6af8 --- /dev/null +++ b/src/Sa.Outbox.Attributes/OutboxMessageAttribute.cs @@ -0,0 +1,21 @@ +namespace Sa.Outbox.Support; + + +// +/// An attribute used to mark classes or structs as Outbox messages. +/// This attribute can be used to specify the part associated with the Outbox message. +/// +/// The part identifier for the Outbox message. Default is "root". +[AttributeUsage(AttributeTargets.Class | AttributeTargets.Struct)] +public class OutboxMessageAttribute(string part = "root") : Attribute +{ + /// + /// Gets the part identifier associated with the Outbox message. + /// + public string Part => part; + + /// + /// A default instance of the with the default part value. + /// + public readonly static OutboxMessageAttribute Default = new(); +} \ No newline at end of file diff --git a/src/Sa.Outbox.Attributes/PingMessage.cs b/src/Sa.Outbox.Attributes/PingMessage.cs new file mode 100644 index 0000000..1f60aa2 --- /dev/null +++ b/src/Sa.Outbox.Attributes/PingMessage.cs @@ -0,0 +1,10 @@ +namespace Sa.Outbox.Support; + + +[OutboxMessage] +public record PingMessage(long Payload) : IOutboxPayloadMessage +{ + public string PayloadId => String.Empty; + + public int TenantId => 0; +} diff --git a/src/Sa.Outbox.Attributes/Sa.Outbox.Support.csproj b/src/Sa.Outbox.Attributes/Sa.Outbox.Support.csproj new file mode 100644 index 0000000..fa71b7a --- /dev/null +++ b/src/Sa.Outbox.Attributes/Sa.Outbox.Support.csproj @@ -0,0 +1,9 @@ + + + + net8.0 + enable + enable + + + diff --git a/src/Sa.Outbox.PostgreSql/Commands/ErrorDeliveryCommand.cs b/src/Sa.Outbox.PostgreSql/Commands/ErrorDeliveryCommand.cs new file mode 100644 index 0000000..aeb5067 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Commands/ErrorDeliveryCommand.cs @@ -0,0 +1,58 @@ +using Npgsql; +using Sa.Data.PostgreSql; +using 
Sa.Extensions; + +namespace Sa.Outbox.PostgreSql.Commands; + +internal class ErrorDeliveryCommand( + IPgDataSource dataSource + , SqlOutboxTemplate sqlTemplate +) : IErrorDeliveryCommand +{ + + private readonly SqlCacheSplitter sqlCache = new(len => sqlTemplate.SqlError(len)); + + public async Task> Execute(IOutboxContext[] outboxMessages, CancellationToken cancellationToken) + { + Dictionary errors = outboxMessages + .Where(m => m.Exception != null) + .GroupBy(m => m.Exception!) + .Select(m => (err: m.Key, createdAt: m.First().DeliveryResult.CreatedAt.StartOfDay())) + .ToDictionary(e => e.err, e => new ErrorInfo(e.err.ToString().GetMurmurHash3(), e.err.GetType().Name, e.createdAt)); + + int len = errors.Count; + + if (len == 0) return errors; + + KeyValuePair[] arrErrors = [.. errors]; + + int startIndex = 0; + + foreach ((string sql, int cnt) in sqlCache.GetSql(len)) + { + + var sliceErrors = new ArraySegment>(arrErrors, startIndex, cnt); + + startIndex += cnt; + + List parameters = []; + + int i = 0; + // (@id_{i},@type_{i},@message_{i},@created_at_{i} + foreach ((Exception Key, ErrorInfo Value) in sliceErrors) + { + (long ErrorId, string TypeName, DateTimeOffset CreatedAt) = Value; + + parameters.Add(new($"@id_{i}", ErrorId)); + parameters.Add(new($"@type_{i}", TypeName)); + parameters.Add(new($"@message_{i}", Key.ToString())); + parameters.Add(new($"@created_at_{i}", CreatedAt.ToUnixTimeSeconds())); + i++; + } + + await dataSource.ExecuteNonQuery(sql, [.. 
parameters], cancellationToken); + } + + return errors; + } +} diff --git a/src/Sa.Outbox.PostgreSql/Commands/ExtendDeliveryCommand.cs b/src/Sa.Outbox.PostgreSql/Commands/ExtendDeliveryCommand.cs new file mode 100644 index 0000000..63b3395 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Commands/ExtendDeliveryCommand.cs @@ -0,0 +1,32 @@ +using Sa.Data.PostgreSql; +using Sa.Outbox.PostgreSql.TypeHashResolve; + +namespace Sa.Outbox.PostgreSql.Commands; + +internal class ExtendDeliveryCommand( + IPgDataSource dataSource + , IMsgTypeHashResolver hashResolver + , SqlOutboxTemplate sqlTemplate +) : IExtendDeliveryCommand +{ + public async Task Execute(TimeSpan lockExpiration, OutboxMessageFilter filter, CancellationToken cancellationToken) + { + + long typeCode = await hashResolver.GetCode(filter.PayloadType, cancellationToken); + long now = filter.NowDate.ToUnixTimeSeconds(); + long lockExpiresOn = (filter.NowDate + lockExpiration).ToUnixTimeSeconds(); + long fromDate = filter.FromDate.ToUnixTimeSeconds(); + + return await dataSource.ExecuteNonQuery(sqlTemplate.SqlExtendDelivery, + [ + new("tenant", filter.TenantId) + , new("part", filter.Part) + , new("from_date", fromDate) + , new("transact_id", filter.TransactId) + , new("payload_type", typeCode) + , new("lock_expires_on", lockExpiresOn) + , new("now", now) + ] + , cancellationToken); + } +} diff --git a/src/Sa.Outbox.PostgreSql/Commands/FinishDeliveryCommand.cs b/src/Sa.Outbox.PostgreSql/Commands/FinishDeliveryCommand.cs new file mode 100644 index 0000000..c8b09ee --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Commands/FinishDeliveryCommand.cs @@ -0,0 +1,86 @@ +using Npgsql; +using Sa.Data.PostgreSql; +using Sa.Outbox.PostgreSql.IdGen; + +namespace Sa.Outbox.PostgreSql.Commands; + +internal class FinishDeliveryCommand( + IPgDataSource dataSource + , SqlOutboxTemplate sqlTemplate + , IIdGenerator idGenerator +) : IFinishDeliveryCommand +{ + const int IndexParamsCount = 7; + const int ConstParamsCount = 4; + + + private 
readonly SqlCacheSplitter sqlCache = new(len => sqlTemplate.SqlFinishDelivery(len)); + + public async Task Execute( + IOutboxContext[] outboxMessages, + IReadOnlyDictionary errors, + OutboxMessageFilter filter, + CancellationToken cancellationToken) + { + if (outboxMessages.Length == 0) return 0; + + int total = 0; + + int startIndex = 0; + foreach ((string sql, int len) in sqlCache.GetSql(outboxMessages.Length)) + { + var segment = new ArraySegment>(outboxMessages, startIndex, len); + startIndex += len; + + NpgsqlParameter[] parameters = GetSqlParams(segment, errors, filter); + total += await dataSource.ExecuteNonQuery(sql, parameters, cancellationToken); + } + + return total; + } + + private NpgsqlParameter[] GetSqlParams( + ArraySegment> sliceContext, + IReadOnlyDictionary errors, + OutboxMessageFilter filter) + { + NpgsqlParameter[] parameters = new NpgsqlParameter[sliceContext.Count * IndexParamsCount + ConstParamsCount]; + + int j = 0; + foreach (IOutboxContext context in sliceContext) + { + DateTimeOffset createdAt = context.DeliveryResult.CreatedAt; + string id = idGenerator.GenId(createdAt); + string msg = context.DeliveryResult.Message; + long lockExpiresOn = (createdAt + context.PostponeAt).ToUnixTimeSeconds(); + + string errorId = String.Empty; + Exception? 
error = context.Exception; + if (error != null) + { + errorId = errors[error].ErrorId.ToString(); + if (string.IsNullOrEmpty(msg)) + { + msg = error.Message; + } + } + // (@id_{i},@outbox_id_{i},@error_id_{i},@status_code_{i},@status_message_{i},@lock_expires_on_{i},@created_at_{i} + parameters[j] = new($"@p{j}", id); j++; + parameters[j] = new($"@p{j}", context.OutboxId); j++; + parameters[j] = new($"@p{j}", errorId); j++; + parameters[j] = new($"@p{j}", context.DeliveryResult.Code); j++; + parameters[j] = new($"@p{j}", msg); j++; + parameters[j] = new($"@p{j}", lockExpiresOn); j++; + parameters[j] = new($"@p{j}", createdAt.ToUnixTimeSeconds()); j++; + } + + //@tenant AND @part AND @from_date AND @transact_id AND @created_at + + parameters[j++] = new("tnt", filter.TenantId); + parameters[j++] = new("prt", filter.Part); + parameters[j++] = new("from_date", filter.FromDate.ToUnixTimeSeconds()); + parameters[j] = new("tid", filter.TransactId); + + return parameters; + } +} diff --git a/src/Sa.Outbox.PostgreSql/Commands/IErrorDeliveryCommand.cs b/src/Sa.Outbox.PostgreSql/Commands/IErrorDeliveryCommand.cs new file mode 100644 index 0000000..bf862be --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Commands/IErrorDeliveryCommand.cs @@ -0,0 +1,11 @@ + +namespace Sa.Outbox.PostgreSql.Commands; + + +public record struct ErrorInfo(long ErrorId, string TypeName, DateTimeOffset CreatedAt); + + +internal interface IErrorDeliveryCommand +{ + Task> Execute(IOutboxContext[] outboxMessages, CancellationToken cancellationToken); +} diff --git a/src/Sa.Outbox.PostgreSql/Commands/IExtendDeliveryCommand.cs b/src/Sa.Outbox.PostgreSql/Commands/IExtendDeliveryCommand.cs new file mode 100644 index 0000000..ccbcc67 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Commands/IExtendDeliveryCommand.cs @@ -0,0 +1,7 @@ + +namespace Sa.Outbox.PostgreSql.Commands; + +internal interface IExtendDeliveryCommand +{ + Task Execute(TimeSpan lockExpiration, OutboxMessageFilter filter, CancellationToken 
cancellationToken); +} diff --git a/src/Sa.Outbox.PostgreSql/Commands/IFinishDeliveryCommand.cs b/src/Sa.Outbox.PostgreSql/Commands/IFinishDeliveryCommand.cs new file mode 100644 index 0000000..19571ac --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Commands/IFinishDeliveryCommand.cs @@ -0,0 +1,6 @@ +namespace Sa.Outbox.PostgreSql.Commands; + +internal interface IFinishDeliveryCommand +{ + Task Execute(IOutboxContext[] outboxMessages, IReadOnlyDictionary errors, OutboxMessageFilter filter, CancellationToken cancellationToken); +} diff --git a/src/Sa.Outbox.PostgreSql/Commands/IOutboxBulkCommand.cs b/src/Sa.Outbox.PostgreSql/Commands/IOutboxBulkCommand.cs new file mode 100644 index 0000000..55adfb2 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Commands/IOutboxBulkCommand.cs @@ -0,0 +1,6 @@ +namespace Sa.Outbox.PostgreSql.Commands; + +internal interface IOutboxBulkCommand +{ + ValueTask BulkWrite(string payloadType, ReadOnlyMemory> messages, CancellationToken cancellationToken); +} diff --git a/src/Sa.Outbox.PostgreSql/Commands/IStartDeliveryCommand.cs b/src/Sa.Outbox.PostgreSql/Commands/IStartDeliveryCommand.cs new file mode 100644 index 0000000..09ffefd --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Commands/IStartDeliveryCommand.cs @@ -0,0 +1,6 @@ +namespace Sa.Outbox.PostgreSql.Commands; + +internal interface IStartDeliveryCommand +{ + Task Execute(Memory> writeBuffer, int batchSize, TimeSpan lockDuration, OutboxMessageFilter filter, CancellationToken cancellationToken); +} \ No newline at end of file diff --git a/src/Sa.Outbox.PostgreSql/Commands/OutboxBulkCommand.cs b/src/Sa.Outbox.PostgreSql/Commands/OutboxBulkCommand.cs new file mode 100644 index 0000000..1681d70 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Commands/OutboxBulkCommand.cs @@ -0,0 +1,70 @@ +using Microsoft.IO; +using Npgsql; +using NpgsqlTypes; +using Sa.Data.PostgreSql; +using Sa.Outbox.PostgreSql.IdGen; +using Sa.Outbox.PostgreSql.Serialization; +using Sa.Outbox.PostgreSql.TypeHashResolve; + 
+namespace Sa.Outbox.PostgreSql.Commands; + + +internal class OutboxBulkCommand( + IPgDataSource dataSource + , SqlOutboxTemplate sqlTemplate + , RecyclableMemoryStreamManager streamManager + , IOutboxMessageSerializer serializer + , IIdGenerator idGenerator + , IMsgTypeHashResolver hashResolver +) : IOutboxBulkCommand +{ + public async ValueTask BulkWrite(string payloadType, ReadOnlyMemory> messages, CancellationToken cancellationToken) + { + long typeCode = await hashResolver.GetCode(payloadType, cancellationToken); + + ulong result = await dataSource.BeginBinaryImport(sqlTemplate.SqlBulkOutboxCopy, async (writer, t) => + { + WriteRows(writer, typeCode, messages); + + return await writer.CompleteAsync(t); + + }, cancellationToken); + + return result; + } + + private void WriteRows(NpgsqlBinaryImporter writer, long payloadTypeCode, ReadOnlyMemory> messages) + { + foreach (OutboxMessage row in messages.Span) + { + string id = idGenerator.GenId(row.PartInfo.CreatedAt); + + writer.StartRow(); + + + // id + writer.Write(id, NpgsqlDbType.Char); + // tenant + writer.Write(row.PartInfo.TenantId, NpgsqlDbType.Integer); + // part + writer.Write(row.PartInfo.Part, NpgsqlDbType.Text); + + + // payload_id + writer.Write(row.PayloadId, NpgsqlDbType.Text); + // payload_type + writer.Write(payloadTypeCode, NpgsqlDbType.Bigint); + // payload + using RecyclableMemoryStream stream = streamManager.GetStream(); + serializer.Serialize(stream, row.Payload); + stream.Position = 0; + writer.Write(stream, NpgsqlDbType.Bytea); + // payload_size + writer.Write(stream.Length, NpgsqlDbType.Integer); + + + // created_at + writer.Write(row.PartInfo.CreatedAt.ToUnixTimeSeconds(), NpgsqlDbType.Bigint); + } + } +} diff --git a/src/Sa.Outbox.PostgreSql/Commands/Setup.cs b/src/Sa.Outbox.PostgreSql/Commands/Setup.cs new file mode 100644 index 0000000..f43fa4c --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Commands/Setup.cs @@ -0,0 +1,17 @@ +using Microsoft.Extensions.DependencyInjection; +using 
Microsoft.Extensions.DependencyInjection.Extensions; + +namespace Sa.Outbox.PostgreSql.Commands; + +internal static class Setup +{ + public static IServiceCollection AddOutboxCommands(this IServiceCollection services) + { + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + return services; + } +} diff --git a/src/Sa.Outbox.PostgreSql/Commands/SqlCacheSplitter.cs b/src/Sa.Outbox.PostgreSql/Commands/SqlCacheSplitter.cs new file mode 100644 index 0000000..b82ff5b --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Commands/SqlCacheSplitter.cs @@ -0,0 +1,49 @@ +namespace Sa.Outbox.PostgreSql.Commands; + +internal class SqlCacheSplitter(Func genSql) +{ + private readonly Dictionary _sqlCache = []; + + public IEnumerable<(string sql, int len)> GetSql(int len, int maxLen = 4096) + { + if (len <= 0) + { + yield break; + } + + int multipleOf16 = len / 16 * 16; + + if (multipleOf16 > maxLen) + { + int multipleOfMax = multipleOf16 / maxLen; + + for (int i = 0; i < multipleOfMax; i++) + { + yield return GetOrAdd(maxLen); + } + } + else if (multipleOf16 > 0) + { + yield return GetOrAdd(multipleOf16); + } + + int diff = len - multipleOf16; + + if (diff > 0) + { + yield return GetOrAdd(diff); + } + } + + private (string, int) GetOrAdd(int len) + { + if (!_sqlCache.TryGetValue(len, out string? 
sql)) + { + sql = genSql(len); + _sqlCache[len] = sql; + } + + return (sql, len); + } +} + diff --git a/src/Sa.Outbox.PostgreSql/Commands/StartDeliveryCommand.cs b/src/Sa.Outbox.PostgreSql/Commands/StartDeliveryCommand.cs new file mode 100644 index 0000000..4c64871 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Commands/StartDeliveryCommand.cs @@ -0,0 +1,94 @@ +using Npgsql; +using Sa.Data.PostgreSql; +using Sa.Extensions; +using Sa.Outbox.PostgreSql.Serialization; +using Sa.Outbox.PostgreSql.TypeHashResolve; +using System.Data; + +namespace Sa.Outbox.PostgreSql.Commands; + +internal class StartDeliveryCommand( + IPgDataSource dataSource + , SqlOutboxTemplate sqlTemplate + , IOutboxMessageSerializer serializer + , IMsgTypeHashResolver hashResolver +) : IStartDeliveryCommand +{ + public async Task Execute(Memory> writeBuffer, int batchSize, TimeSpan lockDuration, OutboxMessageFilter filter, CancellationToken cancellationToken) + { + + long typeCode = await hashResolver.GetCode(filter.PayloadType, cancellationToken); + + + return await dataSource.ExecuteReader(sqlTemplate.SqlLockAndSelect, (reader, i) => + { + OutboxDeliveryMessage deliveryMessage = DeliveryReader.Read(reader, serializer); + + writeBuffer.Span[i] = deliveryMessage; + }, + [ + new("tenant", filter.TenantId) + , new("part", filter.Part) + , new("from_date", filter.FromDate.ToUnixTimeSeconds()) + , new("payload_type", typeCode) + , new("transact_id", filter.TransactId) + , new("limit", batchSize) + , new("lock_expires_on", (filter.NowDate + lockDuration).ToUnixTimeSeconds()) + , new("now", filter.NowDate.ToUnixTimeSeconds()) + ] + , cancellationToken); + } + + + internal static class DeliveryReader + { + public static OutboxDeliveryMessage Read(NpgsqlDataReader reader, IOutboxMessageSerializer serializer) + { + string outboxId = reader.GetString("outbox_id"); + string payloadId = reader.GetString("outbox_payload_id"); + + TMessage payload = ReadPayload(reader, serializer); + OutboxPartInfo outboxPart = 
ReadOutboxPart(reader); + OutboxDeliveryInfo deliveryInfo = ReadDeliveryInfo(reader); + + return new OutboxDeliveryMessage(outboxId, payloadId, payload, outboxPart, deliveryInfo); + } + + + private static OutboxPartInfo ReadOutboxPart(NpgsqlDataReader reader) + { + return new OutboxPartInfo( + reader.GetInt32("outbox_tenant") + , reader.GetString("outbox_part") + , reader.GetInt64("outbox_created_at").ToDateTimeOffsetFromUnixTimestamp() + ); + } + + private static OutboxDeliveryInfo ReadDeliveryInfo(NpgsqlDataReader reader) + { + return new OutboxDeliveryInfo( + reader.GetString("outbox_delivery_id") + , reader.GetInt32("outbox_delivery_attempt") + , reader.GetString("outbox_delivery_error_id") + , ReadStatus(reader) + , reader.GetInt64("outbox_delivery_created_at").ToDateTimeOffsetFromUnixTimestamp() + ); + } + + private static TMessage ReadPayload(NpgsqlDataReader reader, IOutboxMessageSerializer serializer) + { + using Stream stream = reader.GetStream("outbox_payload"); + TMessage payload = serializer.Deserialize(stream)!; + return payload; + } + + + private static DeliveryStatus ReadStatus(NpgsqlDataReader reader) + { + int code = reader.GetInt32("outbox_delivery_status_code"); + string message = reader.GetString("outbox_delivery_status_message"); + DateTimeOffset createAt = reader.GetInt64("outbox_delivery_created_at").ToDateTimeOffsetFromUnixTimestamp(); + return new DeliveryStatus(code, message, createAt); + } + } +} diff --git a/src/Sa.Outbox.PostgreSql/Configuration/IPgOutboxConfiguration.cs b/src/Sa.Outbox.PostgreSql/Configuration/IPgOutboxConfiguration.cs new file mode 100644 index 0000000..359e99e --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Configuration/IPgOutboxConfiguration.cs @@ -0,0 +1,9 @@ +using Sa.Data.PostgreSql; + +namespace Sa.Outbox.PostgreSql; + +public interface IPgOutboxConfiguration +{ + IPgOutboxConfiguration WithPgOutboxSettings(Action? configure = null); + IPgOutboxConfiguration AddDataSource(Action? 
configure = null); +} diff --git a/src/Sa.Outbox.PostgreSql/Configuration/PgOutboxConfiguration.cs b/src/Sa.Outbox.PostgreSql/Configuration/PgOutboxConfiguration.cs new file mode 100644 index 0000000..8e51402 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Configuration/PgOutboxConfiguration.cs @@ -0,0 +1,62 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Sa.Data.PostgreSql; +using System.Collections.Concurrent; + +namespace Sa.Outbox.PostgreSql.Configuration; + +internal class PgOutboxConfiguration(IServiceCollection services) : IPgOutboxConfiguration +{ + private static readonly ConcurrentDictionary>> s_invokers = []; + + public IPgOutboxConfiguration WithPgOutboxSettings(Action? configure = null) + { + if (configure != null) + { + if (s_invokers.TryGetValue(services, out var invokers)) + { + invokers.Add(configure); + } + else + { + s_invokers[services] = [configure]; + } + } + + + services.TryAddSingleton(sp => + { + PgOutboxSettings settings = new(); + + if (s_invokers.TryGetValue(services, out var invokers)) + { + foreach (Action build in invokers) + build.Invoke(sp, settings); + + s_invokers.Remove(services, out _); + } + + return settings; + }); + + AddSettings(); + return this; + } + + public IPgOutboxConfiguration AddDataSource(Action? 
configure = null) + { + services.AddPgDataSource(configure); + return this; + } + + + private void AddSettings() + { + services.TryAddSingleton(sp => sp.GetRequiredService().SerializationSettings); + + services.TryAddSingleton(sp => sp.GetRequiredService().TableSettings); + services.TryAddSingleton(sp => sp.GetRequiredService().CacheSettings); + services.TryAddSingleton(sp => sp.GetRequiredService().MigrationSettings); + services.TryAddSingleton(sp => sp.GetRequiredService().CleanupSettings); + } +} diff --git a/src/Sa.Outbox.PostgreSql/Configuration/PgOutboxSettings.cs b/src/Sa.Outbox.PostgreSql/Configuration/PgOutboxSettings.cs new file mode 100644 index 0000000..b9a840c --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Configuration/PgOutboxSettings.cs @@ -0,0 +1,171 @@ +using System.Text.Json; + +namespace Sa.Outbox.PostgreSql; + +/// +/// Represents the settings for the PostgreSQL Outbox configuration. +/// This class contains various settings related to table configuration, serialization, caching, migration, and cleanup. +/// +public class PgOutboxSettings +{ + /// + /// Gets the settings related to the Outbox table configuration. + /// + public PgOutboxTableSettings TableSettings { get; } = new(); + + /// + /// Gets the settings related to serialization of messages. + /// + public PgOutboxSerializeSettings SerializationSettings { get; } = new(); + + /// + /// Gets the settings related to caching of message types. + /// + public PgOutboxCacheSettings CacheSettings { get; } = new(); + + /// + /// Gets the settings related to migration of the Outbox schema. + /// + public PgOutboxMigrationSettings MigrationSettings { get; } = new(); + + /// + /// Gets the settings related to cleanup of old Outbox messages and parts. + /// + public PgOutboxCleanupSettings CleanupSettings { get; } = new(); +} + + +/// +/// Represents the settings for configuring the Outbox tables in PostgreSQL. 
+/// +public class PgOutboxTableSettings +{ + /// + /// Gets or sets the name of the database schema. + /// Default is set to "public". + /// + public string DatabaseSchemaName { get; set; } = "public"; + + /// + /// Gets or sets the name of the Outbox table. + /// Default is set to "outbox". + /// + public string DatabaseOutboxTableName { get; set; } = "outbox"; + + /// + /// Gets or sets the name of the delivery table. + /// Default is set to "outbox__$delivery". + /// + public string DatabaseDeliveryTableName { get; set; } = "outbox__$delivery"; + + /// + /// Gets or sets the name of the type table. + /// Default is set to "outbox__$type". + /// + public string DatabaseTypeTableName { get; set; } = "outbox__$type"; + + /// + /// Gets or sets the name of the error table. + /// Default is set to "outbox__$error". + /// + public string DatabaseErrorTableName { get; set; } = "outbox__$error"; + + /// + /// Gets the fully qualified name of the Outbox table, including the schema. + /// + /// The qualified name of the Outbox table. + public string GetQualifiedOutboxTableName() => $@"{DatabaseSchemaName}.""{DatabaseOutboxTableName}"""; + + /// + /// Gets the fully qualified name of the delivery table, including the schema. + /// + /// The qualified name of the delivery table. + public string GetQualifiedDeliveryTableName() => $@"{DatabaseSchemaName}.""{DatabaseDeliveryTableName}"""; + + /// + /// Gets the fully qualified name of the type table, including the schema. + /// + /// The qualified name of the type table. + public string GetQualifiedTypeTableName() => $@"{DatabaseSchemaName}.""{DatabaseTypeTableName}"""; + + /// + /// Gets the fully qualified name of the error table, including the schema. + /// + /// The qualified name of the error table. + public string GetQualifiedErrorTableName() => $@"{DatabaseSchemaName}.""{DatabaseErrorTableName}"""; +} + +/// +/// Represents the settings for serialization of messages in the Outbox. 
+/// +public class PgOutboxSerializeSettings +{ + public JsonSerializerOptions? JsonSerializerOptions { get; set; } +} + + +/// +/// Represents the settings for caching message types in the Outbox. +/// +public class PgOutboxCacheSettings +{ + /// + /// Gets or sets the duration for which message types are cached. + /// Default is set to 1 day. + /// + public TimeSpan CacheTypeDuration { get; set; } = TimeSpan.FromDays(1); +} + +/// +/// Represents the settings for migrating the Outbox schema in PostgreSQL. +/// +public class PgOutboxMigrationSettings +{ + /// + /// Gets or sets a value indicating whether the migration should be executed as a background job. + /// Default is set to true, meaning the migration will run as a job. + /// + public bool AsJob { get; set; } = true; + + /// + /// Gets or sets the number of days to move forward during the migration process. + /// Default is set to 2 days. + /// + public int ForwardDays { get; set; } = 2; + + /// + /// Gets or sets the interval at which the migration job will be executed. + /// Default is set to every 4 hours, with a random additional delay of up to 59 minutes. + /// + public TimeSpan ExecutionInterval { get; set; } = TimeSpan + .FromHours(4) + .Add(TimeSpan.FromMinutes(Random.Shared.Next(1, 59))); +} + + +/// +/// Represents the settings for cleaning up old Outbox messages and parts in PostgreSQL. +/// This class contains configuration options for how and when the cleanup should occur. +/// +public class PgOutboxCleanupSettings +{ + /// + /// Gets or sets a value indicating whether the cleanup should be executed as a background job. + /// Default is set to false, meaning the cleanup will not run as a job. + /// + public bool AsJob { get; set; } = false; + + /// + /// Gets or sets the duration after which old parts will be dropped. + /// Default is set to 30 days. 
+ /// + public TimeSpan DropPartsAfterRetention { get; set; } = TimeSpan.FromDays(30); + + /// + /// Gets or sets the interval at which the cleanup job will be executed. + /// Default is set to every 4 hours, with a random additional delay of up to 59 minutes. + /// + public TimeSpan ExecutionInterval { get; set; } = TimeSpan + .FromHours(4) + .Add(TimeSpan.FromMinutes(Random.Shared.Next(1, 59))); +} diff --git a/src/Sa.Outbox.PostgreSql/Configuration/Setup.cs b/src/Sa.Outbox.PostgreSql/Configuration/Setup.cs new file mode 100644 index 0000000..a6f28f3 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Configuration/Setup.cs @@ -0,0 +1,19 @@ +using Microsoft.Extensions.DependencyInjection; + +namespace Sa.Outbox.PostgreSql.Configuration; + +internal static class Setup +{ + public static IServiceCollection AddPgOutboxSettings(this IServiceCollection services, Action? configure = null) + { + var cfg = new PgOutboxConfiguration(services); + configure?.Invoke(cfg); + + cfg + .WithPgOutboxSettings() + .AddDataSource() + ; + + return services; + } +} diff --git a/src/Sa.Outbox.PostgreSql/GlobalSuppressions.cs b/src/Sa.Outbox.PostgreSql/GlobalSuppressions.cs new file mode 100644 index 0000000..0157fed --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/GlobalSuppressions.cs @@ -0,0 +1,8 @@ +// This file is used by Code Analysis to maintain SuppressMessage +// attributes that are applied to this project. +// Project-level suppressions either have no target or are given +// a specific target and scoped to a namespace, type, member, etc. 
+ +using System.Diagnostics.CodeAnalysis; + +[assembly: SuppressMessage("Style", "IDE0130:Namespace does not match folder structure")] diff --git a/src/Sa.Outbox.PostgreSql/IdGen/IIdGenerator.cs b/src/Sa.Outbox.PostgreSql/IdGen/IIdGenerator.cs new file mode 100644 index 0000000..a89cc5b --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/IdGen/IIdGenerator.cs @@ -0,0 +1,6 @@ +namespace Sa.Outbox.PostgreSql.IdGen; + +public interface IIdGenerator +{ + string GenId(DateTimeOffset date); +} diff --git a/src/Sa.Outbox.PostgreSql/IdGen/IdGenerator.cs b/src/Sa.Outbox.PostgreSql/IdGen/IdGenerator.cs new file mode 100644 index 0000000..095fd9f --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/IdGen/IdGenerator.cs @@ -0,0 +1,6 @@ +namespace Sa.Outbox.PostgreSql.IdGen; + +internal class IdGenerator : IIdGenerator +{ + public string GenId(DateTimeOffset date) => Ulid.NewUlid(date).ToString(); +} diff --git a/src/Sa.Outbox.PostgreSql/IdGen/Setup.cs b/src/Sa.Outbox.PostgreSql/IdGen/Setup.cs new file mode 100644 index 0000000..d78d13a --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/IdGen/Setup.cs @@ -0,0 +1,14 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; + +namespace Sa.Outbox.PostgreSql.IdGen; + +internal static class Setup +{ + public static IServiceCollection AddIdGen(this IServiceCollection services) + { + services.TryAddSingleton(); + + return services; + } +} diff --git a/src/Sa.Outbox.PostgreSql/Interceptors/DeliveryJobInterceptor.cs b/src/Sa.Outbox.PostgreSql/Interceptors/DeliveryJobInterceptor.cs new file mode 100644 index 0000000..48344b6 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Interceptors/DeliveryJobInterceptor.cs @@ -0,0 +1,18 @@ +using Sa.Outbox.Job; +using Sa.Partitional.PostgreSql; +using Sa.Schedule; + +namespace Sa.Outbox.PostgreSql.Interceptors; + +internal class DeliveryJobInterceptor(IPartMigrationService migrationService) : IOutboxJobInterceptor +{ + public async Task OnHandle(IJobContext context, 
Func next, object? key, CancellationToken cancellationToken) + { + if (!migrationService.OutboxMigrated.IsCancellationRequested && context.Settings.JobType.Name.StartsWith("DeliveryJob")) + { + return; + } + + await next(); + } +} diff --git a/src/Sa.Outbox.PostgreSql/Interceptors/Setup.cs b/src/Sa.Outbox.PostgreSql/Interceptors/Setup.cs new file mode 100644 index 0000000..f51c5f3 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Interceptors/Setup.cs @@ -0,0 +1,14 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Sa.Outbox.Job; + +namespace Sa.Outbox.PostgreSql.Interceptors; + +internal static class Setup +{ + public static IServiceCollection AddOutboxJobInterceptors(this IServiceCollection services) + { + services.TryAddSingleton(); + return services; + } +} diff --git a/src/Sa.Outbox.PostgreSql/Partitional/OutboxMigrationSupport.cs b/src/Sa.Outbox.PostgreSql/Partitional/OutboxMigrationSupport.cs new file mode 100644 index 0000000..ef2c514 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Partitional/OutboxMigrationSupport.cs @@ -0,0 +1,19 @@ +using Sa.Classes; +using Sa.Outbox.Partitional; +using Sa.Partitional.PostgreSql; + +namespace Sa.Outbox.PostgreSql.Partitional; + +internal class OutboxMigrationSupport(IOutboxPartitionalSupport? 
partitionalSupport = null) : IPartTableMigrationSupport +{ + public async Task GetPartValues(CancellationToken cancellationToken) + { + if (partitionalSupport == null) return []; + + IReadOnlyCollection parts = await partitionalSupport.GetPartValues(cancellationToken); + + return parts + .Select(c => new StrOrNum[] { c.TenantId, c.Part }) + .ToArray(); + } +} diff --git a/src/Sa.Outbox.PostgreSql/Partitional/Setup.cs b/src/Sa.Outbox.PostgreSql/Partitional/Setup.cs new file mode 100644 index 0000000..58935c8 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Partitional/Setup.cs @@ -0,0 +1,66 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Sa.Partitional.PostgreSql; + +namespace Sa.Outbox.PostgreSql.Partitional; + +internal static class Setup +{ + public static IServiceCollection AddOutboxPartitional(this IServiceCollection services) + { + services.TryAddSingleton(); + + services.AddPartitional((sp, builder) => + { + SqlOutboxTemplate sql = sp.GetRequiredService(); + IPartTableMigrationSupport? 
migrationSupport = sp.GetService(); + + builder.AddSchema(sql.DatabaseSchemaName, schema => + { + ITableBuilder outboxTableBuilder = schema + .AddTable(sql.DatabaseOutboxTableName, SqlOutboxTemplate.OutboxFields) + .PartByList("outbox_tenant", "outbox_part") + .TimestampAs("outbox_created_at") + .AddPostSql(() => sql.SqlCreateTypeTable) + ; + + ITableBuilder deliveryTableBuilder = schema + .AddTable(sql.DatabaseDeliveryTableName, SqlOutboxTemplate.DeliveryFields) + .PartByList("delivery_tenant", "delivery_part") + .TimestampAs("delivery_created_at") + ; + + ITableBuilder errorTableBuilder = schema + .AddTable(sql.DatabaseErrorTableName, SqlOutboxTemplate.ErrorFields) + .TimestampAs("error_created_at") + ; + + if (migrationSupport != null) + { + outboxTableBuilder.AddMigration(migrationSupport); + deliveryTableBuilder.AddMigration(migrationSupport); + } + + errorTableBuilder.AddMigration(); + }) + ; + }) + .AddPartMigrationSchedule((sp, opts) => + { + PgOutboxMigrationSettings settings = sp.GetRequiredService(); + opts.AsJob = settings.AsJob; + opts.ExecutionInterval = settings.ExecutionInterval; + opts.ForwardDays = settings.ForwardDays; + }) + .AddPartCleanupSchedule((sp, opts) => + { + PgOutboxCleanupSettings settings = sp.GetRequiredService(); + opts.AsJob = settings.AsJob; + opts.ExecutionInterval = settings.ExecutionInterval; + opts.DropPartsAfterRetention = settings.DropPartsAfterRetention; + }) + ; + + return services; + } +} diff --git a/src/Sa.Outbox.PostgreSql/Readme.md b/src/Sa.Outbox.PostgreSql/Readme.md new file mode 100644 index 0000000..8ec6622 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Readme.md @@ -0,0 +1,118 @@ +# Sa.Outbox.PostgreSql + +Предназначен для реализации паттерна Outbox с использованием PostgreSQL, который используется для обеспечения надежной доставки сообщений в распределенных системах. Он помогает избежать потери сообщений и гарантирует, что сообщения будут обработаны даже в случае сбоев. 
+ +## Основные функции +- **Надежная доставка сообщений**: Обеспечивает сохранение сообщений в базе данных до их успешной обработки. +- **Поддержка транзакций**: Позволяет отправлять сообщения в рамках одной транзакции с изменениями в базе данных. +- **Гибкость**: Поддерживает различные типы сообщений и их обработчиков. +- **Параллельная обработка**: Позволяет обрабатывать сообщения параллельно, что увеличивает производительность системы. + + + +## Примеры + +### Пример конфигурирования + +```csharp +using Microsoft.Extensions.DependencyInjection; +using Sa.Outbox; +using Sa.Outbox.PostgreSql; + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + // Конфигурация Outbox + services.AddOutbox(builder => + { + builder.WithDeliveries(deliveryBuilder => + { + deliveryBuilder.AddDelivery(); + }); + + builder.WithPartitioningSupport((serviceProvider, partSettings) => + { + // Пример настройки для обработки сообщений для каждого арендатора + partSettings.ForEachTenant = true; + partSettings.GetTenantIds = async cancellationToken => + { + // Логика получения идентификаторов арендаторов + return await Task.FromResult(new int[] { 1, 2 }); + }; + }); + }); + + // Подключение Outbox с использованием PostgreSQL + services.AddOutboxUsingPostgreSql(cfg => + { + cfg.AddDataSource(c => c.WithConnectionString("Host=my_host;Database=my_db;Username=my_user;Password=my_password")); + cfg.WithPgOutboxSettings((_, settings) => + { + // Установка схемы базы данных + settings.TableSettings.DatabaseSchemaName = "public"; + // Настройка очистки + settings.CleanupSettings.DropPartsAfterRetention = TimeSpan.FromDays(30); + }); + }); + } +} +``` + +### Пример потребителя сообщений + +```csharp +using Sa.Outbox; + +namespace MyNamespace +{ + // Пример сообщения, которое будет отправляться через Outbox + [OutboxMessage] + public record MyMessage(string PayloadId, string Content) : IOutboxPayloadMessage + { + public int TenantId { get; init; } // Идентификатор 
арендатора + } + + // Пример потребителя, который будет обрабатывать сообщения MyMessage + public class MyMessageConsumer : IConsumer + { + public async ValueTask Consume(IReadOnlyCollection> outboxMessages, CancellationToken cancellationToken) + { + foreach (var messageContext in outboxMessages) + { + // Логика обработки сообщения + Console.WriteLine($"Processing message with ID: {messageContext.Payload.PayloadId} and Content: {messageContext.Payload.Content}"); + + // Успешная обработка сообщения + messageContext.Ok("Message processed successfully."); + } + } + } +} +``` + +### Пример отправки сообщения + +```csharp + +public class MessageSender(IOutboxMessagePublisher publisher) +{ + public async Task SendMessagesAsync(CancellationToken cancellationToken) + { + // Создание списка сообщений для отправки + var messages = new List + { + new MyMessage { PayloadId = Guid.NewGuid().ToString(), Content = "Hello, World!", TenantId = 1 }, + new MyMessage { PayloadId = Guid.NewGuid().ToString(), Content = "Another message", TenantId = 2 } + }; + + // Отправка сообщений через Outbox + ulong result = await publisher.Publish(messages, cancellationToken); + + Console.WriteLine($"Sent {result} messages."); + } +} + +``` + + diff --git a/src/Sa.Outbox.PostgreSql/Repository/DeliveryRepository.cs b/src/Sa.Outbox.PostgreSql/Repository/DeliveryRepository.cs new file mode 100644 index 0000000..9101978 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Repository/DeliveryRepository.cs @@ -0,0 +1,47 @@ +using Sa.Outbox.PostgreSql.Commands; + +namespace Sa.Outbox.PostgreSql.Repository; + +internal class DeliveryRepository( + IStartDeliveryCommand startCmd + , IErrorDeliveryCommand errorCmd + , IFinishDeliveryCommand finishCmd + , IExtendDeliveryCommand extendCmd + , IOutboxPartRepository partRepository +) : IDeliveryRepository +{ + public Task StartDelivery(Memory> writeBuffer, int batchSize, TimeSpan lockDuration, OutboxMessageFilter filter, CancellationToken cancellationToken) + { + if 
(cancellationToken.IsCancellationRequested) return Task.FromResult(0); + return startCmd.Execute(writeBuffer, batchSize, lockDuration, filter, cancellationToken); + } + + public async Task FinishDelivery(IOutboxContext[] outboxMessages, OutboxMessageFilter filter, CancellationToken cancellationToken) + { + IReadOnlyDictionary errors = await GetErrors(outboxMessages, cancellationToken); + + IEnumerable parts = outboxMessages + .Select(c => new OutboxPartInfo(c.PartInfo.TenantId, c.PartInfo.Part, c.DeliveryResult.CreatedAt)); + + await partRepository.EnsureDeliveryParts(parts, cancellationToken); + + return await finishCmd.Execute(outboxMessages, errors, filter, cancellationToken); + } + + private async Task> GetErrors(IOutboxContext[] outboxMessages, CancellationToken cancellationToken) + { + IEnumerable errOnDates = outboxMessages + .Where(m => m.Exception != null) + .Select(m => m.DeliveryResult.CreatedAt); + + await partRepository.EnsureErrorParts(errOnDates, cancellationToken); + + IReadOnlyDictionary errors = await errorCmd.Execute(outboxMessages, cancellationToken); + return errors; + } + + public async Task ExtendDelivery(TimeSpan lockExpiration, OutboxMessageFilter filter, CancellationToken cancellationToken) + { + return await extendCmd.Execute(lockExpiration, filter, cancellationToken); + } +} diff --git a/src/Sa.Outbox.PostgreSql/Repository/IMsgTypeRepository.cs b/src/Sa.Outbox.PostgreSql/Repository/IMsgTypeRepository.cs new file mode 100644 index 0000000..d972142 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Repository/IMsgTypeRepository.cs @@ -0,0 +1,7 @@ +namespace Sa.Outbox.PostgreSql.Repository; + +internal interface IMsgTypeRepository +{ + Task Insert(long id, string typeName, CancellationToken cancellationToken); + Task> SelectAll(CancellationToken cancellationToken); +} diff --git a/src/Sa.Outbox.PostgreSql/Repository/IOutboxPartRepository.cs b/src/Sa.Outbox.PostgreSql/Repository/IOutboxPartRepository.cs new file mode 100644 index 0000000..914a0f7 
--- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Repository/IOutboxPartRepository.cs @@ -0,0 +1,11 @@ + +namespace Sa.Outbox.PostgreSql.Repository; + +internal interface IOutboxPartRepository +{ + Task EnsureDeliveryParts(IEnumerable outboxParts, CancellationToken cancellationToken); + Task EnsureOutboxParts(IEnumerable outboxParts, CancellationToken cancellationToken); + Task EnsureErrorParts(IEnumerable dates, CancellationToken cancellationToken); + + Task Migrate(); +} diff --git a/src/Sa.Outbox.PostgreSql/Repository/MsgTypeRepository.cs b/src/Sa.Outbox.PostgreSql/Repository/MsgTypeRepository.cs new file mode 100644 index 0000000..f82e558 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Repository/MsgTypeRepository.cs @@ -0,0 +1,24 @@ +using Sa.Data.PostgreSql; +using System.Data; + +namespace Sa.Outbox.PostgreSql.Repository; + + +internal class MsgTypeRepository(IPgDataSource dataSource, SqlOutboxTemplate template) : IMsgTypeRepository +{ + public Task Insert(long id, string typeName, CancellationToken cancellationToken) + { + return dataSource.ExecuteNonQuery(template.SqlInsertType, [ + new ("type_id", id) + , new ("type_name", typeName) + ], cancellationToken); + } + + public Task> SelectAll(CancellationToken cancellationToken) + { + return dataSource.ExecuteReaderList(template.SqlSelectType, + reader => + (id: reader.GetInt64("type_id"), typeName: reader.GetString("type_name")) + , cancellationToken); + } +} diff --git a/src/Sa.Outbox.PostgreSql/Repository/OutboxPartRepository.cs b/src/Sa.Outbox.PostgreSql/Repository/OutboxPartRepository.cs new file mode 100644 index 0000000..4181a49 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Repository/OutboxPartRepository.cs @@ -0,0 +1,42 @@ +using Sa.Extensions; +using Sa.Partitional.PostgreSql; + +namespace Sa.Outbox.PostgreSql.Repository; + +internal class OutboxPartRepository(IPartitionManager partManager, PgOutboxTableSettings tableSettings) + : IOutboxPartRepository +{ + + public Task EnsureDeliveryParts(IEnumerable 
outboxParts, CancellationToken cancellationToken) + => EnsureParts(tableSettings.DatabaseDeliveryTableName, outboxParts, cancellationToken); + + public Task EnsureOutboxParts(IEnumerable outboxParts, CancellationToken cancellationToken) + => EnsureParts(tableSettings.DatabaseOutboxTableName, outboxParts, cancellationToken); + + public async Task EnsureErrorParts(IEnumerable dates, CancellationToken cancellationToken) + { + int i = 0; + foreach (DateTimeOffset date in dates.Select(c => c.StartOfDay()).Distinct()) + { + i++; + await partManager.EnsureParts(tableSettings.DatabaseErrorTableName, date, [], cancellationToken); + } + + return i; + } + + public Task Migrate() => partManager.Migrate(CancellationToken.None); + + + private async Task EnsureParts(string databaseTableName, IEnumerable outboxParts, CancellationToken cancellationToken) + { + int i = 0; + foreach (OutboxPartInfo part in outboxParts.Distinct()) + { + i++; + await partManager.EnsureParts(databaseTableName, part.CreatedAt, [part.TenantId, part.Part], cancellationToken); + } + + return i; + } +} diff --git a/src/Sa.Outbox.PostgreSql/Repository/OutboxRepository.cs b/src/Sa.Outbox.PostgreSql/Repository/OutboxRepository.cs new file mode 100644 index 0000000..b96bd55 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Repository/OutboxRepository.cs @@ -0,0 +1,18 @@ +using Sa.Extensions; +using Sa.Outbox.PostgreSql.Commands; + +namespace Sa.Outbox.PostgreSql.Repository; + +internal class OutboxRepository(IOutboxBulkCommand bulkCmd, IOutboxPartRepository partRepository) + : IOutboxRepository +{ + public async ValueTask Save(string payloadType, ReadOnlyMemory> messages, CancellationToken cancellationToken = default) + { + if (messages.Length == 0) return 0; + + OutboxPartInfo[] parts = messages.Span.SelectWhere(c => c.PartInfo); + await partRepository.EnsureOutboxParts(parts, cancellationToken); + + return await bulkCmd.BulkWrite(payloadType, messages, cancellationToken); + } +} diff --git 
a/src/Sa.Outbox.PostgreSql/Repository/Setup.cs b/src/Sa.Outbox.PostgreSql/Repository/Setup.cs new file mode 100644 index 0000000..388d634 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Repository/Setup.cs @@ -0,0 +1,16 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; + +namespace Sa.Outbox.PostgreSql.Repository; + +internal static class Setup +{ + public static IServiceCollection AddOutboxMessageRepository(this IServiceCollection services) + { + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + return services; + } +} diff --git a/src/Sa.Outbox.PostgreSql/Sa.Outbox.PostgreSql.csproj b/src/Sa.Outbox.PostgreSql/Sa.Outbox.PostgreSql.csproj new file mode 100644 index 0000000..5763403 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Sa.Outbox.PostgreSql.csproj @@ -0,0 +1,18 @@ + + + + net8.0 + enable + enable + + + + + + + + + + + + diff --git a/src/Sa.Outbox.PostgreSql/Serialization/IOutboxMessageSerializer.cs b/src/Sa.Outbox.PostgreSql/Serialization/IOutboxMessageSerializer.cs new file mode 100644 index 0000000..beb9428 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Serialization/IOutboxMessageSerializer.cs @@ -0,0 +1,12 @@ +using System.Diagnostics.CodeAnalysis; + +namespace Sa.Outbox.PostgreSql.Serialization; + +public interface IOutboxMessageSerializer +{ + T? 
Deserialize(Stream stream); + Task DeserializeAsync(Stream stream, CancellationToken cancellationToken = default); + + void Serialize(Stream stream, [NotNull] T value); + Task SerializeAsync(Stream stream, [NotNull] T value, CancellationToken cancellationToken = default); +} diff --git a/src/Sa.Outbox.PostgreSql/Serialization/OutboxMessageSerializer.cs b/src/Sa.Outbox.PostgreSql/Serialization/OutboxMessageSerializer.cs new file mode 100644 index 0000000..f8e9896 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Serialization/OutboxMessageSerializer.cs @@ -0,0 +1,56 @@ +using Sa.Serialization.Converter; +using System.Diagnostics.CodeAnalysis; +using System.Text.Encodings.Web; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Text.Unicode; + +namespace Sa.Outbox.PostgreSql.Serialization; + +/// +/// Implementation of using . +/// +internal class OutboxMessageSerializer : IOutboxMessageSerializer +{ + + private readonly static JavaScriptEncoder encoder = JavaScriptEncoder.Create(UnicodeRanges.BasicLatin, UnicodeRanges.Cyrillic); + + + /// + /// options for the JSON serializer. By default adds converter. + /// + public JsonSerializerOptions Options { get; private set; } = CreateDefaultOptions(); + + public OutboxMessageSerializer WithOptions(JsonSerializerOptions? options) + { + if (options != null) + { + Options = options; + } + return this; + } + + public static JsonSerializerOptions CreateDefaultOptions() + { + JsonSerializerOptions options = new(JsonSerializerDefaults.Web) + { + WriteIndented = false, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + AllowTrailingCommas = true, + Encoder = encoder, + }; + options.Converters.Add(new ObjectToInferredTypesConverter()); + return options; + } + + public Task DeserializeAsync(Stream stream, CancellationToken cancellationToken = default) + => JsonSerializer.DeserializeAsync(stream, Options, cancellationToken).AsTask(); + + public T? 
Deserialize(Stream stream) => JsonSerializer.Deserialize(stream, Options); + + + public Task SerializeAsync(Stream stream, [NotNull] T value, CancellationToken cancellationToken = default) + => JsonSerializer.SerializeAsync(stream, value ?? throw new ArgumentNullException(nameof(value)), Options, cancellationToken); + + public void Serialize(Stream stream, [NotNull] T value) => JsonSerializer.Serialize(stream, value ?? throw new ArgumentNullException(nameof(value)), Options); +} diff --git a/src/Sa.Outbox.PostgreSql/Serialization/Setup.cs b/src/Sa.Outbox.PostgreSql/Serialization/Setup.cs new file mode 100644 index 0000000..2cabf7a --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Serialization/Setup.cs @@ -0,0 +1,14 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; + +namespace Sa.Outbox.PostgreSql.Serialization; + +public static class Setup +{ + public static IServiceCollection AddOutboxMessageSerializer(this IServiceCollection services, PgOutboxSerializeSettings? settings = null) + { + services.TryAddSingleton(sp + => new OutboxMessageSerializer().WithOptions((settings ?? 
sp.GetRequiredService()).JsonSerializerOptions)); + return services; + } +} diff --git a/src/Sa.Outbox.PostgreSql/Setup.cs b/src/Sa.Outbox.PostgreSql/Setup.cs new file mode 100644 index 0000000..a2f0e0c --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/Setup.cs @@ -0,0 +1,40 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Sa.Outbox.PostgreSql.Commands; +using Sa.Outbox.PostgreSql.Configuration; +using Sa.Outbox.PostgreSql.IdGen; +using Sa.Outbox.PostgreSql.Interceptors; +using Sa.Outbox.PostgreSql.Partitional; +using Sa.Outbox.PostgreSql.Repository; +using Sa.Outbox.PostgreSql.Serialization; +using Sa.Outbox.PostgreSql.TypeHashResolve; + +namespace Sa.Outbox.PostgreSql; + +public static class Setup +{ + public static IServiceCollection AddOutboxUsingPostgreSql(this IServiceCollection services, Action? configure = null) + { + services + .AddSaInfrastructure(); + + services.TryAddSingleton(); + + services + .AddPgOutboxSettings(configure); + + services + .AddOutboxMessageRepository() + .AddOutboxMessageSerializer() + .AddOutboxPartitional() + .AddIdGen() + .AddOutboxCommands() + .AddMsgTypeHashResolver() + .AddOutboxJobInterceptors() + ; + + services.AddOutbox(); + + return services; + } +} \ No newline at end of file diff --git a/src/Sa.Outbox.PostgreSql/SqlOutboxTemplate.cs b/src/Sa.Outbox.PostgreSql/SqlOutboxTemplate.cs new file mode 100644 index 0000000..4e63e65 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/SqlOutboxTemplate.cs @@ -0,0 +1,260 @@ +namespace Sa.Outbox.PostgreSql; + +internal class SqlOutboxTemplate(PgOutboxTableSettings settings) +{ + public string DatabaseSchemaName => settings.DatabaseSchemaName; + public string DatabaseOutboxTableName => settings.DatabaseOutboxTableName; + public string DatabaseDeliveryTableName => settings.DatabaseDeliveryTableName; + public string DatabaseErrorTableName => settings.DatabaseErrorTableName; + + + public readonly static string[] OutboxFields = + [ + 
// ulid + "outbox_id CHAR(26) NOT NULL", + + // -- parts + outbox_created_at + "outbox_tenant INT NOT NULL DEFAULT 0", + "outbox_part TEXT NOT NULL", + + "outbox_payload_id TEXT NOT NULL", + "outbox_payload_type BIGINT NOT NULL", + "outbox_payload BYTEA NOT NULL", + "outbox_payload_size INT NOT NULL", + + // -- rw + "outbox_transact_id TEXT NOT NULL DEFAULT ''", + "outbox_lock_expires_on BIGINT NOT NULL DEFAULT 0", + + // -- delivery + "outbox_delivery_attempt int NOT NULL DEFAULT 0", + // --- copy last + "outbox_delivery_id CHAR(26) NOT NULL DEFAULT ''", + "outbox_delivery_error_id TEXT NOT NULL DEFAULT ''", + "outbox_delivery_status_code INT NOT NULL DEFAULT 0", + "outbox_delivery_status_message TEXT NOT NULL DEFAULT ''", + "outbox_delivery_created_at BIGINT NOT NULL DEFAULT 0" + ]; + + + public readonly static string[] DeliveryFields = + [ + "delivery_id CHAR(26) NOT NULL", + "delivery_outbox_id CHAR(26) NOT NULL", + "delivery_error_id TEXT NOT NULL DEFAULT ''", + "delivery_status_code INT NOT NULL DEFAULT 0", + "delivery_status_message TEXT NOT NULL DEFAULT ''", + "delivery_transact_id TEXT NOT NULL DEFAULT ''", + "delivery_lock_expires_on BIGINT NOT NULL DEFAULT 0", + // - parts + "delivery_tenant INT NOT NULL DEFAULT 0", + "delivery_part TEXT NOT NULL", + ]; + + // Delivery errors + public readonly static string[] ErrorFields = + [ + "error_id BIGINT NOT NULL", + "error_type TEXT NOT NULL", + "error_message TEXT NOT NULL", + ]; + + + //CREATE INDEX IF NOT EXISTS ix_{DatabaseTableName}__payload_id + // ON {GetQualifiedTableName()} (payload_id) + // WHERE payload_id <> '' AND (outbox_delivery_status_code BETWEEN {DeliveryStatusCode.Ok} AND 299 OR outbox_delivery_status_code >= 500) + + //CREATE INDEX IF NOT EXISTS ix_{DatabaseTableName}__payload_type + // ON {GetQualifiedTableName()} (payload_type); + // WHERE (outbox_delivery_status_code < {DeliveryStatusCode.Ok} OR outbox_delivery_status_code BETWEEN 300 AND 499) + //"""; + + + public string SqlBulkOutboxCopy 
= +$""" +COPY {settings.GetQualifiedOutboxTableName()} ( + outbox_id + ,outbox_tenant + ,outbox_part + ,outbox_payload_id + ,outbox_payload_type + ,outbox_payload + ,outbox_payload_size + ,outbox_created_at +) +FROM STDIN (FORMAT BINARY) +; +"""; + + + static readonly string s_InProcessing = $"(outbox_delivery_status_code < {DeliveryStatusCode.Ok} OR outbox_delivery_status_code BETWEEN {DeliveryStatusCode.Status300} AND {DeliveryStatusCode.Status499})"; + + + public string SqlLockAndSelect = +$""" +WITH next_task AS ( + SELECT outbox_id FROM {settings.GetQualifiedOutboxTableName()} + WHERE + outbox_tenant = @tenant AND outbox_part = @part AND outbox_created_at >= @from_date + AND outbox_payload_type = @payload_type + AND {s_InProcessing} + AND outbox_lock_expires_on < @now + LIMIT @limit + FOR UPDATE SKIP LOCKED +) +UPDATE {settings.GetQualifiedOutboxTableName()} +SET + outbox_delivery_status_code = CASE + WHEN outbox_delivery_status_code = 0 THEN {DeliveryStatusCode.Processing} + ELSE outbox_delivery_status_code + END + ,outbox_transact_id = @transact_id + ,outbox_lock_expires_on = @lock_expires_on +FROM + next_task +WHERE + {settings.GetQualifiedOutboxTableName()}.outbox_id = next_task.outbox_id +RETURNING + {settings.GetQualifiedOutboxTableName()}.outbox_id + ,outbox_tenant + ,outbox_part + ,outbox_payload + ,outbox_payload_id + ,outbox_delivery_id + ,outbox_delivery_attempt + ,outbox_delivery_error_id + ,outbox_delivery_status_code + ,outbox_delivery_status_message + ,outbox_delivery_created_at + ,outbox_created_at +; +"""; + + + private static string SqlFinishDelivery(PgOutboxTableSettings settings, int count) + { + return +$""" +WITH inserted_delivery AS ( + INSERT INTO {settings.GetQualifiedDeliveryTableName()} ( + delivery_id + , delivery_outbox_id + , delivery_error_id + , delivery_status_code + , delivery_status_message + , delivery_lock_expires_on + , delivery_transact_id + , delivery_tenant + , delivery_part + , delivery_created_at + ) + VALUES 
+{BuildDeliveryInsertValues(count)} + ON CONFLICT DO NOTHING + RETURNING * +) +UPDATE {settings.GetQualifiedOutboxTableName()} +SET + outbox_delivery_id = inserted_delivery.delivery_id + , outbox_delivery_attempt = outbox_delivery_attempt + CASE + WHEN inserted_delivery.delivery_status_code <> {DeliveryStatusCode.Postpone} THEN 1 + ELSE 0 + END + , outbox_delivery_error_id = inserted_delivery.delivery_error_id + , outbox_delivery_status_code = inserted_delivery.delivery_status_code + , outbox_delivery_status_message = inserted_delivery.delivery_status_message + , outbox_lock_expires_on = inserted_delivery.delivery_lock_expires_on + , outbox_delivery_created_at = inserted_delivery.delivery_created_at +FROM + inserted_delivery +WHERE + outbox_tenant = @tnt + AND outbox_part = @prt + AND outbox_created_at >= @from_date + AND outbox_transact_id = @tid + AND outbox_id = inserted_delivery.delivery_outbox_id +; + +"""; + } + + private static string BuildDeliveryInsertValues(int count) + { + List values = []; + int j = 0; + for (int i = 0; i < count; i++) + { + // @id_{i},@outbox_id_{i},@error_id_{i},@status_code_{i},@status_message_{i},@lock_expires_on_{i},@created_at_{i} + values.Add($" (@p{j++},@p{j++},@p{j++},@p{j++},@p{j++},@p{j++},@tid,@tnt,@prt,@p{j++})"); + } + return string.Join(",\r\n", values); + } + + + public string SqlExtendDelivery = +$""" +UPDATE {settings.GetQualifiedOutboxTableName()} +SET + outbox_lock_expires_on = @lock_expires_on +WHERE + outbox_tenant = @tenant AND outbox_part = @part AND outbox_created_at >= @from_date + AND outbox_payload_type = @payload_type + AND {s_InProcessing} + AND outbox_transact_id = @transact_id + AND outbox_lock_expires_on > @now +FOR UPDATE SKIP LOCKED +; +"""; + + + public string SqlFinishDelivery(int count) => SqlFinishDelivery(settings, count); + + + public string SqlCreateTypeTable + = +$""" +CREATE TABLE IF NOT EXISTS {settings.GetQualifiedTypeTableName()} +( + type_id BIGINT NOT NULL, + type_name TEXT NOT NULL, + 
CONSTRAINT "pk_{settings.DatabaseTypeTableName}" PRIMARY KEY (type_id) +) +; +"""; + + + public string SqlSelectType = $"SELECT * FROM {settings.GetQualifiedTypeTableName()}"; + + public string SqlInsertType = +$""" +INSERT INTO {settings.GetQualifiedTypeTableName()} + (type_id, type_name) +VALUES + (@type_id,@type_name) +ON CONFLICT DO NOTHING +; +"""; + + + private static string BuildErrorInsertValues(int count) + { + List values = []; + for (int i = 0; i < count; i++) + { + values.Add($" (@id_{i},@type_{i},@message_{i},@created_at_{i})"); + } + return string.Join(",\r\n", values); + } + + private static string SqlError(PgOutboxTableSettings settings, int count) => +$""" +INSERT INTO {settings.GetQualifiedErrorTableName()} + (error_id,error_type,error_message,error_created_at) +VALUES +{BuildErrorInsertValues(count)} +ON CONFLICT DO NOTHING +; +"""; + + public string SqlError(int count) => SqlError(settings, count); +} diff --git a/src/Sa.Outbox.PostgreSql/TypeHashResolve/IMsgTypeCache.cs b/src/Sa.Outbox.PostgreSql/TypeHashResolve/IMsgTypeCache.cs new file mode 100644 index 0000000..4c231c1 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/TypeHashResolve/IMsgTypeCache.cs @@ -0,0 +1,9 @@ + +namespace Sa.Outbox.PostgreSql.TypeHashResolve; + +internal interface IMsgTypeCache +{ + ValueTask GetCode(string typeName, CancellationToken cancellationToken); + ValueTask GetTypeName(long code, CancellationToken cancellationToken); + ValueTask Reset(CancellationToken cancellationToken); +} \ No newline at end of file diff --git a/src/Sa.Outbox.PostgreSql/TypeHashResolve/IMsgTypeHashResolver.cs b/src/Sa.Outbox.PostgreSql/TypeHashResolve/IMsgTypeHashResolver.cs new file mode 100644 index 0000000..a2d4954 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/TypeHashResolve/IMsgTypeHashResolver.cs @@ -0,0 +1,7 @@ +namespace Sa.Outbox.PostgreSql.TypeHashResolve; + +internal interface IMsgTypeHashResolver +{ + ValueTask GetCode(string typeName, CancellationToken cancellationToken); + 
ValueTask GetTypeName(long typeCode, CancellationToken cancellationToken); +} diff --git a/src/Sa.Outbox.PostgreSql/TypeHashResolve/MsgTypeCache.cs b/src/Sa.Outbox.PostgreSql/TypeHashResolve/MsgTypeCache.cs new file mode 100644 index 0000000..32da900 --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/TypeHashResolve/MsgTypeCache.cs @@ -0,0 +1,77 @@ +using Sa.Outbox.PostgreSql.Repository; +using ZiggyCreatures.Caching.Fusion; + +namespace Sa.Outbox.PostgreSql.TypeHashResolve; + +internal sealed class MsgTypeCache( + IFusionCacheProvider cacheProvider + , IMsgTypeRepository repository + , PgOutboxCacheSettings cacheSettings) + : IDisposable, IMsgTypeCache +{ + internal static class Env + { + public const string CacheName = "sa-msgtype"; + } + + private readonly IFusionCache _cache = cacheProvider.GetCache(Env.CacheName); + + internal class Storage + { + private readonly Dictionary _hashType = []; + private readonly Dictionary _typeHash = []; + + internal Storage(List<(long id, string typeName)> hashCodes) + { + foreach (var (id, typeName) in hashCodes) + { + _hashType[id] = typeName; + _typeHash[typeName] = id; + } + } + + public long GetCode(string typeName) + { + if (_typeHash.TryGetValue(typeName, out var code)) return code; + return 0; + } + + public string? 
GetType(long code) + { + if (_hashType.TryGetValue(code, out var name)) return name; + return default; + } + } + + public async ValueTask GetCode(string typeName, CancellationToken cancellationToken) + { + var storage = await GetStorage(cancellationToken); + return storage.GetCode(typeName); + } + + public async ValueTask GetTypeName(long code, CancellationToken cancellationToken) + { + var storage = await GetStorage(cancellationToken); + return storage.GetType(code); + } + + public ValueTask Reset(CancellationToken cancellationToken) => _cache.RemoveAsync(Env.CacheName, token: cancellationToken); + + private ValueTask GetStorage(CancellationToken cancellationToken) + { + return _cache.GetOrSetAsync( + Env.CacheName + , async (context, t) => await Load(context, t) + , options: null + , token: cancellationToken); + } + + private async Task Load(FusionCacheFactoryExecutionContext context, CancellationToken cancellationToken) + { + List<(long id, string typeName)> hashCodes = await repository.SelectAll(cancellationToken); + context.Options.Duration = hashCodes.Count > 0 ? 
cacheSettings.CacheTypeDuration : TimeSpan.Zero; + return new Storage(hashCodes); + } + + public void Dispose() => _cache.Dispose(); +} diff --git a/src/Sa.Outbox.PostgreSql/TypeHashResolve/MsgTypeHashResolver.cs b/src/Sa.Outbox.PostgreSql/TypeHashResolve/MsgTypeHashResolver.cs new file mode 100644 index 0000000..2524f0a --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/TypeHashResolve/MsgTypeHashResolver.cs @@ -0,0 +1,44 @@ +using Sa.Extensions; +using Sa.Outbox.PostgreSql.Repository; + +namespace Sa.Outbox.PostgreSql.TypeHashResolve; + + +internal class MsgTypeHashResolver(IMsgTypeCache cache, IMsgTypeRepository repository) : IMsgTypeHashResolver +{ + private int _triggered = 0; + + public async ValueTask GetCode(string typeName, CancellationToken cancellationToken) + { + + long code = await cache.GetCode(typeName, cancellationToken); + + if (code != 0) return code; + + code = typeName.GetMurmurHash3(); + + if (Interlocked.CompareExchange(ref _triggered, 1, 0) == 1) return code; + + try + { + await repository.Insert(code, typeName, cancellationToken); + await cache.Reset(cancellationToken); + } + finally + { + Interlocked.CompareExchange(ref _triggered, 0, 1); + } + + return code; + } + + public async ValueTask GetTypeName(long typeCode, CancellationToken cancellationToken) + { + string? typeName = await cache.GetTypeName(typeCode, cancellationToken); + if (typeName != null) return typeName; + + await cache.Reset(cancellationToken); + + return await cache.GetTypeName(typeCode, cancellationToken) ?? 
typeCode.ToString(); + } +} diff --git a/src/Sa.Outbox.PostgreSql/TypeHashResolve/Setup.cs b/src/Sa.Outbox.PostgreSql/TypeHashResolve/Setup.cs new file mode 100644 index 0000000..9609aec --- /dev/null +++ b/src/Sa.Outbox.PostgreSql/TypeHashResolve/Setup.cs @@ -0,0 +1,23 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Sa.Data.Cache; + +namespace Sa.Outbox.PostgreSql.TypeHashResolve; + +internal static class Setup +{ + public static IServiceCollection AddMsgTypeHashResolver(this IServiceCollection services) + { + services.AddFusionCacheEx(MsgTypeCache.Env.CacheName, (sp, opts) => + { + PgOutboxCacheSettings cacheSettings = sp.GetRequiredService(); + opts.Duration = cacheSettings.CacheTypeDuration; + }); + + + services.TryAddSingleton(); + services.TryAddSingleton(); + + return services; + } +} \ No newline at end of file diff --git a/src/Sa.Outbox/Configuration/DeliveryBuilder.cs b/src/Sa.Outbox/Configuration/DeliveryBuilder.cs new file mode 100644 index 0000000..ca47401 --- /dev/null +++ b/src/Sa.Outbox/Configuration/DeliveryBuilder.cs @@ -0,0 +1,14 @@ +using Microsoft.Extensions.DependencyInjection; +using Sa.Outbox.Job; + +namespace Sa.Outbox.Configuration; + +internal class DeliveryBuilder(IServiceCollection services) : IDeliveryBuilder +{ + public IDeliveryBuilder AddDelivery(Action? configure = null, int instanceCount = 1) + where TConsumer : class, IConsumer + { + services.AddDeliveryJob(configure, instanceCount); + return this; + } +} diff --git a/src/Sa.Outbox/Configuration/IDeliveryBuilder.cs b/src/Sa.Outbox/Configuration/IDeliveryBuilder.cs new file mode 100644 index 0000000..a8a19a0 --- /dev/null +++ b/src/Sa.Outbox/Configuration/IDeliveryBuilder.cs @@ -0,0 +1,19 @@ +namespace Sa.Outbox; + + +/// +/// Represents a builder for creating outbox deliveries. +/// +public interface IDeliveryBuilder +{ + /// + /// Adds a delivery for the specified consumer and message type. 
+ /// + /// The type of consumer. + /// The type of message. + /// An optional action to configure the delivery settings. + /// The number of instances to create for the delivery. + /// The delivery builder instance. + IDeliveryBuilder AddDelivery(Action? configure = null, int instanceCount = 1) + where TConsumer : class, IConsumer; +} \ No newline at end of file diff --git a/src/Sa.Outbox/Configuration/IOutboxBuilder.cs b/src/Sa.Outbox/Configuration/IOutboxBuilder.cs new file mode 100644 index 0000000..7db49d8 --- /dev/null +++ b/src/Sa.Outbox/Configuration/IOutboxBuilder.cs @@ -0,0 +1,19 @@ +namespace Sa.Outbox; + +public interface IOutboxBuilder +{ + OutboxPublishSettings PublishSettings { get; } + /// + /// Configures the delivery settings for the outbox. + /// + /// An action to configure the delivery settings. + /// The current instance of the IOutboxSettingsBuilder. + IOutboxBuilder WithDeliveries(Action build); + + /// + /// Enables partitioning support for the outbox. + /// + /// An action to configure the partitioning settings. + /// The current instance of the IOutboxSettingsBuilder. 
+ IOutboxBuilder WithPartitioningSupport(Action configure); +} \ No newline at end of file diff --git a/src/Sa.Outbox/Configuration/OutboxBuilder.cs b/src/Sa.Outbox/Configuration/OutboxBuilder.cs new file mode 100644 index 0000000..dfc1bda --- /dev/null +++ b/src/Sa.Outbox/Configuration/OutboxBuilder.cs @@ -0,0 +1,34 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Sa.Outbox.Delivery; +using Sa.Outbox.Partitional; +using Sa.Outbox.Publication; + +namespace Sa.Outbox.Configuration; + +internal class OutboxBuilder : IOutboxBuilder +{ + private readonly IServiceCollection services; + + public OutboxBuilder(IServiceCollection services) + { + this.services = services; + services.AddSaInfrastructure(); + services.AddMessagePublisher(); + services.TryAddSingleton(this.PublishSettings); + } + + public OutboxPublishSettings PublishSettings { get; } = new(); + + public IOutboxBuilder WithDeliveries(Action build) + { + services.AddOutboxDelivery(build); + return this; + } + + public IOutboxBuilder WithPartitioningSupport(Action configure) + { + services.AddPartitioningSupport(configure); + return this; + } +} diff --git a/src/Sa.Outbox/Configuration/OutboxSettings.cs b/src/Sa.Outbox/Configuration/OutboxSettings.cs new file mode 100644 index 0000000..e83632f --- /dev/null +++ b/src/Sa.Outbox/Configuration/OutboxSettings.cs @@ -0,0 +1,109 @@ +namespace Sa.Outbox; + + +/// +/// Settings for publishing messages from the Outbox. +/// +public class OutboxPublishSettings +{ + /// + /// The maximum batch size of messages to be sent at once. + /// Default value: 16. + /// for array pool size: 16, 32, 64, 128, 256, 512, 1024, 2048, 4096 + /// + public int MaxBatchSize { get; set; } = 64; +} + + +/// +/// Indicates that this is a configuration for message delivery in the Outbox. 
+/// +public class OutboxDeliverySettings(Guid jobId, int instanceIndex = 0) +{ + /// + /// Gets the unique identifier for the delivery job + /// + public Guid JobId => jobId; + /// + /// Indicates the index of the worker instance. + /// + public int WorkerInstanceIndex => instanceIndex; + /// + /// Gets the scheduling settings for the delivery job. + /// + public ScheduleSettings ScheduleSettings { get; } = new(); + /// + /// Gets the extraction settings for retrieving messages from the Outbox. + /// + public ExtractSettings ExtractSettings { get; } = new(); + /// + /// Gets the consumption settings for processing messages. + /// + public ConsumeSettings ConsumeSettings { get; } = new(); +} + +/// +/// Represents the scheduling settings for the delivery job. +/// +public class ScheduleSettings +{ + public string? Name { get; set; } + public TimeSpan ExecutionInterval { get; set; } = TimeSpan.FromMinutes(1); + + /// + /// job schedule delay before start + /// + public TimeSpan InitialDelay { get; set; } = TimeSpan.FromSeconds(10); + + public int RetryCountOnError { get; set; } = 2; +} + +/// +/// Represents the extraction settings for retrieving messages from the Outbox. +/// +public class ExtractSettings +{ + /// + /// Gets or sets the maximum size of the Outbox message batch for each database poll. + /// for array pool size: 16, 32, 64, 128, 256, 512, 1024 ... + /// + public int MaxBatchSize { get; set; } = 16; + /// + /// Message lock expiration time. + /// When a batch of messages for a bus instance is acquired, the messages will be locked (reserved) for that amount of time. + /// + public TimeSpan LockDuration { get; set; } = TimeSpan.FromSeconds(10); + + /// + /// How long before to request a lock renewal. + /// This should be much shorter than . 
+ /// + public TimeSpan LockRenewal { get; set; } = TimeSpan.FromSeconds(3); + + /// + /// Repeat extract for each tenant + /// + /// + public bool ForEachTenant { get; set; } + + /// + /// select outbox messages for processing for the period + /// + public TimeSpan LookbackInterval { get; set; } = TimeSpan.FromDays(7); +} + +/// +/// Represents the consumption settings for processing messages from the Outbox. +/// +public class ConsumeSettings +{ + /// + /// The maximum number of delivery attempts before delivery will not be attempted again. + /// + public int MaxDeliveryAttempts { get; set; } = 3; + /// + /// The maximum number of messages that can take in part + /// default value + /// + public int? ConsumeBatchSize { get; set; } +} diff --git a/src/Sa.Outbox/Delivery/DeliveryCourier.cs b/src/Sa.Outbox/Delivery/DeliveryCourier.cs new file mode 100644 index 0000000..815e720 --- /dev/null +++ b/src/Sa.Outbox/Delivery/DeliveryCourier.cs @@ -0,0 +1,70 @@ +using Microsoft.Extensions.DependencyInjection; +using Sa.Extensions; +using Sa.Outbox.Exceptions; + +namespace Sa.Outbox.Delivery; + +internal class DeliveryCourier(IServiceProvider serviceProvider) : IDeliveryCourier +{ + + public async ValueTask Deliver(IReadOnlyCollection> outboxMessages, int maxDeliveryAttempts, CancellationToken cancellationToken) + { + try + { + using IServiceScope scope = serviceProvider.CreateScope(); + IConsumer consumer = scope.ServiceProvider.GetRequiredService>(); + await consumer.Consume(outboxMessages, cancellationToken); + } + catch (Exception ex) when (!ex.IsCritical()) + { + HandleError(ex, outboxMessages); + } + + return PostHandle(outboxMessages, maxDeliveryAttempts); + } + + private static void HandleError(Exception error, IReadOnlyCollection> outboxMessages) + { + foreach (IOutboxContext item in outboxMessages) + { + // если не было обработанных ошибок до - то... + if (item.DeliveryResult.Code == 0) + { + // "Unknown delivery error." 
+ // раскидываем ошибки в отложенную обработку от 10 до 45 мин + item.Error(error ?? new DeliveryException("Unknown delivery error."), postpone: GenTimeSpan.Next()); + } + } + } + + private static int PostHandle(IReadOnlyCollection> messages, int maxDeliveryAttempts) + { + int iOk = 0; + foreach (IOutboxContext message in messages) + { + if (message.DeliveryResult.Code >= 400 && message.DeliveryResult.Code < 500 && message.DeliveryInfo.Attempt + 1 > maxDeliveryAttempts) + { + Exception exception = message.Exception ?? new DeliveryPermanentException("Maximum delivery attempts exceeded", statusCode: 501); + + // Устанавливаем постоянную ошибку + message.PermanentError(exception, statusCode: 501); + } + else if (message.DeliveryResult.Code == 0) + { + message.Ok(); + iOk++; + } + } + + return iOk; + } + + + static class GenTimeSpan + { + public static TimeSpan Next() + { + return TimeSpan.FromSeconds(Random.Shared.Next(60 * 10, 60 * 45)); + } + } +} \ No newline at end of file diff --git a/src/Sa.Outbox/Delivery/DeliveryProcessor.cs b/src/Sa.Outbox/Delivery/DeliveryProcessor.cs new file mode 100644 index 0000000..5f107af --- /dev/null +++ b/src/Sa.Outbox/Delivery/DeliveryProcessor.cs @@ -0,0 +1,19 @@ +namespace Sa.Outbox.Delivery; + +internal class DeliveryProcessor(IDeliveryRelay relayService) : IDeliveryProcessor +{ + public async Task ProcessMessages(OutboxDeliverySettings settings, CancellationToken cancellationToken) + { + long count = 0; + bool runAgain; + do + { + int sentCount = await relayService.StartDelivery(settings, cancellationToken); + runAgain = sentCount > 0; + count += sentCount; + } + while (runAgain && !cancellationToken.IsCancellationRequested); + + return count; + } +} diff --git a/src/Sa.Outbox/Delivery/DeliveryRelay.cs b/src/Sa.Outbox/Delivery/DeliveryRelay.cs new file mode 100644 index 0000000..c3d4590 --- /dev/null +++ b/src/Sa.Outbox/Delivery/DeliveryRelay.cs @@ -0,0 +1,135 @@ +using Sa.Classes; +using Sa.Extensions; +using 
Sa.Host.MessageTypeResolver; +using Sa.Outbox.Partitional; +using Sa.Outbox.Publication; +using Sa.Outbox.Repository; +using Sa.Timing.Providers; + +namespace Sa.Outbox.Delivery; + +internal sealed class DeliveryRelay( + IDeliveryRepository repository + , IMessageTypeResolver typeResolver + , IArrayPoolFactory arrayPoolFactory + , IPartitionalSupportCache partCache + , ICurrentTimeProvider timeProvider + , IDeliveryCourier deliveryCourier + , PartitionalSettings? partitionalSettings = null + ) : IDeliveryRelay +{ + + private readonly bool _globalForEachTenant = partitionalSettings?.ForEachTenant ?? false; + + public async Task StartDelivery(OutboxDeliverySettings settings, CancellationToken cancellationToken) + { + IArrayPooler> arrayPooler = arrayPoolFactory.Create>(); + + int batchSize = settings.ExtractSettings.MaxBatchSize; + + if (batchSize == 0) return 0; + + OutboxDeliveryMessage[] buffer = arrayPooler.Rent(batchSize); + Memory> slice = buffer.AsMemory(0, batchSize); + try + { + if (_globalForEachTenant || settings.ExtractSettings.ForEachTenant) + { + int count = 0; + int[] tenantIds = await partCache.GetTenantIds(cancellationToken); + foreach (int tenantId in tenantIds) + { + count += await FillBuffer(slice, settings, tenantId, cancellationToken); + } + return count; + } + else + { + return await FillBuffer(slice, settings, 0, cancellationToken); + } + } + finally + { + arrayPooler.Return(buffer); + } + } + + private async Task FillBuffer(Memory> buffer, OutboxDeliverySettings settings, int tenantId, CancellationToken cancellationToken) + { + OutboxMessageFilter filter = CreateFilter(settings, tenantId); + + int locked = await repository.StartDelivery(buffer, settings.ExtractSettings.MaxBatchSize, settings.ExtractSettings.LockDuration, filter, cancellationToken); + if (locked == 0) return locked; + + buffer = buffer[..locked]; + + using IDisposable locker = KeepLocker.KeepLocked( + settings.ExtractSettings.LockRenewal + , async t => + { + filter = 
ExtendFilter(filter); + await repository.ExtendDelivery(settings.ExtractSettings.LockDuration, filter, t); + } + , cancellationToken: cancellationToken + ); + + // send msgs to consumer + return await DeliverBatches(buffer, settings, filter, cancellationToken); + } + + private OutboxMessageFilter CreateFilter(OutboxDeliverySettings settings, int tenantId) + { + OutboxMessageTypeInfo ti = OutboxMessageTypeHelper.GetOutboxMessageTypeInfo(); + DateTimeOffset fromDate = timeProvider.GetUtcNow().StartOfDay() - settings.ExtractSettings.LookbackInterval; + + return new OutboxMessageFilter( + GenTransactId() + , typeResolver.ToName() + , tenantId + , ti.PartName + , fromDate + , timeProvider.GetUtcNow() + ); + } + + private OutboxMessageFilter ExtendFilter(OutboxMessageFilter filter) + { + return new OutboxMessageFilter( + filter.TransactId + , filter.PayloadType + , filter.TenantId + , filter.Part + , filter.FromDate + , timeProvider.GetUtcNow() + ); + } + + private static string GenTransactId() => $"{Environment.MachineName}-{Guid.NewGuid():N}"; + + private async Task DeliverBatches(Memory> deliveryMessages, OutboxDeliverySettings settings, OutboxMessageFilter filter, CancellationToken cancellationToken) + { + int iOk = 0; + + foreach (IOutboxContext[] outboxMessages in deliveryMessages + .GetChunks(settings.ConsumeSettings.ConsumeBatchSize ?? 
settings.ExtractSettings.MaxBatchSize) + .Select(chunk + => chunk.Span.SelectWhere(dm + => new OutboxContext(dm, timeProvider)))) + { + if (cancellationToken.IsCancellationRequested) break; + + iOk += await DeliveryCourier(settings, filter, outboxMessages, cancellationToken); + } + + return iOk; + } + + private async Task DeliveryCourier(OutboxDeliverySettings settings, OutboxMessageFilter filter, IOutboxContext[] outboxMessages, CancellationToken cancellationToken) + { + int iOk = await deliveryCourier.Deliver(outboxMessages, settings.ConsumeSettings.MaxDeliveryAttempts, cancellationToken); + + await repository.FinishDelivery(outboxMessages, filter, cancellationToken); + + return iOk; + } +} diff --git a/src/Sa.Outbox/Delivery/IDeliveryCourier.cs b/src/Sa.Outbox/Delivery/IDeliveryCourier.cs new file mode 100644 index 0000000..f1e5f26 --- /dev/null +++ b/src/Sa.Outbox/Delivery/IDeliveryCourier.cs @@ -0,0 +1,6 @@ +namespace Sa.Outbox.Delivery; + +public interface IDeliveryCourier +{ + ValueTask Deliver(IReadOnlyCollection> outboxMessages, int maxDeliveryAttempts, CancellationToken cancellationToken); +} diff --git a/src/Sa.Outbox/Delivery/IDeliveryProcessor.cs b/src/Sa.Outbox/Delivery/IDeliveryProcessor.cs new file mode 100644 index 0000000..2a628d6 --- /dev/null +++ b/src/Sa.Outbox/Delivery/IDeliveryProcessor.cs @@ -0,0 +1,6 @@ +namespace Sa.Outbox.Delivery; + +public interface IDeliveryProcessor +{ + Task ProcessMessages(OutboxDeliverySettings settings, CancellationToken cancellationToken); +} diff --git a/src/Sa.Outbox/Delivery/IDeliveryRelay.cs b/src/Sa.Outbox/Delivery/IDeliveryRelay.cs new file mode 100644 index 0000000..ff4507f --- /dev/null +++ b/src/Sa.Outbox/Delivery/IDeliveryRelay.cs @@ -0,0 +1,6 @@ +namespace Sa.Outbox.Delivery; + +public interface IDeliveryRelay +{ + Task StartDelivery(OutboxDeliverySettings settings, CancellationToken cancellationToken); +} diff --git a/src/Sa.Outbox/Delivery/Setup.cs b/src/Sa.Outbox/Delivery/Setup.cs new file mode 
100644 index 0000000..20f358c --- /dev/null +++ b/src/Sa.Outbox/Delivery/Setup.cs @@ -0,0 +1,25 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Sa.Outbox.Configuration; +using Sa.Outbox.Partitional; + +namespace Sa.Outbox.Delivery; + +internal static class Setup +{ + public static IServiceCollection AddOutboxDelivery(this IServiceCollection services, Action configure) + { + // looper - job processor + services.TryAddSingleton(); + // iteration - extract from repository & batch & send to courier + services.TryAddSingleton(); + // sender - sending to scope consumer + services.TryAddSingleton(); + // support - messaging to each tenant + services.TryAddSingleton(); + + configure.Invoke(new DeliveryBuilder(services)); + + return services; + } +} diff --git a/src/Sa.Outbox/DeliveryStatusCode.cs b/src/Sa.Outbox/DeliveryStatusCode.cs new file mode 100644 index 0000000..9886ece --- /dev/null +++ b/src/Sa.Outbox/DeliveryStatusCode.cs @@ -0,0 +1,61 @@ +namespace Sa.Outbox; + + + +public static class DeliveryStatusCode +{ + /// + /// Indicates that the message is pending and has not yet been processed. + /// + public const int Pending = 0; + + /// + /// Indicates that the message is currently being processed. + /// + public const int Processing = 100; + + /// + /// Indicates that the processing of the message has been postponed. + /// This may occur due to temporary conditions that prevent immediate processing. + /// + public const int Postpone = 103; + + /// + /// Indicates that the message has been processed successfully. + /// + public const int Ok = 200; + + /// + /// Indicates that the processing of the message has been aborted. + /// This may happen due to user intervention. + /// + public const int Aborted = 299; + + /// + /// Reserved for future use or specific status codes that may be defined later. 
+ /// + public const int Status300 = 300; + + /// + /// Indicates that an error occurred during the processing of the message. + /// This may include various types of recoverable errors. + /// + public const int Error = 400; + + /// + /// Reserved for client-side errors that do not fall into other categories. + /// + public const int Status499 = 499; + + /// + /// Indicates that a permanent error has occurred, and the message cannot be processed. + /// + public const int PermanentError = 500; + + /// + /// Indicates a permanent error has occurred + /// - that the maximum number of processing attempts has been reached, + /// and the message will not be retried further. + /// + public const int MaximumAttemptsError = 501; +} diff --git a/src/Sa.Outbox/Exceptions/DeliveryException.cs b/src/Sa.Outbox/Exceptions/DeliveryException.cs new file mode 100644 index 0000000..ea99b3a --- /dev/null +++ b/src/Sa.Outbox/Exceptions/DeliveryException.cs @@ -0,0 +1,8 @@ +namespace Sa.Outbox.Exceptions; + +public class DeliveryException(string message, Exception? innerException = null, int statusCode = 400, TimeSpan? postponeAt = null) + : OutboxMessageException(message, innerException) +{ + public int StatusCode => statusCode; + public TimeSpan? PostponeAt => postponeAt; +} diff --git a/src/Sa.Outbox/Exceptions/DeliveryPermanentException.cs b/src/Sa.Outbox/Exceptions/DeliveryPermanentException.cs new file mode 100644 index 0000000..231da5d --- /dev/null +++ b/src/Sa.Outbox/Exceptions/DeliveryPermanentException.cs @@ -0,0 +1,6 @@ +namespace Sa.Outbox.Exceptions; + +public class DeliveryPermanentException(string message, Exception? 
innerException = null, int statusCode = 500) + : DeliveryException(message, innerException, statusCode) +{ +} diff --git a/src/Sa.Outbox/Exceptions/OutboxMessageException.cs b/src/Sa.Outbox/Exceptions/OutboxMessageException.cs new file mode 100644 index 0000000..27a5e86 --- /dev/null +++ b/src/Sa.Outbox/Exceptions/OutboxMessageException.cs @@ -0,0 +1,16 @@ +namespace Sa.Outbox.Exceptions; + +public class OutboxMessageException : Exception +{ + public OutboxMessageException() + { + } + + public OutboxMessageException(string message) : base(message) + { + } + + public OutboxMessageException(string message, Exception? innerException) : base(message, innerException) + { + } +} diff --git a/src/Sa.Outbox/GlobalSuppressions.cs b/src/Sa.Outbox/GlobalSuppressions.cs new file mode 100644 index 0000000..5935e67 --- /dev/null +++ b/src/Sa.Outbox/GlobalSuppressions.cs @@ -0,0 +1,9 @@ +// This file is used by Code Analysis to maintain SuppressMessage +// attributes that are applied to this project. +// Project-level suppressions either have no target or are given +// a specific target and scoped to a namespace, type, member, etc. + +using System.Diagnostics.CodeAnalysis; + +[assembly: SuppressMessage("Style", "IDE0130:Namespace does not match folder structure")] +[assembly: SuppressMessage("Minor Code Smell", "S3267:Loops should be simplified with \"LINQ\" expressions", Justification = "")] diff --git a/src/Sa.Outbox/IConsumer.cs b/src/Sa.Outbox/IConsumer.cs new file mode 100644 index 0000000..29c0f36 --- /dev/null +++ b/src/Sa.Outbox/IConsumer.cs @@ -0,0 +1,25 @@ +namespace Sa.Outbox; + +/// +/// Represents a consumer interface for processing Outbox messages of a specific type. +/// +/// The type of the message being consumed. +public interface IConsumer : IConsumer +{ + /// + /// Consumes a collection of Outbox messages. + /// This method processes the provided messages asynchronously. + /// + /// A read-only collection of Outbox contexts containing messages to be consumed. 
+ /// A cancellation token to signal the operation's cancellation. + /// A task representing the asynchronous operation. + ValueTask Consume(IReadOnlyCollection> outboxMessages, CancellationToken cancellationToken); +} + +/// +/// Represents a base consumer interface for processing Outbox messages. +/// This interface can be extended by specific consumer implementations. +/// +public interface IConsumer +{ +} \ No newline at end of file diff --git a/src/Sa.Outbox/IOutboxMessagePluginPublisher.cs b/src/Sa.Outbox/IOutboxMessagePluginPublisher.cs new file mode 100644 index 0000000..8043e19 --- /dev/null +++ b/src/Sa.Outbox/IOutboxMessagePluginPublisher.cs @@ -0,0 +1,31 @@ +using Sa.Outbox.Support; + +namespace Sa.Outbox; + +/// +/// Defines a contract for publishing outbox messages. +/// +public interface IOutboxMessagePublisher +{ + /// + /// Publishes a collection of messages. + /// + /// The type of the messages to be published, which must implement . + /// A collection of messages to be published. + /// A token to monitor for cancellation requests. + /// A representing the asynchronous operation, + /// with the number of successfully published messages as the result. + ValueTask Publish(IReadOnlyCollection messages, CancellationToken cancellationToken = default) + where TMessage : IOutboxPayloadMessage; + + /// + /// Publishes a single message. + /// + /// The type of the message to be published, which must implement . + /// The message to be published. + /// A token to monitor for cancellation requests. + /// A representing the asynchronous operation, + /// with the number of successfully published messages as the result. 
+ ValueTask Publish(TMessage messages, CancellationToken cancellationToken = default) + where TMessage : IOutboxPayloadMessage => Publish([messages], cancellationToken); +} \ No newline at end of file diff --git a/src/Sa.Outbox/Job/DeliveryJob.cs b/src/Sa.Outbox/Job/DeliveryJob.cs new file mode 100644 index 0000000..505fa6a --- /dev/null +++ b/src/Sa.Outbox/Job/DeliveryJob.cs @@ -0,0 +1,19 @@ +using Sa.Outbox.Delivery; +using Sa.Schedule; + +namespace Sa.Outbox.Job; + + +public interface IDeliveryJob: IJob; + + +internal class DeliveryJob(IDeliveryProcessor processor) : IDeliveryJob +{ + public async Task Execute(IJobContext context, CancellationToken cancellationToken) + { + OutboxDeliverySettings settings = context.Settings.Properties.Tag as OutboxDeliverySettings + ?? throw new NotImplementedException("tag"); + + await processor.ProcessMessages(settings, cancellationToken); + } +} diff --git a/src/Sa.Outbox/Job/IOutboxJobInterceptor.cs b/src/Sa.Outbox/Job/IOutboxJobInterceptor.cs new file mode 100644 index 0000000..b073968 --- /dev/null +++ b/src/Sa.Outbox/Job/IOutboxJobInterceptor.cs @@ -0,0 +1,7 @@ +using Sa.Schedule; + +namespace Sa.Outbox.Job; + +public interface IOutboxJobInterceptor : IJobInterceptor +{ +} \ No newline at end of file diff --git a/src/Sa.Outbox/Job/OutboxJobInterceptor.cs b/src/Sa.Outbox/Job/OutboxJobInterceptor.cs new file mode 100644 index 0000000..1c12f46 --- /dev/null +++ b/src/Sa.Outbox/Job/OutboxJobInterceptor.cs @@ -0,0 +1,21 @@ +using Sa.Schedule; + +namespace Sa.Outbox.Job; + +internal class OutboxJobInterceptor(IEnumerable interceptors) : IJobInterceptor +{ + public async Task OnHandle(IJobContext context, Func next, object? 
key, CancellationToken cancellationToken) + { + bool hasInterceptors = false; + foreach (IOutboxJobInterceptor item in interceptors) + { + hasInterceptors = true; + await item.OnHandle(context, next, key, cancellationToken); + } + + if (!hasInterceptors) + { + await next(); + } + } +} diff --git a/src/Sa.Outbox/Job/Setup.cs b/src/Sa.Outbox/Job/Setup.cs new file mode 100644 index 0000000..59f9e44 --- /dev/null +++ b/src/Sa.Outbox/Job/Setup.cs @@ -0,0 +1,56 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Sa.Schedule; + +namespace Sa.Outbox.Job; + +internal static class Setup +{ + public static IServiceCollection AddDeliveryJob(this IServiceCollection services, Action? сonfigure, int intstanceCount) + where TConsumer : class, IConsumer + { + services.TryAddScoped, TConsumer>(); + + if (intstanceCount > 0) + { + AddSchedule(services, сonfigure, intstanceCount); + } + + return services; + } + + private static void AddSchedule(IServiceCollection services, Action? сonfigure, int intstanceCount) + where TConsumer : class, IConsumer + { + services.AddSchedule(builder => + { + builder.UseHostedService(); + + for (int i = 0; i < intstanceCount; i++) + { + Guid jobId = Guid.NewGuid(); + + builder.AddJob>((sp, jobBuilder) => + { + var settings = new OutboxDeliverySettings(jobId, i); + сonfigure?.Invoke(sp, settings); + + ScheduleSettings scheduleSettings = settings.ScheduleSettings; + + jobBuilder + .EveryTime(scheduleSettings.ExecutionInterval) + .WithInitialDelay(scheduleSettings.InitialDelay) + .WithTag(settings) + .WithName(scheduleSettings.Name ?? 
typeof(TConsumer).Name) + .ConfigureErrorHandling(c => c + .IfErrorRetry(scheduleSettings.RetryCountOnError) + .ThenCloseApplication()) + ; + + }, jobId); + } + + builder.AddInterceptor(); + }); + } +} diff --git a/src/Sa.Outbox/Partitional/IOutboxPartitionalSupport.cs b/src/Sa.Outbox/Partitional/IOutboxPartitionalSupport.cs new file mode 100644 index 0000000..ca97d4c --- /dev/null +++ b/src/Sa.Outbox/Partitional/IOutboxPartitionalSupport.cs @@ -0,0 +1,25 @@ + +namespace Sa.Outbox.Partitional; + +/// +/// Represents a pair of tenant identifier and part information in the Outbox system. +/// This record is used to associate a tenant with a specific part of the Outbox message. +/// +/// The unique identifier for the tenant. +/// The part identifier associated with the tenant. +public record struct OutboxTenantPartPair(int TenantId, string Part); + +/// +/// Represents an interface for supporting partitioning in the Outbox processing system. +/// This interface defines a method for retrieving tenant-part pairs. +/// +public interface IOutboxPartitionalSupport +{ + /// + /// Asynchronously retrieves a collection of tenant-part pairs. + /// This method can be used to get the current mapping of tenants to their respective parts. + /// + /// A cancellation token to signal the operation's cancellation. + /// A task representing the asynchronous operation, containing a read-only collection of . + Task> GetPartValues(CancellationToken cancellationToken); +} diff --git a/src/Sa.Outbox/Partitional/IPartitionalSupportCache.cs b/src/Sa.Outbox/Partitional/IPartitionalSupportCache.cs new file mode 100644 index 0000000..0567f38 --- /dev/null +++ b/src/Sa.Outbox/Partitional/IPartitionalSupportCache.cs @@ -0,0 +1,15 @@ + +namespace Sa.Outbox.Partitional; + +/// +/// Represents a cache that provides partitional support. +/// +public interface IPartitionalSupportCache +{ + /// + /// Retrieves an array of tenant IDs from the cache. + /// + /// A token to cancel the operation. 
+ /// A task that represents the asynchronous operation, containing an array of tenant IDs. + ValueTask GetTenantIds(CancellationToken cancellationToken); +} diff --git a/src/Sa.Outbox/Partitional/OutboxPartitionalSupport.cs b/src/Sa.Outbox/Partitional/OutboxPartitionalSupport.cs new file mode 100644 index 0000000..f44b405 --- /dev/null +++ b/src/Sa.Outbox/Partitional/OutboxPartitionalSupport.cs @@ -0,0 +1,64 @@ +using Sa.Outbox.Job; +using Sa.Outbox.Publication; +using Sa.Schedule; + +namespace Sa.Outbox.Partitional; + +internal class OutboxPartitionalSupport(IScheduleSettings scheduleSettings, PartitionalSettings partSettings) : IOutboxPartitionalSupport +{ + private readonly Lazy s_lazyParts = new(() => + { + Type baseType = typeof(DeliveryJob<>); + + IEnumerable jobSettings = scheduleSettings.GetJobSettings(); + + string[] parts = jobSettings + .Select(c => GetMessageTypeIfInheritsFromDeliveryJob(c.JobType, baseType)) + .Where(mt => mt != null) + .Cast() + .Select(mt => OutboxMessageTypeHelper.GetOutboxMessageTypeInfo(mt).PartName) + .Distinct() + .ToArray(); + + return parts; + }); + + public async Task> GetPartValues(CancellationToken cancellationToken) + { + string[] parts = s_lazyParts.Value; + + if (parts.Length == 0) return []; + + if (partSettings?.GetTenantIds == null) return []; + + + int[] tenantIds = await partSettings.GetTenantIds(cancellationToken); + if (tenantIds.Length == 0) return []; + + List result = []; + + foreach (int tenantId in tenantIds) + { + foreach (string part in parts) + { + result.Add(new OutboxTenantPartPair(tenantId, part)); + } + } + + return result; + } + + + private static Type? 
GetMessageTypeIfInheritsFromDeliveryJob(Type type, Type baseType) + { + if (!baseType.IsGenericTypeDefinition) return null; + + if (type.IsGenericType && type.GetGenericTypeDefinition() == baseType) + return type.GenericTypeArguments[0]; + + if (type.BaseType != null) + return GetMessageTypeIfInheritsFromDeliveryJob(type.BaseType, baseType); + + return null; + } +} diff --git a/src/Sa.Outbox/Partitional/PartitionalSettings.cs b/src/Sa.Outbox/Partitional/PartitionalSettings.cs new file mode 100644 index 0000000..bcb3b03 --- /dev/null +++ b/src/Sa.Outbox/Partitional/PartitionalSettings.cs @@ -0,0 +1,26 @@ +namespace Sa.Outbox; + +/// +/// Represents the settings for partitioning in the Outbox processing system. +/// This class contains configuration options related to tenant handling and caching. +/// +public class PartitionalSettings +{ + /// + /// Gets or sets a value indicating whether to process messages for each tenant individually. + /// Default is set to true, meaning messages will be processed for each tenant. + /// + public bool ForEachTenant { get; set; } = true; + + /// + /// Gets or sets the duration for which tenant IDs are cached. + /// Default is set to 2 minutes. + /// + public TimeSpan CacheTenantIdsDuration { get; set; } = TimeSpan.FromMinutes(2); + + /// + /// Gets or sets a function that retrieves tenant IDs asynchronously. + /// This function takes a as a parameter and returns an array of tenant IDs. + /// + public Func>? GetTenantIds { get; set; } +} \ No newline at end of file diff --git a/src/Sa.Outbox/Partitional/PartitionalSupportCache.cs b/src/Sa.Outbox/Partitional/PartitionalSupportCache.cs new file mode 100644 index 0000000..09420a6 --- /dev/null +++ b/src/Sa.Outbox/Partitional/PartitionalSupportCache.cs @@ -0,0 +1,34 @@ +using ZiggyCreatures.Caching.Fusion; + +namespace Sa.Outbox.Partitional; + +internal class PartitionalSupportCache(IFusionCacheProvider cacheProvider, PartitionalSettings? 
settings = null) : IPartitionalSupportCache +{ + internal static class Env + { + public const string CacheName = "sa-outbox"; + public const string KeyGetTenantIds = "sa-tenant-ids"; + } + + private readonly IFusionCache _cache = cacheProvider.GetCache(Env.CacheName); + + + public async ValueTask GetTenantIds(CancellationToken cancellationToken) + { + if (settings == null) return []; + + return await _cache.GetOrSetAsync( + Env.KeyGetTenantIds + , ExtractTenantIds + , options: null + , token: cancellationToken); + } + + private async Task ExtractTenantIds(FusionCacheFactoryExecutionContext context, CancellationToken cancellationToken) + { + if (settings?.GetTenantIds == null) return []; + int[] ids = await settings.GetTenantIds(cancellationToken); + context.Options.Duration = settings.CacheTenantIdsDuration; + return ids; + } +} diff --git a/src/Sa.Outbox/Partitional/Setup.cs b/src/Sa.Outbox/Partitional/Setup.cs new file mode 100644 index 0000000..c0fd787 --- /dev/null +++ b/src/Sa.Outbox/Partitional/Setup.cs @@ -0,0 +1,27 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; + +using Sa.Data.Cache; + +namespace Sa.Outbox.Partitional; + +internal static class Setup +{ + public static IServiceCollection AddPartitioningSupport(this IServiceCollection services, Action configure) + { + services.TryAddSingleton(); + + services.TryAddSingleton(sp => + { + PartitionalSettings settings = new(); + configure.Invoke(sp, settings); + return settings; + }); + + services.AddFusionCacheEx(PartitionalSupportCache.Env.CacheName); + + services.TryAddSingleton(); + + return services; + } +} diff --git a/src/Sa.Outbox/Publication/OutboxMessagePublisher.cs b/src/Sa.Outbox/Publication/OutboxMessagePublisher.cs new file mode 100644 index 0000000..866b15d --- /dev/null +++ b/src/Sa.Outbox/Publication/OutboxMessagePublisher.cs @@ -0,0 +1,71 @@ +using Sa.Classes; +using Sa.Host.MessageTypeResolver; +using Sa.Outbox.Support; +using 
Sa.Timing.Providers; + +namespace Sa.Outbox.Publication; + +internal class OutboxMessagePublisher( + ICurrentTimeProvider timeProvider, + IMessageTypeResolver typeResolver, + IArrayPoolFactory poolFactory, + IOutboxRepository outboxRepository, + OutboxPublishSettings publishSettings +) : IOutboxMessagePublisher +{ + public async ValueTask Publish(IReadOnlyCollection messages, CancellationToken cancellationToken = default) + where TMessage : IOutboxPayloadMessage + { + if (messages.Count == 0) return 0; + return await Send(messages, cancellationToken); + } + + private async ValueTask Send(IReadOnlyCollection messages, CancellationToken cancellationToken) + where TMessage : IOutboxPayloadMessage + { + OutboxMessageTypeInfo typeInfo = OutboxMessageTypeHelper.GetOutboxMessageTypeInfo(); + DateTimeOffset now = timeProvider.GetUtcNow(); + string payloadType = typeResolver.ToName(); + int maxBatchSize = publishSettings.MaxBatchSize; + IArrayPooler> pooler = poolFactory.Create>(); + IEnumerator enumerator = messages.GetEnumerator(); + + ulong sent = 0; + int start = 0; + do + { + int len = (start + maxBatchSize < messages.Count) + ? maxBatchSize + : messages.Count - start; + + OutboxMessage[] payloads = pooler.Rent(len); + try + { + int i = 0; + while (i < len && enumerator.MoveNext()) + { + TMessage message = enumerator.Current; + + payloads[i] = new OutboxMessage( + PayloadId: message.PayloadId ?? 
string.Empty, + Payload: message, + PartInfo: new OutboxPartInfo(TenantId: message.TenantId, typeInfo.PartName, now)); + + i++; + } + + sent += await outboxRepository.Save(payloadType, payloads.AsMemory()[..len], cancellationToken); + } + finally + { + pooler.Return(payloads); + } + + start += len; + } + while (start < messages.Count); + + return sent; + + } +} diff --git a/src/Sa.Outbox/Publication/OutboxMessageTypeHelper.cs b/src/Sa.Outbox/Publication/OutboxMessageTypeHelper.cs new file mode 100644 index 0000000..779dcb7 --- /dev/null +++ b/src/Sa.Outbox/Publication/OutboxMessageTypeHelper.cs @@ -0,0 +1,29 @@ +using Sa.Outbox.Support; +using System.Collections.Concurrent; +using System.Reflection; + +namespace Sa.Outbox.Publication; + +internal record OutboxMessageTypeInfo(string PartName); + +internal static class OutboxMessageTypeHelper +{ + private static readonly ConcurrentDictionary s_cache = new(); + + public static OutboxMessageTypeInfo GetOutboxMessageTypeInfo(Type type) + => s_cache.GetOrAdd(type, GetTypeInfo); + + public static OutboxMessageTypeInfo GetOutboxMessageTypeInfo() + => s_cache.GetOrAdd(typeof(T), GetTypeInfo); + + private static OutboxMessageTypeInfo GetTypeInfo(Type typeToCheck) + { + return new OutboxMessageTypeInfo(PartName: GetPartValue(typeToCheck)); + } + + private static string GetPartValue(Type type) + { + OutboxMessageAttribute? attribute = type.GetCustomAttribute(); + return attribute?.Part ?? 
OutboxMessageAttribute.Default.Part; + } +} diff --git a/src/Sa.Outbox/Publication/Setup.cs b/src/Sa.Outbox/Publication/Setup.cs new file mode 100644 index 0000000..df1ea50 --- /dev/null +++ b/src/Sa.Outbox/Publication/Setup.cs @@ -0,0 +1,13 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; + +namespace Sa.Outbox.Publication; + +internal static class Setup +{ + public static IServiceCollection AddMessagePublisher(this IServiceCollection services) + { + services.TryAddSingleton(); + return services; + } +} diff --git a/src/Sa.Outbox/Readme.md b/src/Sa.Outbox/Readme.md new file mode 100644 index 0000000..d952527 --- /dev/null +++ b/src/Sa.Outbox/Readme.md @@ -0,0 +1,255 @@ +# Outbox + +Базовая локика + абстракции предназначеные для реализации паттерна Outbox, с поддержкой партиционирования. + +## Базовый интерфейс для определения Outbox сообщения + +```csharp +/// +/// Represents a message payload in the Outbox system. +/// +public interface IOutboxPayloadMessage +{ + /// + /// Gets the unique identifier for the payload. + /// + string PayloadId { get; } + + /// + /// Gets the identifier for the tenant associated with the payload. + /// + public int TenantId { get; } +} + + +// example message + +[OutboxMessage(part:"some_part")] // задаем партицию +public record SomeMessage(long Payload) : IOutboxPayloadMessage +{ + public string PayloadId => String.Empty; + public int TenantId => 0; +} +``` + + + +## Основные интерфейсы по работе с сообщениями + +### Публикация сообщения + +```csharp +/// +/// Defines a contract for publishing outbox messages. +/// +public interface IOutboxMessagePublisher +{ + /// + /// Publishes a collection of messages. + /// + ValueTask Publish(IReadOnlyCollection messages, CancellationToken cancellationToken = default) + where TMessage : IOutboxPayloadMessage; + + /// + /// Publishes a single message. 
+ /// + ValueTask Publish(TMessage messages, CancellationToken cancellationToken = default) + where TMessage : IOutboxPayloadMessage => Publish(new[] { messages }, cancellationToken); +} +``` + +### Сохранение в хранилище + +```csharp +public interface IOutboxRepository +{ + ValueTask Save(string payloadType, ReadOnlyMemory> messages, CancellationToken cancellationToken = default); +} +``` + + +### Доставка до потребителя + +```csharp +public interface IDeliveryRepository +{ + /// + /// Exclusively take for processing for the client + /// + Task StartDelivery(Memory> writeBuffer, int batchSize, TimeSpan lockDuration, OutboxMessageFilter filter, CancellationToken cancellationToken); + + /// + /// Complete the delivery + /// + Task FinishDelivery(IOutboxContext[] outboxMessages, OutboxMessageFilter filter, CancellationToken cancellationToken); + + /// + /// Extend the delivery (retain the lock for the client) + /// + Task ExtendDelivery(TimeSpan lockExpiration, OutboxMessageFilter filter, CancellationToken cancellationToken); +} +``` + + +### Поддержка партиций +```csharp +/// +/// Represents a pair of tenant identifier and part information in the Outbox system. +/// This record is used to associate a tenant with a specific part of the Outbox message. +/// +/// The unique identifier for the tenant. +/// The part identifier associated with the tenant. +public record struct OutboxTenantPartPair(int TenantId, string Part); + +/// +/// Represents an interface for supporting partitioning in the Outbox processing system. +/// This interface defines a method for retrieving tenant-part pairs. +/// +public interface IOutboxPartitionalSupport +{ + /// + /// Asynchronously retrieves a collection of tenant-part pairs. + /// This method can be used to get the current mapping of tenants to their respective parts. 
+ /// + Task> GetPartValues(CancellationToken cancellationToken); +} +``` + +### Потребитель сообщений +```csharp +/// +/// Represents a consumer interface for processing Outbox messages of a specific type. +/// +public interface IConsumer : IConsumer +{ + /// + /// Consumes a collection of Outbox messages. + /// + ValueTask Consume(IReadOnlyCollection> outboxMessages, CancellationToken cancellationToken); +} + +/// +/// Represents a base consumer interface for processing Outbox messages. +/// This interface can be extended by specific consumer implementations. +/// +public interface IConsumer +{ +} +``` + + + +## Примеры Outbox с использованием PostgreSQL + +### Пример конфигурирования + +```csharp +using Microsoft.Extensions.DependencyInjection; +using Sa.Outbox; +using Sa.Outbox.PostgreSql; + +public class Startup +{ + public void ConfigureServices(IServiceCollection services) + { + // Конфигурация Outbox + services.AddOutbox(builder => + { + // Настройка доставки + builder.WithDeliveries(deliveryBuilder => + { + // Добавить доставку (подкл. 
потребителя сообщений с типом - MyMessage) + deliveryBuilder.AddDelivery(); + }); + + // Настройка поддержки для работы с партициями + builder.WithPartitioningSupport((serviceProvider, partSettings) => + { + // Пример настройки для обработки сообщений для каждого арендатора + partSettings.ForEachTenant = true; + + // Возвращаем список тенантов для app + partSettings.GetTenantIds = async cancellationToken => + { + // Логика получения идентификаторов арендаторов + return await Task.FromResult(new int[] { 1, 2 }); + }; + }); + }); + + // Подключение Outbox с использованием PostgreSQL + services.AddOutboxUsingPostgreSql(cfg => + { + // коннекшен к БД + cfg.AddDataSource(c => c.WithConnectionString("Host=my_host;Database=my_db;Username=my_user;Password=my_password")); + + // настройки для работы Pg + cfg.WithPgOutboxSettings((_, settings) => + { + // Установка схемы базы данных + settings.TableSettings.DatabaseSchemaName = "public"; + + // Настройка очистки + settings.CleanupSettings.DropPartsAfterRetention = TimeSpan.FromDays(30); + }); + }); + } +} +``` + +### Пример отправки сообщения + +```csharp + +public class MessageSender(IOutboxMessagePublisher publisher) +{ + public async Task SendMessagesAsync(CancellationToken cancellationToken) + { + // Создание списка сообщений для отправки + var messages = new List + { + new MyMessage { PayloadId = Guid.NewGuid().ToString(), Content = "Hello, World!", TenantId = 1 }, + new MyMessage { PayloadId = Guid.NewGuid().ToString(), Content = "Another message", TenantId = 2 } + }; + + // Отправка сообщений через Outbox + ulong result = await publisher.Publish(messages, cancellationToken); + + Console.WriteLine($"Sent {result} messages."); + } +} +``` + + +### Пример потребления сообщений + +```csharp +using Sa.Outbox; + +namespace MyNamespace +{ + // Пример сообщения, которое будет отправляться через Outbox + [OutboxMessage] + public record MyMessage(string PayloadId, string Content) : IOutboxPayloadMessage + { + public int 
TenantId { get; init; } // Идентификатор арендатора + } + + // Пример потребителя, который будет обрабатывать сообщения MyMessage + public class MyMessageConsumer : IConsumer + { + public async ValueTask Consume(IReadOnlyCollection> outboxMessages, CancellationToken cancellationToken) + { + foreach (var messageContext in outboxMessages) + { + // Логика обработки сообщения + Console.WriteLine($"Processing message with ID: {messageContext.Payload.PayloadId} and Content: {messageContext.Payload.Content}"); + + // Успешная обработка сообщения + messageContext.Ok("Message processed successfully."); + } + } + } +} +``` + diff --git a/src/Sa.Outbox/Repositories/IDeliveryRepository.cs b/src/Sa.Outbox/Repositories/IDeliveryRepository.cs new file mode 100644 index 0000000..da83125 --- /dev/null +++ b/src/Sa.Outbox/Repositories/IDeliveryRepository.cs @@ -0,0 +1,22 @@ +namespace Sa.Outbox; + + +/// +/// needed External implementation +/// +public interface IDeliveryRepository +{ + /// + /// Exclusively take for processing for the client + /// + Task StartDelivery(Memory> writeBuffer, int batchSize, TimeSpan lockDuration, OutboxMessageFilter filter, CancellationToken cancellationToken); + /// + /// Complete the delivery + /// + Task FinishDelivery(IOutboxContext[] outboxMessages, OutboxMessageFilter filter, CancellationToken cancellationToken); + + /// + /// Extend the delivery (retain the lock for the client) + /// + Task ExtendDelivery(TimeSpan lockExpiration, OutboxMessageFilter filter, CancellationToken cancellationToken); +} diff --git a/src/Sa.Outbox/Repositories/IOutboxContext.cs b/src/Sa.Outbox/Repositories/IOutboxContext.cs new file mode 100644 index 0000000..9568d74 --- /dev/null +++ b/src/Sa.Outbox/Repositories/IOutboxContext.cs @@ -0,0 +1,93 @@ +namespace Sa.Outbox; + + +/// +/// Represents the context for an Outbox message processing operation. +/// This interface provides information about the message, its delivery status, and methods to update the status. 
+/// +public interface IOutboxContext +{ + /// + /// Gets the unique identifier for the Outbox message. + /// + string OutboxId { get; } + + /// + /// Gets information about the part of the Outbox message being processed. + /// + OutboxPartInfo PartInfo { get; } + + /// + /// Gets information about the delivery of the Outbox message. + /// + OutboxDeliveryInfo DeliveryInfo { get; } + + /// + /// Gets the result of the delivery attempt. + /// + DeliveryStatus DeliveryResult { get; } + + /// + /// Gets any exception that occurred during the processing of the message. + /// + Exception? Exception { get; } + + /// + /// Gets the duration for which the message processing is postponed. + /// + TimeSpan PostponeAt { get; } + + /// + /// Marks the message processing as successful. + /// + /// An optional message providing additional context. + /// The current Outbox context. + IOutboxContext Ok(string? message = null); + + /// + /// Marks the message processing as aborted. + /// + /// An optional message providing additional context. + /// The current Outbox context. + IOutboxContext Aborted(string? message = null); + + /// + /// Marks the message processing as an error. + /// + /// The exception that occurred during processing. + /// An optional message providing additional context. + /// The status code associated with the error. + /// An optional duration to postpone processing. + /// The current Outbox context. + IOutboxContext Error(Exception exception, string? message = null, int statusCode = DeliveryStatusCode.Error, TimeSpan? postpone = null); + + /// + /// Marks the message processing as a permanent error. + /// + /// The exception that occurred during processing. + /// An optional message providing additional context. + /// The status code associated with the permanent error. + /// The current Outbox context. + IOutboxContext PermanentError(Exception exception, string? 
message = null, int statusCode = DeliveryStatusCode.PermanentError); + + /// + /// Marks the message processing as postponed. + /// + /// The duration to postpone processing. + /// An optional message providing additional context. + /// The current Outbox context. + IOutboxContext Postpone(TimeSpan postpone, string? message = null); +} + +/// +/// Represents the context for an Outbox message processing operation with a specific message type. +/// This interface extends the to include the message payload. +/// +/// The type of the message being processed. +public interface IOutboxContext : IOutboxContext +{ + /// + /// Gets the payload of the Outbox message being processed. + /// + TMessage Payload { get; } +} \ No newline at end of file diff --git a/src/Sa.Outbox/Repositories/IOutboxRepository.cs b/src/Sa.Outbox/Repositories/IOutboxRepository.cs new file mode 100644 index 0000000..d4d6617 --- /dev/null +++ b/src/Sa.Outbox/Repositories/IOutboxRepository.cs @@ -0,0 +1,17 @@ +namespace Sa.Outbox; + +/// +/// Represents a repository for storing outbox messages. +/// +public interface IOutboxRepository +{ + /// + /// Saves a collection of outbox messages to the repository. + /// + /// The type of message being saved. + /// The type of payload being saved. + /// The collection of outbox messages to save. + /// A token to cancel the operation. + /// A task representing the asynchronous operation, containing the number of messages saved. 
+ ValueTask Save(string payloadType, ReadOnlyMemory> messages, CancellationToken cancellationToken = default); +} \ No newline at end of file diff --git a/src/Sa.Outbox/Repositories/OutboxContext.cs b/src/Sa.Outbox/Repositories/OutboxContext.cs new file mode 100644 index 0000000..c8ea2c7 --- /dev/null +++ b/src/Sa.Outbox/Repositories/OutboxContext.cs @@ -0,0 +1,65 @@ +using Sa.Outbox.Exceptions; +using Sa.Timing.Providers; + + +namespace Sa.Outbox.Repository; + +/// +/// OutboxMessage +/// +internal class OutboxContext(OutboxDeliveryMessage delivery, ICurrentTimeProvider timeProvider) : IOutboxContext +{ + public string OutboxId { get; } = delivery.OutboxId; + public OutboxPartInfo PartInfo { get; } = delivery.PartInfo; + + + public string PayloadId { get; } = delivery.PayloadId; + public TMessage Payload { get; } = delivery.Payload; + + + public OutboxDeliveryInfo DeliveryInfo { get; } = delivery.DeliveryInfo; + + + public DeliveryStatus DeliveryResult { get; private set; } + public TimeSpan PostponeAt { get; private set; } + public Exception? Exception { get; private set; } + + public IOutboxContext PermanentError(Exception exception, string? message = null, int statusCode = DeliveryStatusCode.PermanentError) + { + return Error(exception, message, statusCode); + } + + public IOutboxContext Error(Exception exception, string? message = null, int statusCode = DeliveryStatusCode.Error, TimeSpan? postpone = null) + { + DeliveryException? deliveryException = exception as DeliveryException; + + DeliveryResult = new DeliveryStatus(deliveryException?.StatusCode ?? statusCode, message ?? exception.Message, timeProvider.GetUtcNow()); + Exception = exception; + PostponeAt = postpone ?? deliveryException?.PostponeAt ?? TimeSpan.Zero; + return this; + } + + public IOutboxContext Ok(string? message = null) + { + DeliveryResult = new DeliveryStatus(DeliveryStatusCode.Ok, message ?? 
string.Empty, timeProvider.GetUtcNow()); + Exception = null; + PostponeAt = TimeSpan.Zero; + return this; + } + + public IOutboxContext Postpone(TimeSpan postpone, string? message = null) + { + DeliveryResult = new DeliveryStatus(DeliveryStatusCode.Postpone, message ?? string.Empty, timeProvider.GetUtcNow()); + Exception = null; + PostponeAt = postpone; + return this; + } + + public IOutboxContext Aborted(string? message = null) + { + DeliveryResult = new DeliveryStatus(DeliveryStatusCode.Aborted, message ?? string.Empty, timeProvider.GetUtcNow()); + Exception = null; + PostponeAt = TimeSpan.Zero; + return this; + } +} diff --git a/src/Sa.Outbox/Repositories/OutboxMessage.cs b/src/Sa.Outbox/Repositories/OutboxMessage.cs new file mode 100644 index 0000000..cd42ef1 --- /dev/null +++ b/src/Sa.Outbox/Repositories/OutboxMessage.cs @@ -0,0 +1,102 @@ +namespace Sa.Outbox; + + +/// +/// Represents a message in the Outbox with its associated payload and part information. +/// +/// The type of the message payload. +public record struct OutboxMessage( + /// + /// Gets the unique identifier for the payload. + /// + string PayloadId, + + /// + /// Gets the actual message payload. + /// + TMessage Payload, + + /// + /// Gets information about the part of the Outbox message. + /// + OutboxPartInfo PartInfo +); + +/// +/// Represents a delivery message in the Outbox with its associated payload, part information, and delivery details. +/// +/// The type of the message payload. +public record struct OutboxDeliveryMessage( + /// + /// Gets the unique identifier for the Outbox delivery. + /// + string OutboxId, + + /// + /// Gets the unique identifier for the payload. + /// + string PayloadId, + + /// + /// Gets the actual message payload. + /// + TMessage Payload, + + /// + /// Gets information about the part of the Outbox message. + /// + OutboxPartInfo PartInfo, + + /// + /// Gets information about the delivery of the Outbox message. 
+ /// + OutboxDeliveryInfo DeliveryInfo +); + +/// +/// Represents information about a part of the Outbox message. +/// +public record struct OutboxPartInfo( + /// + /// Gets the identifier for the tenant associated with the message. + /// + int TenantId, + + /// + /// Gets the part identifier for the Outbox message. + /// + string Part, + + /// + /// Gets the date and time when the part was created. + /// + DateTimeOffset CreatedAt +); + +/// +/// Represents information about the delivery of an Outbox message. +/// +/// The unique identifier for the delivery. +/// The number of delivery attempts made. +/// The identifier of the last error encountered during delivery. +/// The current status of the delivery. +/// The date and time when the delivery was created. +public record struct OutboxDeliveryInfo( + string? DeliveryId, + int Attempt, + string LastErrorId, + DeliveryStatus Status, + DateTimeOffset CreatedAt +); + +/// +/// Represents the status of a delivery attempt. +/// +/// The status code representing the result of the delivery. +/// A message providing additional context about the delivery status. +/// The date and time when the status was created. +public record struct DeliveryStatus( + int Code, + string Message, + DateTimeOffset CreatedAt +); \ No newline at end of file diff --git a/src/Sa.Outbox/Repositories/OutboxMessageFilter.cs b/src/Sa.Outbox/Repositories/OutboxMessageFilter.cs new file mode 100644 index 0000000..b54e69e --- /dev/null +++ b/src/Sa.Outbox/Repositories/OutboxMessageFilter.cs @@ -0,0 +1,39 @@ +namespace Sa.Outbox; + +/// +/// Represents a filter for querying Outbox messages based on specific criteria. +/// This record is used to define the parameters for filtering messages in the Outbox. +/// +public record OutboxMessageFilter( + /// + /// Gets the transaction identifier associated with the Outbox message. + /// + string TransactId, + + /// + /// Gets the type of the payload contained in the Outbox message. 
+ /// + string PayloadType, + + /// + /// Gets the identifier for the tenant associated with the Outbox message. + /// + int TenantId, + + /// + /// Gets the part identifier for the Outbox message. + /// + string Part, + + /// + /// Gets the starting date and time for filtering messages. + /// Only messages created on or after this date will be included. + /// + DateTimeOffset FromDate, + + /// + /// Gets the current date and time for filtering messages. + /// Only messages created on or before this date will be included. + /// + DateTimeOffset NowDate +); \ No newline at end of file diff --git a/src/Sa.Outbox/Sa.Outbox.csproj b/src/Sa.Outbox/Sa.Outbox.csproj new file mode 100644 index 0000000..f0df714 --- /dev/null +++ b/src/Sa.Outbox/Sa.Outbox.csproj @@ -0,0 +1,15 @@ + + + + net8.0 + enable + enable + + + + + + + + + diff --git a/src/Sa.Outbox/Setup.cs b/src/Sa.Outbox/Setup.cs new file mode 100644 index 0000000..6dbc7ff --- /dev/null +++ b/src/Sa.Outbox/Setup.cs @@ -0,0 +1,14 @@ +using Microsoft.Extensions.DependencyInjection; +using Sa.Outbox.Configuration; + +namespace Sa.Outbox; + +public static class Setup +{ + public static IServiceCollection AddOutbox(this IServiceCollection services, Action? 
build = null) + { + OutboxBuilder builder = new(services); + build?.Invoke(builder); + return services; + } +} diff --git a/src/Sa.Partitional.PostgreSql/Cache/IPartCache.cs b/src/Sa.Partitional.PostgreSql/Cache/IPartCache.cs new file mode 100644 index 0000000..f40aa75 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Cache/IPartCache.cs @@ -0,0 +1,10 @@ +using Sa.Classes; + +namespace Sa.Partitional.PostgreSql.Cache; + +public interface IPartCache +{ + ValueTask InCache(string tableName, DateTimeOffset date, StrOrNum[] partValues, CancellationToken cancellationToken = default); + ValueTask EnsureCache(string tableName, DateTimeOffset date, StrOrNum[] partValues, CancellationToken cancellationToken = default); + ValueTask RemoveCache(string tableName, CancellationToken cancellationToken = default); +} diff --git a/src/Sa.Partitional.PostgreSql/Cache/PartCache.cs b/src/Sa.Partitional.PostgreSql/Cache/PartCache.cs new file mode 100644 index 0000000..d250421 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Cache/PartCache.cs @@ -0,0 +1,78 @@ +using Sa.Classes; +using Sa.Extensions; +using Sa.Timing.Providers; +using ZiggyCreatures.Caching.Fusion; + +namespace Sa.Partitional.PostgreSql.Cache; + +internal class PartCache( + IFusionCacheProvider cacheProvider + , IPartRepository repository + , ISqlBuilder sqlBuilder + , ICurrentTimeProvider timeProvider + , PartCacheSettings settings +) : IPartCache +{ + internal static class Env + { + public const string CacheName = "sa-partitional"; + } + + private readonly IFusionCache _cache = cacheProvider.GetCache(Env.CacheName); + + + public async ValueTask InCache(string tableName, DateTimeOffset date, StrOrNum[] partValues, CancellationToken cancellationToken = default) + { + if (sqlBuilder[tableName] == null) return false; + + List list = await GetPartsInCache(tableName, cancellationToken); + + if (list.Count == 0) return false; + + return list.Exists(c => partValues.SequenceEqual(c.PartValues) && 
c.PartBy.GetRange(c.FromDate).InRange(date)); + } + + private ValueTask> GetPartsInCache(string tableName, CancellationToken cancellationToken) + { + return _cache.GetOrSetAsync>( + tableName + , async (ctx, t) => await SelectPartsInDb(ctx, tableName, t) + , options: null + , token: cancellationToken); + } + + // search and set the cache duration based result set + private async Task> SelectPartsInDb(FusionCacheFactoryExecutionContext> context, string tableName, CancellationToken cancellationToken) + { + try + { + DateTimeOffset from = (timeProvider.GetUtcNow() - settings.CachedFromDate).StartOfDay(); + List list = await repository.GetPartsFromDate(tableName, from, cancellationToken); + context.Options.Duration = settings.CacheDuration; + return list; + } + catch (Npgsql.PostgresException ex) when (ex.SqlState == Npgsql.PostgresErrorCodes.UndefinedTable) + { + context.Options.Duration = TimeSpan.Zero; + + return []; + } + } + + public async ValueTask EnsureCache(string tableName, DateTimeOffset date, StrOrNum[] partValues, CancellationToken cancellationToken = default) + { + bool result = await InCache(tableName, date, partValues, cancellationToken); + if (result) return true; + + await repository.CreatePart(tableName, date, partValues, cancellationToken); + + await RemoveCache(tableName, cancellationToken); + + result = await InCache(tableName, date, partValues, cancellationToken); + + return result; + } + + public ValueTask RemoveCache(string tableName, CancellationToken cancellationToken = default) + => _cache.RemoveAsync(tableName, null, cancellationToken); +} diff --git a/src/Sa.Partitional.PostgreSql/Cache/PartCacheSettings.cs b/src/Sa.Partitional.PostgreSql/Cache/PartCacheSettings.cs new file mode 100644 index 0000000..3df12b9 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Cache/PartCacheSettings.cs @@ -0,0 +1,8 @@ +namespace Sa.Partitional.PostgreSql; + +public class PartCacheSettings +{ + public TimeSpan CacheDuration { get; set; } = 
TimeSpan.FromMinutes(2); + + public TimeSpan CachedFromDate { get; set; } = TimeSpan.FromDays(1); +} diff --git a/src/Sa.Partitional.PostgreSql/Cache/Setup.cs b/src/Sa.Partitional.PostgreSql/Cache/Setup.cs new file mode 100644 index 0000000..399c67b --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Cache/Setup.cs @@ -0,0 +1,26 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; + +using Sa.Data.Cache; + +namespace Sa.Partitional.PostgreSql.Cache; + + +internal static class Setup +{ + public static IServiceCollection AddPartCache(this IServiceCollection services, Action? configure = null) + { + + services.AddTransient(); + + services.AddFusionCacheEx(PartCache.Env.CacheName, (sp, opts) => + { + PartCacheSettings cacheSettings = sp.GetRequiredService(); + configure?.Invoke(sp, cacheSettings); + opts.Duration = cacheSettings.CacheDuration; + }); + + services.TryAddSingleton(); + return services; + } +} diff --git a/src/Sa.Partitional.PostgreSql/Cleaning/IPartCleanupService.cs b/src/Sa.Partitional.PostgreSql/Cleaning/IPartCleanupService.cs new file mode 100644 index 0000000..8073b8c --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Cleaning/IPartCleanupService.cs @@ -0,0 +1,26 @@ + +namespace Sa.Partitional.PostgreSql; + +/// +/// Represents a service interface for cleaning up Outbox message parts. +/// This interface defines methods for performing cleanup operations on message parts. +/// +public interface IPartCleanupService +{ + /// + /// Asynchronously cleans up Outbox message parts. + /// This method removes parts that are no longer needed based on the retention policy. + /// + /// A cancellation token to signal the operation's cancellation. + /// A task representing the asynchronous operation, containing the number of parts cleaned up. + Task Clean(CancellationToken cancellationToken); + + /// + /// Asynchronously cleans up Outbox message parts up to a specified date. 
+ /// This method removes parts that are older than the provided date. + /// + /// The date up to which parts should be cleaned up. + /// A cancellation token to signal the operation's cancellation. + /// A task representing the asynchronous operation, containing the number of parts cleaned up. + Task Clean(DateTimeOffset toDate, CancellationToken cancellationToken); +} \ No newline at end of file diff --git a/src/Sa.Partitional.PostgreSql/Cleaning/PartCleanupJob.cs b/src/Sa.Partitional.PostgreSql/Cleaning/PartCleanupJob.cs new file mode 100644 index 0000000..d3a9059 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Cleaning/PartCleanupJob.cs @@ -0,0 +1,11 @@ +using Sa.Schedule; + +namespace Sa.Partitional.PostgreSql.Cleaning; + +internal class PartCleanupJob(IPartCleanupService cleaningService) : IJob +{ + public Task Execute(IJobContext context, CancellationToken cancellationToken) + { + return cleaningService.Clean(cancellationToken); + } +} diff --git a/src/Sa.Partitional.PostgreSql/Cleaning/PartCleanupScheduleSettings.cs b/src/Sa.Partitional.PostgreSql/Cleaning/PartCleanupScheduleSettings.cs new file mode 100644 index 0000000..0f39b54 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Cleaning/PartCleanupScheduleSettings.cs @@ -0,0 +1,34 @@ +namespace Sa.Partitional.PostgreSql; + +/// +/// Represents the settings for scheduling the cleanup of Outbox message parts. +/// This class contains configuration options related to how and when parts should be cleaned up. +/// +public class PartCleanupScheduleSettings +{ + /// + /// Gets or sets a value indicating whether the cleanup should be executed as a background job. + /// Default is set to false, meaning the cleanup will not run as a job. + /// + public bool AsJob { get; set; } = false; + + /// + /// Gets or sets the duration after which old parts will be dropped. + /// Default is set to 30 days. 
+ /// + public TimeSpan DropPartsAfterRetention { get; set; } = TimeSpan.FromDays(30); + + /// + /// Gets or sets the interval at which the cleanup job will be executed. + /// Default is set to every 4 hours, with a random additional delay of up to 59 minutes. + /// + public TimeSpan ExecutionInterval { get; set; } = TimeSpan + .FromHours(4) + .Add(TimeSpan.FromMinutes(Random.Shared.Next(1, 59))); + + /// + /// Gets or sets the initial delay before the cleanup job starts executing. + /// Default is set to 1 minute. + /// + public TimeSpan InitialDelay { get; set; } = TimeSpan.FromMinutes(1); +} diff --git a/src/Sa.Partitional.PostgreSql/Cleaning/PartCleanupService.cs b/src/Sa.Partitional.PostgreSql/Cleaning/PartCleanupService.cs new file mode 100644 index 0000000..65310b0 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Cleaning/PartCleanupService.cs @@ -0,0 +1,27 @@ +using Sa.Timing.Providers; + +namespace Sa.Partitional.PostgreSql.Cleaning; + +internal class PartCleanupService( + IPartRepository repository + , PartCleanupScheduleSettings settings + , ICurrentTimeProvider timeProvider + , ISqlBuilder sqlBuilder +) : IPartCleanupService +{ + public async Task Clean(DateTimeOffset toDate, CancellationToken cancellationToken) + { + int cnt = 0; + foreach (string tableName in sqlBuilder.Tables.Select(c => c.FullName)) + { + cnt += await repository.DropPartsToDate(tableName, toDate, cancellationToken); + } + return cnt; + } + + public Task Clean(CancellationToken cancellationToken) + { + DateTimeOffset toDate = timeProvider.GetUtcNow().Add(-settings.DropPartsAfterRetention); + return Clean(toDate, cancellationToken); + } +} diff --git a/src/Sa.Partitional.PostgreSql/Cleaning/Setup.cs b/src/Sa.Partitional.PostgreSql/Cleaning/Setup.cs new file mode 100644 index 0000000..ecf6710 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Cleaning/Setup.cs @@ -0,0 +1,69 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; 
+using Sa.Schedule; +using System.Collections.Concurrent; + +namespace Sa.Partitional.PostgreSql.Cleaning; + +internal static class Setup +{ + readonly static Guid JobId = Guid.Parse("7da81411-9db7-4553-8e93-bd1f12d02b38"); + + private static readonly ConcurrentDictionary>> s_invokers = []; + + public static IServiceCollection AddPartCleaning(this IServiceCollection services, Action? configure = null) + { + + if (configure != null) + { + if (s_invokers.TryGetValue(services, out var builder)) + { + builder.Add(configure); + } + else + { + s_invokers[services] = [configure]; + } + } + + services.TryAddSingleton(sp => + { + var item = new PartCleanupScheduleSettings(); + if (s_invokers.TryGetValue(services, out var invokers)) + { + foreach (Action invoker in invokers) + { + invoker.Invoke(sp, item); + } + s_invokers.Remove(services, out _); + } + return item; + }); + + services.TryAddSingleton(); + + services.AddSchedule(b => b + .UseHostedService() + .AddJob((sp, builder) => + { + builder.WithName("Cleanup job"); + + PartCleanupScheduleSettings settings = sp.GetRequiredService(); + + builder + .WithInitialDelay(settings.InitialDelay) + .EveryTime(settings.ExecutionInterval) + .ConfigureErrorHandling(berr => berr.DoSuppressError(err => true)) + ; + + if (!settings.AsJob) + { + builder.Disabled(); + } + + }, JobId) + ); + + return services; + } +} diff --git a/src/Sa.Partitional.PostgreSql/Configuration/Builder/ISchemaBuilder.cs b/src/Sa.Partitional.PostgreSql/Configuration/Builder/ISchemaBuilder.cs new file mode 100644 index 0000000..d6fab81 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Configuration/Builder/ISchemaBuilder.cs @@ -0,0 +1,8 @@ +namespace Sa.Partitional.PostgreSql; + +public interface ISchemaBuilder +{ + ITableBuilder CreateTable(string tableName); + ITableBuilder AddTable(string tableName, params string[] sqlFields); + ITableSettings[] Build(); +} diff --git a/src/Sa.Partitional.PostgreSql/Configuration/Builder/ISettingsBuilder.cs 
b/src/Sa.Partitional.PostgreSql/Configuration/Builder/ISettingsBuilder.cs new file mode 100644 index 0000000..50c91c1 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Configuration/Builder/ISettingsBuilder.cs @@ -0,0 +1,9 @@ +namespace Sa.Partitional.PostgreSql; + + +public interface ISettingsBuilder +{ + ISettingsBuilder AddSchema(Action schemaBuilder); + ISettingsBuilder AddSchema(string schemaName, Action schemaBuilder); + ITableSettingsStorage Build(); +} diff --git a/src/Sa.Partitional.PostgreSql/Configuration/Builder/ITableBuilder.cs b/src/Sa.Partitional.PostgreSql/Configuration/Builder/ITableBuilder.cs new file mode 100644 index 0000000..3b2f804 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Configuration/Builder/ITableBuilder.cs @@ -0,0 +1,31 @@ +using Sa.Classes; + +namespace Sa.Partitional.PostgreSql; + +public interface ITableBuilder +{ + ITableBuilder AddFields(params string[] sqlFields); + ITableBuilder PartByList(params string[] fieldNames); + ITableBuilder PartByRange(PgPartBy partBy, string? 
timestampFieldName = null); + + ITableBuilder TimestampAs(string timestampFieldName); + ITableBuilder WithPartSeparator(string partSeparator); + + ITableBuilder AddPostSql(Func postSql); + ITableBuilder AddConstraintPkSql(Func pkSql); + + + ITableSettings Build(); + + ITableBuilder AddMigration(IPartTableMigrationSupport migrationSupport); + + ITableBuilder AddMigration(Func> getPartValues); + + ITableBuilder AddMigration(params StrOrNum[] partValues); + + ITableBuilder AddMigration(StrOrNum parent, StrOrNum[] childs) + { + foreach (StrOrNum child in childs) AddMigration(parent, child); + return this; + } +} diff --git a/src/Sa.Partitional.PostgreSql/Configuration/Builder/SchemaBuilder.cs b/src/Sa.Partitional.PostgreSql/Configuration/Builder/SchemaBuilder.cs new file mode 100644 index 0000000..9c30244 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Configuration/Builder/SchemaBuilder.cs @@ -0,0 +1,19 @@ +namespace Sa.Partitional.PostgreSql.Configuration.Builder; + +internal class SchemaBuilder(string schemaName) : ISchemaBuilder +{ + public readonly Dictionary _tables = []; + + public ITableBuilder CreateTable(string tableName) + { + if (_tables.TryGetValue(tableName, out TableBuilder? 
table)) return table; + TableBuilder builder = new(schemaName, tableName); + _tables[tableName] = builder; + return builder; + } + + public ITableBuilder AddTable(string tableName, params string[] sqlFields) + => CreateTable(tableName).AddFields(sqlFields); + + public ITableSettings[] Build() => _tables.Values.Select(c => c.Build()).ToArray(); +} diff --git a/src/Sa.Partitional.PostgreSql/Configuration/Builder/SettingsBuilder.cs b/src/Sa.Partitional.PostgreSql/Configuration/Builder/SettingsBuilder.cs new file mode 100644 index 0000000..6f9439c --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Configuration/Builder/SettingsBuilder.cs @@ -0,0 +1,32 @@ +using Sa.Partitional.PostgreSql.Settings; + +namespace Sa.Partitional.PostgreSql.Configuration.Builder; + + +internal class SettingsBuilder : ISettingsBuilder +{ + private readonly Dictionary _schemas = []; + + public ISettingsBuilder AddSchema(Action schemaBuilder) + { + return AddSchema("public", schemaBuilder); + } + + public ISettingsBuilder AddSchema(string schemaName, Action schemaBuilder) + { + if (!_schemas.TryGetValue(schemaName, out ISchemaBuilder? 
builder)) + { + builder = new SchemaBuilder(schemaName); + _schemas[schemaName] = builder; + } + + schemaBuilder.Invoke(builder); + return this; + } + + public ITableSettingsStorage Build() + { + ITableSettings[] tables = _schemas.Values.SelectMany(c => c.Build()).ToArray(); + return new TableSettingsStorage(tables); + } +} diff --git a/src/Sa.Partitional.PostgreSql/Configuration/Builder/Setup.cs b/src/Sa.Partitional.PostgreSql/Configuration/Builder/Setup.cs new file mode 100644 index 0000000..3930b1e --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Configuration/Builder/Setup.cs @@ -0,0 +1,40 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using System.Collections.Concurrent; + +namespace Sa.Partitional.PostgreSql.Configuration.Builder; + + +public static class Setup +{ + private static readonly ConcurrentDictionary>> s_invokers = []; + + public static IServiceCollection AddSettigs(this IServiceCollection services, Action build) + { + if (s_invokers.TryGetValue(services, out var invokers)) + { + invokers.Add(build); + } + else + { + s_invokers[services] = [build]; + } + + services.TryAddSingleton(sp => + { + var builder = new SettingsBuilder(); + if (s_invokers.TryGetValue(services, out var invokers)) + { + foreach (Action build in invokers) + { + build.Invoke(sp, builder); + } + + s_invokers.Remove(services, out _); + } + return builder; + }); + + return services; + } +} diff --git a/src/Sa.Partitional.PostgreSql/Configuration/Builder/TableBuilder.cs b/src/Sa.Partitional.PostgreSql/Configuration/Builder/TableBuilder.cs new file mode 100644 index 0000000..1cbf523 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Configuration/Builder/TableBuilder.cs @@ -0,0 +1,125 @@ +using Sa.Classes; +using Sa.Partitional.PostgreSql.Settings; + +namespace Sa.Partitional.PostgreSql.Configuration.Builder; + +internal class TableBuilder(string schemaName, string tableName) : ITableBuilder +{ + private readonly List 
_fields = []; + private readonly List _parts = []; + + private readonly List _migrationPartValues = []; + private IPartTableMigrationSupport? _migrationSupport; + private Func>? _getPartValues; + + private string? _timestamp; + private PgPartBy? _partBy; + private string? _separator = null; + + private Func? _postSql = null; + private Func? _pkSql = null; + + public ITableBuilder AddFields(params string[] sqlFields) + { + ArgumentNullException.ThrowIfNull(sqlFields); + + if (sqlFields.Length == 0) throw new ArgumentException("fields is empty", nameof(sqlFields)); + _fields.AddRange(sqlFields); + return this; + } + + public ITableBuilder PartByList(params string[] fieldNames) + { + ArgumentNullException.ThrowIfNull(fieldNames); + + _parts.AddRange(fieldNames); + return this; + } + + public ITableBuilder TimestampAs(string timestampFieldName) + { + _timestamp = timestampFieldName; + return this; + } + + public ITableBuilder PartByRange(PgPartBy partBy, string? timestampFieldName = null) + { + _partBy = partBy; + _timestamp ??= timestampFieldName; + return this; + } + + public ITableBuilder WithPartSeparator(string partSeparator) + { + _separator = partSeparator; + return this; + } + + public ITableBuilder AddPostSql(Func postSql) + { + _postSql = postSql ?? throw new ArgumentNullException(nameof(postSql)); + return this; + } + + public ITableBuilder AddConstraintPkSql(Func pkSql) + { + _pkSql = pkSql ?? throw new ArgumentNullException(nameof(pkSql)); + return this; + } + + public ITableSettings Build() => new TableSettings( + schemaName + , tableName + , [.. _fields] + , [.. 
_parts] + , new PartTableMigrationSupport(_migrationPartValues, _getPartValues, _migrationSupport) + , _partBy + , _postSql + , _pkSql + , _timestamp + , _separator + ); + + public ITableBuilder AddMigration(params StrOrNum[] partValues) + { + _migrationPartValues.Add(partValues); + return this; + } + + public ITableBuilder AddMigration(Func> getPartValues) + { + _getPartValues = getPartValues; + return this; + } + + public ITableBuilder AddMigration(IPartTableMigrationSupport migrationSupport) + { + _migrationSupport = migrationSupport; + return this; + } + + + + internal class PartTableMigrationSupport(IReadOnlyCollection? partValues, Func>? getPartValues, IPartTableMigrationSupport? original) : IPartTableMigrationSupport + { + public async Task GetPartValues(CancellationToken cancellationToken) + { + List result = partValues != null ? [.. partValues] : []; + + if (getPartValues != null) + { + StrOrNum[][] partItems = await getPartValues(cancellationToken); + result.AddRange(partItems); + } + + if (original != null) + { + StrOrNum[][] partItems = await original.GetPartValues(cancellationToken); + result.AddRange(partItems); + } + + return [.. result]; + } + } + +} diff --git a/src/Sa.Partitional.PostgreSql/Configuration/IPartConfiguration.cs b/src/Sa.Partitional.PostgreSql/Configuration/IPartConfiguration.cs new file mode 100644 index 0000000..ea2894f --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Configuration/IPartConfiguration.cs @@ -0,0 +1,11 @@ +namespace Sa.Partitional.PostgreSql; + +public interface IPartConfiguration +{ + IPartConfiguration AddPartTables(Action configure); + IPartConfiguration AddPartCache(Action? configure = null); + IPartConfiguration AddPartMigrationSchedule(Action? configure = null); + IPartConfiguration AddPartCleanupSchedule(Action? configure = null); + + IPartConfiguration AddDataSource(Action? 
configure = null); +} diff --git a/src/Sa.Partitional.PostgreSql/Configuration/PartConfiguration.cs b/src/Sa.Partitional.PostgreSql/Configuration/PartConfiguration.cs new file mode 100644 index 0000000..aa9893b --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Configuration/PartConfiguration.cs @@ -0,0 +1,51 @@ +using Microsoft.Extensions.DependencyInjection; +using Sa.Data.PostgreSql; +using Sa.Partitional.PostgreSql.Cache; +using Sa.Partitional.PostgreSql.Cleaning; +using Sa.Partitional.PostgreSql.Configuration.Builder; +using Sa.Partitional.PostgreSql.Migration; +using Sa.Partitional.PostgreSql.Repositories; +using Sa.Partitional.PostgreSql.SqlBuilder; + +namespace Sa.Partitional.PostgreSql.Configuration; + +internal class PartConfiguration(IServiceCollection services) : IPartConfiguration +{ + + public IPartConfiguration AddPartTables(Action configure) + { + services + .AddSettigs(configure) + .AddSqlBuilder() + ; + + return this; + } + + public IPartConfiguration AddDataSource(Action? configure = null) + { + services.AddPgDataSource(configure); + + // inner + services.AddPartRepository(); + return this; + } + + public IPartConfiguration AddPartCache(Action? configure = null) + { + services.AddPartCache(configure); + return this; + } + + public IPartConfiguration AddPartMigrationSchedule(Action? configure = null) + { + services.AddPartMigration(configure); + return this; + } + + public IPartConfiguration AddPartCleanupSchedule(Action? configure = null) + { + services.AddPartCleaning(configure); + return this; + } +} diff --git a/src/Sa.Partitional.PostgreSql/GlobalSuppressions.cs b/src/Sa.Partitional.PostgreSql/GlobalSuppressions.cs new file mode 100644 index 0000000..4ca2bde --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/GlobalSuppressions.cs @@ -0,0 +1,9 @@ +// This file is used by Code Analysis to maintain SuppressMessage +// attributes that are applied to this project. 
+// Project-level suppressions either have no target or are given +// a specific target and scoped to a namespace, type, member, etc. + +using System.Diagnostics.CodeAnalysis; + +[assembly: SuppressMessage("Style", "IDE0130:Namespace does not match folder structure")] + diff --git a/src/Sa.Partitional.PostgreSql/IPartitionManager.cs b/src/Sa.Partitional.PostgreSql/IPartitionManager.cs new file mode 100644 index 0000000..b6a4375 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/IPartitionManager.cs @@ -0,0 +1,35 @@ +namespace Sa.Partitional.PostgreSql; + +/// +/// interface for managing partitions in the database +/// +public interface IPartitionManager +{ + /// + /// Migrates the existing partitions in the database. + /// This method may be used to reorganize or update partitions based on the current state of the data. + /// + /// A token to monitor for cancellation requests. + /// A task representing the asynchronous operation, with an integer result indicating the number of partitions migrated. + Task Migrate(CancellationToken cancellationToken = default); + + /// + /// Migrates partitions for specific dates. + /// This method allows for targeted migration of partitions based on the provided date range. + /// + /// An array of dates for which partitions should be migrated. + /// A token to monitor for cancellation requests. + /// A task representing the asynchronous operation, with an integer result indicating the number of partitions migrated. + Task Migrate(DateTimeOffset[] dates, CancellationToken cancellationToken = default); + + /// + /// Ensures that the specified partitions exist for a given table and date. + /// This method checks if the specified partitions are present and creates them if they are not. + /// + /// The name of the table for which partitions are being ensured. + /// The date associated with the partition. + /// An array of values that define the partitions (could be strings or numbers). + /// A token to monitor for cancellation requests. 
+ /// A value task representing the asynchronous operation, with a boolean result indicating whether the partitions were ensured successfully. + ValueTask EnsureParts(string tableName, DateTimeOffset date, Classes.StrOrNum[] partValues, CancellationToken cancellationToken = default); +} diff --git a/src/Sa.Partitional.PostgreSql/Migration/IPartMigrationService.cs b/src/Sa.Partitional.PostgreSql/Migration/IPartMigrationService.cs new file mode 100644 index 0000000..83e6dfe --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Migration/IPartMigrationService.cs @@ -0,0 +1,18 @@ + +namespace Sa.Partitional.PostgreSql; + +public interface IPartMigrationService +{ + CancellationToken OutboxMigrated { get; } + Task Migrate(CancellationToken cancellationToken = default); + Task Migrate(DateTimeOffset[] dates, CancellationToken cancellationToken = default); + + Task WaitMigration(TimeSpan timeout, CancellationToken cancellationToken = default) + { + if (OutboxMigrated.IsCancellationRequested) return Task.FromResult(true); + + var tcs = new TaskCompletionSource(); + OutboxMigrated.Register(() => tcs.SetResult()); + return Task.Run(() => Task.WaitAny(tcs.Task, Task.Delay(timeout, cancellationToken)) == 0); + } +} diff --git a/src/Sa.Partitional.PostgreSql/Migration/MigrationJobConstance.cs b/src/Sa.Partitional.PostgreSql/Migration/MigrationJobConstance.cs new file mode 100644 index 0000000..5eda465 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Migration/MigrationJobConstance.cs @@ -0,0 +1,7 @@ +namespace Sa.Partitional.PostgreSql.Migration; + +public static class MigrationJobConstance +{ + public readonly static Guid MigrationJobId = Guid.Parse("43588353-0005-4C84-97CA-40F2A620BC4C"); + public const string MigrationDefaultJobName = "Migration job"; +} diff --git a/src/Sa.Partitional.PostgreSql/Migration/PartMigrationJob.cs b/src/Sa.Partitional.PostgreSql/Migration/PartMigrationJob.cs new file mode 100644 index 0000000..a025652 --- /dev/null +++ 
b/src/Sa.Partitional.PostgreSql/Migration/PartMigrationJob.cs @@ -0,0 +1,11 @@ +using Sa.Schedule; + +namespace Sa.Partitional.PostgreSql.Migration; + +internal class PartMigrationJob(IPartMigrationService service) : IJob +{ + public async Task Execute(IJobContext context, CancellationToken cancellationToken) + { + await service.Migrate(cancellationToken); + } +} diff --git a/src/Sa.Partitional.PostgreSql/Migration/PartMigrationScheduleSettings.cs b/src/Sa.Partitional.PostgreSql/Migration/PartMigrationScheduleSettings.cs new file mode 100644 index 0000000..3383859 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Migration/PartMigrationScheduleSettings.cs @@ -0,0 +1,16 @@ +namespace Sa.Partitional.PostgreSql; + +public class PartMigrationScheduleSettings +{ + public int ForwardDays { get; set; } = 2; + + public bool AsJob { get; set; } = false; + + public string? MigrationJobName { get; set; } + + public TimeSpan ExecutionInterval { get; set; } = TimeSpan + .FromHours(4) + .Add(TimeSpan.FromMinutes(Random.Shared.Next(1, 59))); + + public TimeSpan WaitMigrationTimeout { get; set; } = TimeSpan.FromSeconds(3); +} diff --git a/src/Sa.Partitional.PostgreSql/Migration/PartMigrationService.cs b/src/Sa.Partitional.PostgreSql/Migration/PartMigrationService.cs new file mode 100644 index 0000000..56e4943 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Migration/PartMigrationService.cs @@ -0,0 +1,59 @@ +using Sa.Extensions; +using Sa.Timing.Providers; + +namespace Sa.Partitional.PostgreSql.Migration; + +internal sealed class PartMigrationService( + IPartRepository repository + , ICurrentTimeProvider timeProvider + , PartMigrationScheduleSettings settings +) + : IPartMigrationService, IDisposable +{ + private int s_triggered = 0; + private readonly CancellationTokenSource _cts = new(); + + public CancellationToken OutboxMigrated => _cts.Token; + + public void Dispose() + { + _cts.Dispose(); + } + + public Task Migrate(DateTimeOffset[] dates, CancellationToken 
cancellationToken = default) + => repository.Migrate(dates, cancellationToken); + + public async Task Migrate(CancellationToken cancellationToken = default) + { + if (Interlocked.CompareExchange(ref s_triggered, 1, 0) == 0) + { + try + { + DateTimeOffset now = timeProvider.GetUtcNow().StartOfDay(); + DateTimeOffset[] dates = Enumerable + .Range(0, settings.ForwardDays) + .Select(i => now.AddDays(i)) + .ToArray(); + + int result = await repository.Migrate(dates, cancellationToken); + await _cts.CancelAsync(); + return result; + } + finally + { + Interlocked.CompareExchange(ref s_triggered, 0, 1); + } + } + else + { + do + { + Console.WriteLine("waiting"); + await Task.Delay(settings.WaitMigrationTimeout, cancellationToken); + } + while (s_triggered != 0); + } + + return -1; + } +} diff --git a/src/Sa.Partitional.PostgreSql/Migration/Setup.cs b/src/Sa.Partitional.PostgreSql/Migration/Setup.cs new file mode 100644 index 0000000..58c5987 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Migration/Setup.cs @@ -0,0 +1,70 @@ +using System.Collections.Concurrent; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Sa.Schedule; + +namespace Sa.Partitional.PostgreSql.Migration; + + + +internal static class Setup +{ + private static readonly ConcurrentDictionary>> s_invokers = []; + + public static IServiceCollection AddPartMigration(this IServiceCollection services, Action? 
configure = null) + { + + if (configure != null) + { + if (s_invokers.TryGetValue(services, out var builder)) + { + builder.Add(configure); + } + else + { + s_invokers[services] = [configure]; + } + } + + services.TryAddSingleton(sp => + { + var item = new PartMigrationScheduleSettings(); + if (s_invokers.TryGetValue(services, out var invokers)) + { + foreach (Action invoker in invokers) + { + invoker.Invoke(sp, item); + } + s_invokers.Remove(services, out _); + } + return item; + }); + + services.TryAddSingleton(); + + + services.AddSchedule(b => b + .UseHostedService() + .AddJob((sp, builder) => + { + PartMigrationScheduleSettings migrationSettings = sp.GetRequiredService(); + + builder.WithName(migrationSettings.MigrationJobName ?? MigrationJobConstance.MigrationDefaultJobName); + + builder + .StartImmediate() + .EveryTime(migrationSettings.ExecutionInterval) + .ConfigureErrorHandling(berr => berr.DoSuppressError(err => true)) + ; + + if (!migrationSettings.AsJob) + { + builder.Disabled(); + } + + }, MigrationJobConstance.MigrationJobId) + ); + + return services; + } +} diff --git a/src/Sa.Partitional.PostgreSql/Part.cs b/src/Sa.Partitional.PostgreSql/Part.cs new file mode 100644 index 0000000..78c4a57 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Part.cs @@ -0,0 +1,10 @@ +using Sa.Classes; + +namespace Sa.Partitional.PostgreSql; + +public record Part(string Name, PartByRange PartBy) : Enumeration(Name.GetHashCode(), Name) +{ + public const string RootId = "root"; + + public static readonly Part Root = new(RootId, PartByRange.Day); +} diff --git a/src/Sa.Partitional.PostgreSql/PartByRange.cs b/src/Sa.Partitional.PostgreSql/PartByRange.cs new file mode 100644 index 0000000..55f3eb1 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/PartByRange.cs @@ -0,0 +1,20 @@ +namespace Sa.Partitional.PostgreSql; + +/// +/// Enumerates the possible partitional ranges for a PostgreSQL database. +/// +public enum PartByRange +{ + /// + /// Partition by day. 
+ /// + Day, + /// + /// Partition by month. + /// + Month, + /// + /// Partition by year. + /// + Year +} \ No newline at end of file diff --git a/src/Sa.Partitional.PostgreSql/PartitionManager.cs b/src/Sa.Partitional.PostgreSql/PartitionManager.cs new file mode 100644 index 0000000..f4b5528 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/PartitionManager.cs @@ -0,0 +1,23 @@ +using Sa.Classes; +using Sa.Partitional.PostgreSql.Cache; + +namespace Sa.Partitional.PostgreSql; + + +internal class PartitionManager(IPartCache cache, IPartMigrationService migrationService) : IPartitionManager +{ + public ValueTask EnsureParts(string tableName, DateTimeOffset date, StrOrNum[] partValues, CancellationToken cancellationToken = default) + { + return cache.EnsureCache(tableName, date, partValues, cancellationToken); + } + + public Task Migrate(DateTimeOffset[] dates, CancellationToken cancellationToken = default) + { + return migrationService.Migrate(dates, cancellationToken); + } + + public Task Migrate(CancellationToken cancellationToken = default) + { + return migrationService.Migrate(cancellationToken); + } +} diff --git a/src/Sa.Partitional.PostgreSql/PgPartBy.cs b/src/Sa.Partitional.PostgreSql/PgPartBy.cs new file mode 100644 index 0000000..0b3de93 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/PgPartBy.cs @@ -0,0 +1,82 @@ +using Sa.Classes; +using Sa.Extensions; +using System.Text.RegularExpressions; + +namespace Sa.Partitional.PostgreSql; + +public record PgPartBy( + PartByRange PartByRange + , Func> GetRange + , Func Fmt + , Func ParseFmt +) + : Enumeration((int)PartByRange, PartByRange.ToString()) +{ + + public static readonly PgPartBy Day = new( + PartByRange: PartByRange.Day + , GetRange: static date => date.ToUniversalTime().StartOfDay().RangeTo(date => date.AddDays(1), false) + , Fmt: static ts => $"y{ts.Year:0000}m{ts.Month:00}d{ts.Day:00}" + , ParseFmt: static str => StrToDate(str, PartByRange.Day) + ); + + public static readonly PgPartBy Month = new( + 
PartByRange: PartByRange.Month + , GetRange: static date => date.ToUniversalTime().StartOfMonth().RangeTo(date => date.AddMonths(1), false) + , Fmt: static ts => $"y{ts.Year:0000}m{ts.Month:00}" + , ParseFmt: static str => StrToDate(str, PartByRange.Month) + ); + + public static readonly PgPartBy Year = new( + PartByRange: PartByRange.Year + , GetRange: static date => date.ToUniversalTime().StartOfYear().RangeTo(date => date.AddYears(1), false) + , Fmt: static ts => $"y{ts.Year:0000}" + , ParseFmt: static str => StrToDate(str, PartByRange.Year) + ); + + + + #region methods + + public static PgPartBy FromRange(PartByRange range) + => GetAll().FirstOrDefault(c => c.PartByRange == range) ?? Day; + + public static PgPartBy FromPartName(string part) + { + Part current = Part.TryFromName(part, out Part? item) ? item : Part.Root; + return FromRange(current.PartBy); + } + + public override string ToString() => Name; + + + private static DateTimeOffset? StrToDate(string str, PartByRange range) + { + if (string.IsNullOrWhiteSpace(str)) return null; + + ReadOnlySpan span = str.AsSpan(); + + return range switch + { + PartByRange.Day => FmtRegex.GetDayRegEx().IsMatch(span) ? new DateTimeOffset(span[^10..^6].StrToInt()!.Value, span[^5..^3].StrToInt()!.Value, span[^2..].StrToInt()!.Value, 0, 0, 0, TimeSpan.Zero) : null, + PartByRange.Month => FmtRegex.GetMonthRegEx().IsMatch(span) ? new DateTimeOffset(span[^7..^3].StrToInt()!.Value, span[^2..].StrToInt()!.Value, 1, 0, 0, 0, TimeSpan.Zero) : null, + PartByRange.Year => FmtRegex.GetYearRegEx().IsMatch(span) ? 
new DateTimeOffset(span[^4..].StrToInt()!.Value, 1, 1, 0, 0, 0, TimeSpan.Zero) : null, + _ => null, + }; + } + + #endregion +} + + +static partial class FmtRegex +{ + [GeneratedRegex(@".*y(\d{4})m(\d{2})d(\d{2})$")] + public static partial Regex GetDayRegEx(); + + [GeneratedRegex(@".*y(\d{4})m(\d{2})$")] + public static partial Regex GetMonthRegEx(); + + [GeneratedRegex(@".*y(\d{4})$")] + public static partial Regex GetYearRegEx(); +} diff --git a/src/Sa.Partitional.PostgreSql/Readme.md b/src/Sa.Partitional.PostgreSql/Readme.md new file mode 100644 index 0000000..cbe792e --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Readme.md @@ -0,0 +1,226 @@ +# Sa.Partitional.PostgreSql + +Библиотека, предназначенная для управления партиционированием таблиц в PostgreSQL +c целью улучшения производительности и управляемости в больших объемах данных. + +## Позволяет + +- Декларативно описать секционируемую таблицу по времени (день, месяц, год). +- Задать секции по спискам ключей для строк или чисел. +- Задать расписание миграций для создания новых партиций. +- Задать расписание для удаления старых партиций. +- Управлять партициями. + + +## Особенности + +- Так как макс. длина наименования таблицы составляет 63 символа, то следует учитывать выбор значения при создании партиции. +- Все таблицы имеют финальной секцией интервал по времени, который представлен столбцом с типом `int64` в формате Unix timestamps in seconds. +- Удаление старых партиций производится через DROP. 
+ +## Пример конфигурирования + +```csharp + +public static class PartitioningSetup +{ + public static IServiceCollection AddPartitioning(this IServiceCollection services) + { + services.AddPartitional((sp, builder) => + { + builder.AddSchema("public", schema => + { + // Настройка таблицы orders + schema.AddTable("orders", + "id INT NOT NULL", + "tenant_id INT NOT NULL", + "region TEXT NOT NULL", + "amount DECIMAL(10, 2) NOT NULL" + ) + // Партиционирование по tenant_id и region + .PartByList("tenant_id", "region") + // с интервалом в месяц по заданному столбцу + .PartByRange(PgPartBy.Month, "created_at") + ; + + + // Настройка таблицы customer + schema.AddTable("customer", + "id INT NOT NULL", + "country TEXT NOT NULL", + "city TEXT NOT NULL" + ) + // разделить в таблицах меж партиций + .WithPartSeparator("_") + // Партиционирование по country и city (если не задан PartByRange то по дням) + .PartByList("country", "city") + // Миграция партиций каждого тенанта по city + .AddMigration("RU", ["Moscow", "Samara"]) + .AddMigration("USA", ["Alabama", "New York"]) + .AddMigration("FR", ["Paris", "Lyon", "Bordeaux"]); + }); + }) + // расписание миграций - создания новых партиций + .AddPartMigrationSchedule((sp, opts) => + { + opts.AsJob = true; + opts.ExecutionInterval = TimeSpan.FromHours(12); + opts.ForwardDays = 2; + }) + // расписание удаления старых партиций + .AddPartCleanupSchedule((sp, opts) => + { + opts.AsJob = true; + opts.DropPartsAfterRetention = TimeSpan.FromDays(21); + }) + ; + + return services; + } +} + +``` + +### Результат миграции + +Для примера выше - результатом миграции будут две таблицы: + +`customer` - *таблица с данными* + +|id|country|city|created_at| +|--|-------|----|----------| +||||| + + +`customer_$part` - *таблица для учета партиций (фрагмент)* + +|id|root|part_values|part_by|from_date|to_date| +|--|----|-----------|-------|---------|-------|
+|public."customer_RU_Samara_y2025m01d08"|public.customer|["s:RU","s:Samara"]|Day|1736294400|1736380800| +|public."customer_RU_Samara_y2025m01d09"|public.customer|["s:RU","s:Samara"]|Day|1736380800|1736467200| +|public."customer_USA_Alabama_y2025m01d08"|public.customer|["s:USA","s:Alabama"]|Day|1736294400|1736380800| + + +#### Итоговый DDL + +```sql + +CREATE TABLE public."customer_$part" ( + id text NOT NULL, + root text NOT NULL, + part_values text NOT NULL, + part_by text NOT NULL, + from_date int8 NOT NULL, + to_date int8 NOT NULL, + CONSTRAINT "customer_$part_pkey" PRIMARY KEY (id) +); + + +CREATE TABLE public.customer ( + id int4 NOT NULL, + country text NOT NULL, + city text NOT NULL, + created_at int8 NOT NULL, + CONSTRAINT pk_customer PRIMARY KEY (id, country, city, created_at) +) +PARTITION BY LIST (country); + +-- Partitions + +CREATE TABLE public."customer_FR" PARTITION OF public.customer FOR VALUES IN ('FR') +PARTITION BY LIST (city); + +-- Partitions + +CREATE TABLE public."customer_FR_Bordeaux" PARTITION OF public."customer_FR" FOR VALUES IN ('Bordeaux') +PARTITION BY RANGE (created_at); + +-- Partitions + +CREATE TABLE public."customer_FR_Bordeaux_y2025m01d08" PARTITION OF public."customer_FR_Bordeaux" FOR VALUES FROM ('1736294400') TO ('1736380800'); +CREATE TABLE public."customer_FR_Bordeaux_y2025m01d09" PARTITION OF public."customer_FR_Bordeaux" FOR VALUES FROM ('1736380800') TO ('1736467200'); + + +CREATE TABLE public."customer_FR_Lyon" PARTITION OF public."customer_FR" FOR VALUES IN ('Lyon') +PARTITION BY RANGE (created_at); + +-- Partitions + +CREATE TABLE public."customer_FR_Lyon_y2025m01d08" PARTITION OF public."customer_FR_Lyon" FOR VALUES FROM ('1736294400') TO ('1736380800'); +CREATE TABLE public."customer_FR_Lyon_y2025m01d09" PARTITION OF public."customer_FR_Lyon" FOR VALUES FROM ('1736380800') TO ('1736467200'); + + +CREATE TABLE public."customer_FR_Paris" PARTITION OF public."customer_FR" FOR VALUES IN ('Paris') +PARTITION BY RANGE 
(created_at); + +-- Partitions + +CREATE TABLE public."customer_FR_Paris_y2025m01d08" PARTITION OF public."customer_FR_Paris" FOR VALUES FROM ('1736294400') TO ('1736380800'); +CREATE TABLE public."customer_FR_Paris_y2025m01d09" PARTITION OF public."customer_FR_Paris" FOR VALUES FROM ('1736380800') TO ('1736467200'); + +-- RU + +CREATE TABLE public."customer_RU" PARTITION OF public.customer FOR VALUES IN ('RU') +PARTITION BY LIST (city); + +CREATE TABLE public."customer_RU_Moscow" PARTITION OF public."customer_RU" FOR VALUES IN ('Moscow') +PARTITION BY RANGE (created_at); + +CREATE TABLE public."customer_RU_Moscow_y2025m01d08" PARTITION OF public."customer_RU_Moscow" FOR VALUES FROM ('1736294400') TO ('1736380800'); +CREATE TAB... + +-- USA + +... +``` + + + +## PartByRange + +Используется для обозначения интервалов партиционирования данных - разбиение данных на части по дням, месяцам или годам. + +```csharp +/// +/// Enumerates the possible partitional ranges for a PostgreSQL database. +/// +public enum PartByRange +{ + Day, + Month, + Year +} +``` +*По умолчанию используется столбец `created_at` с разбиением по дням* + + +## IPartitionManager + +Интерфейс для управления партициями в базе данных. + +```csharp +public interface IPartitionManager +{ + /// + /// Migrates the existing partitions in the database. + /// This method may be used to reorganize or update partitions based on the current state of the data. + Task Migrate(CancellationToken cancellationToken = default); + + /// + /// Migrates partitions for specific dates. + /// This method allows for targeted migration of partitions based on the provided date range. + /// + Task Migrate(DateTimeOffset[] dates, CancellationToken cancellationToken = default); + + /// + /// Ensures that the specified partitions exist for a given table and date. + /// This method checks if the specified partitions are present and creates them if they are not. 
+ /// + /// The name of the table for which partitions are being ensured. + /// The date associated with the partition. + /// An array of values that define the partitions (could be strings or numbers). + /// A token to monitor for cancellation requests. + ValueTask EnsureParts(string tableName, DateTimeOffset date, Classes.StrOrNum[] partValues, CancellationToken cancellationToken = default); +} +``` \ No newline at end of file diff --git a/src/Sa.Partitional.PostgreSql/Repositories/IPartRepository.cs b/src/Sa.Partitional.PostgreSql/Repositories/IPartRepository.cs new file mode 100644 index 0000000..dd66ed9 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Repositories/IPartRepository.cs @@ -0,0 +1,27 @@ +using Sa.Classes; + +namespace Sa.Partitional.PostgreSql; + +/// +/// Represents information about a partition in a database table based on a range of values. +/// +/// The unique identifier for the partition - fully qualified name of the database table, including the schema. +/// The name of the original table from which this partition is derived. +/// An array of values that define the partitioning criteria, which can be either string or numeric. +/// The method used for partitioning (e.g., by range, list, etc.). +/// The date from which this partition is valid. +public record PartByRangeInfo(string Id, string RootTableName, StrOrNum[] PartValues, PgPartBy PartBy, DateTimeOffset FromDate); + +/// +/// Represents a repository interface for managing database partitions. +/// This interface defines methods for creating, migrating, retrieving, and dropping partitions in a database. 
+/// +public interface IPartRepository +{ + Task CreatePart(string tableName, DateTimeOffset date, StrOrNum[] partValues, CancellationToken cancellationToken = default); + Task Migrate(DateTimeOffset[] dates, CancellationToken cancellationToken = default); + Task Migrate(DateTimeOffset[] dates, Func> resolve, CancellationToken cancellationToken = default); + Task> GetPartsFromDate(string tableName, DateTimeOffset fromDate, CancellationToken cancellationToken = default); + Task> GetPartsToDate(string tableName, DateTimeOffset toDate, CancellationToken cancellationToken = default); + Task DropPartsToDate(string tableName, DateTimeOffset toDate, CancellationToken cancellationToken = default); +} diff --git a/src/Sa.Partitional.PostgreSql/Repositories/PartRepository.cs b/src/Sa.Partitional.PostgreSql/Repositories/PartRepository.cs new file mode 100644 index 0000000..02c6dad --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Repositories/PartRepository.cs @@ -0,0 +1,209 @@ +using Microsoft.Extensions.Logging; +using Npgsql; +using Sa.Classes; +using Sa.Data.PostgreSql; +using Sa.Extensions; +using Sa.Partitional.PostgreSql.SqlBuilder; + +namespace Sa.Partitional.PostgreSql.Repositories; + +internal sealed class PartRepository(IPgDataSource dataSource, ISqlBuilder sqlBuilder, ILogger? logger = null) : IPartRepository, IDisposable +{ + + /// + /// Semaphore to ensure we don't perform ddl sql concurrently for this data source. 
+ /// + private readonly SemaphoreSlim _migrationSemaphore = new(1, 1); + + public async Task ExecuteDDL(string sql, CancellationToken cancellationToken) + { + await _migrationSemaphore.WaitAsync(cancellationToken); + try + { + return await dataSource.ExecuteNonQuery(sql, cancellationToken); + } + finally + { + _migrationSemaphore.Release(); + } + } + + public async Task CreatePart(string tableName, DateTimeOffset date, StrOrNum[] partValues, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(partValues); + + ISqlTableBuilder builder = sqlBuilder[tableName] ?? throw new KeyNotFoundException(nameof(tableName)); + string sql = builder.CreateSql(date, partValues); + return await ExecuteDDL(sql, cancellationToken); + } + + public async Task Migrate(DateTimeOffset[] dates, Func> resolve, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(dates); + ArgumentNullException.ThrowIfNull(resolve); + + int i = 0; + + await foreach (string sql in sqlBuilder.MigrateSql(dates, resolve)) + { + await ExecuteDDL(sql, cancellationToken); + i++; + } + return i; + } + + + public async Task Migrate(DateTimeOffset[] dates, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(dates); + int i = await Migrate(dates, async table => + { + ITableSettings? tableSettings = sqlBuilder[table]?.Settings; + + if (tableSettings != null) + { + IPartTableMigrationSupport? 
supMigration = tableSettings.Migration; + + if (supMigration != null) + { + return await supMigration.GetPartValues(cancellationToken); + } + else + { + if (tableSettings.PartByListFieldNames.Length > 0) + { + throw new InvalidOperationException($"Migration support is required for table '{table}' because 'PartByListFieldNames' is specified."); + } + } + } + + return []; + + }, cancellationToken); + + return i; + } + + public async Task> GetPartsFromDate(string tableName, DateTimeOffset fromDate, CancellationToken cancellationToken = default) + { + string sql = sqlBuilder.SelectPartsFromDateSql(tableName); + long unixTime = fromDate.ToUnixTimeSeconds(); + + return await Retry.Jitter( + async t => + { + try + { + return await dataSource.ExecuteReaderList(sql, ReadPartInfo, [new("from_date", unixTime)], t); + } + catch (PostgresException ex) when (UndefinedTable(ex)) + { + return []; + } + } + , next: HandleError + , cancellationToken: cancellationToken); + } + + + + public async Task> GetPartsToDate(string tableName, DateTimeOffset toDate, CancellationToken cancellationToken = default) + { + string sql = sqlBuilder.SelectPartsToDateSql(tableName); + try + { + return await dataSource.ExecuteReaderList( + sql + , ReadPartInfo + , [new("to_date", toDate.ToUnixTimeSeconds())] + , cancellationToken); + } + catch (PostgresException ex) when (UndefinedTable(ex)) + { + return []; + } + } + + public async Task DropPartsToDate(string tableName, DateTimeOffset toDate, CancellationToken cancellationToken = default) + { + int droppedCount = 0; + List list = await GetPartsToDate(tableName, toDate, cancellationToken); + + logger?.LogInformation("Starting to drop parts for table {TableName} up to date {ToDate}.", tableName, toDate); + + foreach (PartByRangeInfo part in list) + { + ITableSettings? 
settings = sqlBuilder[part.RootTableName]?.Settings; + + if (settings != null) + { + string sql = settings.DropPartSql(part.Id); + try + { + await ExecuteDDL(sql, cancellationToken); + droppedCount++; + logger?.LogInformation("Successfully dropped part with ID {PartId} from table {TableName}.", part.Id, part.RootTableName); + } + catch (PostgresException pgErr) when (UndefinedTable(pgErr)) + { + logger?.LogWarning(pgErr, "Skip to drop part with ID {PartId}.", part.Id); + } + catch (Exception ex) + { + logger?.LogError(ex, "Failed to drop part with ID {PartId}.", part.Id); + } + } + else + { + logger?.LogDebug("No settings found for root table {RootTableName}. Skipping part with ID {PartId}.", part.RootTableName, part.Id); + } + } + + + logger?.LogInformation("Finished dropping parts. Total dropped: {DroppedCount}.", droppedCount); + + return droppedCount; + } + + private static PartByRangeInfo ReadPartInfo(NpgsqlDataReader reader) + { + return new PartByRangeInfo( + reader.GetString(0) + , reader.GetString(1) + , reader.GetString(2).FromJson()! 
+ , PgPartBy.FromPartName(reader.GetString(3)) + , reader.GetInt64(4).ToDateTimeOffsetFromUnixTimestamp() + ); + } + + private static bool HandleError(Exception ex, int _ = 0) + { + if (ex is PostgresException err) + { + return err.SqlState switch + { + PostgresErrorCodes.ConnectionException + or PostgresErrorCodes.ConnectionFailure + or PostgresErrorCodes.DeadlockDetected + or PostgresErrorCodes.CannotConnectNow + => true, //continue + + + _ => false, // abort + }; + } + + return true; + } + + + private static bool UndefinedTable(PostgresException ex) => + ex.SqlState == PostgresErrorCodes.UndefinedTable + || ex.SqlState == PostgresErrorCodes.InvalidSchemaName + ; + + public void Dispose() + { + _migrationSemaphore.Dispose(); + } +} diff --git a/src/Sa.Partitional.PostgreSql/Repositories/Setup.cs b/src/Sa.Partitional.PostgreSql/Repositories/Setup.cs new file mode 100644 index 0000000..0af8cb6 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Repositories/Setup.cs @@ -0,0 +1,13 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; + +namespace Sa.Partitional.PostgreSql.Repositories; + +public static class Setup +{ + public static IServiceCollection AddPartRepository(this IServiceCollection services) + { + services.TryAddSingleton(); + return services; + } +} diff --git a/src/Sa.Partitional.PostgreSql/Sa.Partitional.PostgreSql.csproj b/src/Sa.Partitional.PostgreSql/Sa.Partitional.PostgreSql.csproj new file mode 100644 index 0000000..3e06b4e --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Sa.Partitional.PostgreSql.csproj @@ -0,0 +1,15 @@ + + + + net8.0 + enable + enable + + + + + + + + + diff --git a/src/Sa.Partitional.PostgreSql/Settings/IPartTableMigrationSupport.cs b/src/Sa.Partitional.PostgreSql/Settings/IPartTableMigrationSupport.cs new file mode 100644 index 0000000..02d43df --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Settings/IPartTableMigrationSupport.cs @@ -0,0 +1,8 @@ +using Sa.Classes; + 
+namespace Sa.Partitional.PostgreSql; + +public interface IPartTableMigrationSupport +{ + Task GetPartValues(CancellationToken cancellationToken); +} diff --git a/src/Sa.Partitional.PostgreSql/Settings/ITableSettings.cs b/src/Sa.Partitional.PostgreSql/Settings/ITableSettings.cs new file mode 100644 index 0000000..469fb39 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Settings/ITableSettings.cs @@ -0,0 +1,68 @@ +namespace Sa.Partitional.PostgreSql; + +/// +/// for managing database table configurations +/// +public interface ITableSettings +{ + /// + /// Gets the full name of the table, including schema. + /// + string FullName { get; } + + /// + /// Gets the name of the database schema where the table resides. + /// + string DatabaseSchemaName { get; } + + /// + /// Gets the actual name of the table in the database. + /// + string DatabaseTableName { get; } + + /// + /// Gets the name of the primary key field for the table. + /// + string IdFieldName { get; } + + /// + /// Gets an array of field names that are part of the table. + /// + string[] Fields { get; } + + /// + /// Gets an array of field names used for partitioning the table by list. + /// + string[] PartByListFieldNames { get; } + + /// + /// Gets the name of the field used for range partitioning. + /// Typically a date or numeric field. + /// + string PartByRangeFieldName { get; } + + /// + /// Gets the type of partitioning being used (e.g., list, range). + /// + PgPartBy PartBy { get; } + + /// + /// Gets an instance that supports migration for partitioned tables. + /// + IPartTableMigrationSupport Migration { get; } + + /// + /// Gets the SQL separator used in partitioning queries. + /// + string SqlPartSeparator { get; } + + /// + /// Gets a function that returns additional SQL to be executed after the root SQL statement. + /// + Func? PostRootSql { get; } + + /// + /// Gets a function that returns SQL for defining primary key constraints. + /// + Func? 
ConstraintPkSql { get; } +} \ No newline at end of file diff --git a/src/Sa.Partitional.PostgreSql/Settings/ITableSettingsStorage.cs b/src/Sa.Partitional.PostgreSql/Settings/ITableSettingsStorage.cs new file mode 100644 index 0000000..1125fe4 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Settings/ITableSettingsStorage.cs @@ -0,0 +1,8 @@ + +namespace Sa.Partitional.PostgreSql; + +public interface ITableSettingsStorage +{ + IReadOnlyCollection Schemas { get; } + IReadOnlyCollection Tables { get; } +} diff --git a/src/Sa.Partitional.PostgreSql/Settings/TableSettings.cs b/src/Sa.Partitional.PostgreSql/Settings/TableSettings.cs new file mode 100644 index 0000000..a485f8f --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Settings/TableSettings.cs @@ -0,0 +1,69 @@ +using System.Diagnostics; + +namespace Sa.Partitional.PostgreSql.Settings; + +[DebuggerDisplay("settings root = {DatabaseTableName}")] +internal class TableSettings : ITableSettings +{ + static class Default + { + public readonly static PgPartBy DefaultPartBy = PgPartBy.Day; + public const string PartByRangeFieldName = "created_at"; + public const string SqlPartSeparator = "__"; + } + + public TableSettings( + string schemaName + , string tableName + , string[] fields + , string[] partByListFields + , IPartTableMigrationSupport migration + , PgPartBy? partByRange + , Func? postRootSql = null + , Func? constraintPkSql = null + , string? timestampField = null + , string? sqlPartSeparator = null + ) + { + DatabaseSchemaName = schemaName; + DatabaseTableName = tableName.Trim('"'); + Fields = fields; + Migration = migration; + + PartBy = partByRange ?? Default.DefaultPartBy; + + PartByListFieldNames = partByListFields; + PartByRangeFieldName = timestampField ?? Default.PartByRangeFieldName; + PartitionByFieldName = PartByListFieldNames.Length == 0 ? 
PartByRangeFieldName : partByListFields[0]; + + PostRootSql = postRootSql; + ConstraintPkSql = constraintPkSql; + + IdFieldName = GetIdName(Array.Find(fields, c => !string.IsNullOrWhiteSpace(c))); + + FullName = $@"{DatabaseSchemaName}.{DatabaseTableName}"; + SqlPartSeparator = sqlPartSeparator ?? Default.SqlPartSeparator; + } + + public string IdFieldName { get; } + public string[] Fields { get; } + public IPartTableMigrationSupport Migration { get; } + + public PgPartBy PartBy { get; } + + public string DatabaseSchemaName { get; } + public string DatabaseTableName { get; } + public string PartByRangeFieldName { get; } + public string[] PartByListFieldNames { get; } + public string PartitionByFieldName { get; } + public string FullName { get; } + public string SqlPartSeparator { get; } + + + // extensions + public Func? PostRootSql { get; } + public Func? ConstraintPkSql { get; } + + static string GetIdName(string? idSql) => idSql?.Trim().Split(' ')[0] ?? string.Empty; + +} diff --git a/src/Sa.Partitional.PostgreSql/Settings/TableSettingsStorage.cs b/src/Sa.Partitional.PostgreSql/Settings/TableSettingsStorage.cs new file mode 100644 index 0000000..2e34692 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Settings/TableSettingsStorage.cs @@ -0,0 +1,13 @@ + +namespace Sa.Partitional.PostgreSql.Settings; + +internal class TableSettingsStorage(IReadOnlyCollection settings) : ITableSettingsStorage +{ + + public IReadOnlyCollection Schemas { get; } = settings + .Select(c => c.DatabaseSchemaName) + .Distinct() + .ToArray(); + + public IReadOnlyCollection Tables => settings; +} diff --git a/src/Sa.Partitional.PostgreSql/Setup.cs b/src/Sa.Partitional.PostgreSql/Setup.cs new file mode 100644 index 0000000..52137ed --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/Setup.cs @@ -0,0 +1,23 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Sa.Partitional.PostgreSql.Configuration; + +namespace 
Sa.Partitional.PostgreSql; + +public static class Setup +{ + public static IPartConfiguration AddPartitional(this IServiceCollection services, Action configure, bool? asJob = null) + { + services.AddSaInfrastructure(); + services.TryAddSingleton(); + + return new PartConfiguration(services) + // defaults + .AddDataSource() + .AddPartTables(configure) + .AddPartCache() + .AddPartMigrationSchedule((_, settings) => settings.AsJob = asJob ?? settings.AsJob) + .AddPartCleanupSchedule((_, settings) => settings.AsJob = asJob ?? settings.AsJob) + ; + } +} diff --git a/src/Sa.Partitional.PostgreSql/SqlBuilder/ISqlBuilder.cs b/src/Sa.Partitional.PostgreSql/SqlBuilder/ISqlBuilder.cs new file mode 100644 index 0000000..e8e5e4b --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/SqlBuilder/ISqlBuilder.cs @@ -0,0 +1,22 @@ +using Sa.Classes; + +namespace Sa.Partitional.PostgreSql; + +public interface ISqlBuilder +{ + ISqlTableBuilder? this[string tableName] { get; } + + IReadOnlyCollection Tables { get; } + + IAsyncEnumerable MigrateSql(DateTimeOffset[] dates, Func> resolve); + + string CreatePartSql(string tableName, DateTimeOffset date, StrOrNum[] partValues); + + string SelectPartsQualifiedTablesSql(string tableName, StrOrNum[] partValues); + + string SelectPartsQualifiedTablesSql(string qualifiedTablesSql); + + string SelectPartsFromDateSql(string tableName); + + string SelectPartsToDateSql(string tableName); +} diff --git a/src/Sa.Partitional.PostgreSql/SqlBuilder/ISqlBuilderFactory.cs b/src/Sa.Partitional.PostgreSql/SqlBuilder/ISqlBuilderFactory.cs new file mode 100644 index 0000000..a4a0f99 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/SqlBuilder/ISqlBuilderFactory.cs @@ -0,0 +1,6 @@ +namespace Sa.Partitional.PostgreSql; + +public interface ISqlBuilderFactory +{ + ISqlBuilder Create(); +} diff --git a/src/Sa.Partitional.PostgreSql/SqlBuilder/ISqlTableBuilder.cs b/src/Sa.Partitional.PostgreSql/SqlBuilder/ISqlTableBuilder.cs new file mode 100644 index 0000000..7554f66 --- 
/dev/null +++ b/src/Sa.Partitional.PostgreSql/SqlBuilder/ISqlTableBuilder.cs @@ -0,0 +1,17 @@ +using Sa.Classes; + +namespace Sa.Partitional.PostgreSql; + +public interface ISqlTableBuilder +{ + string FullName { get; } + + ITableSettings Settings { get; } + + string SelectPartsFromDate { get; } + string SelectPartsToDate { get; } + + string GetPartsSql(StrOrNum[] partValues); + + string CreateSql(DateTimeOffset date, params StrOrNum[] partValues); +} diff --git a/src/Sa.Partitional.PostgreSql/SqlBuilder/Setup.cs b/src/Sa.Partitional.PostgreSql/SqlBuilder/Setup.cs new file mode 100644 index 0000000..31d00fd --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/SqlBuilder/Setup.cs @@ -0,0 +1,15 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; + +namespace Sa.Partitional.PostgreSql.SqlBuilder; + +internal static class Setup +{ + public static IServiceCollection AddSqlBuilder(this IServiceCollection services) + { + services.TryAddSingleton(); + services.TryAddSingleton(sp => sp.GetRequiredService().Create()); + + return services; + } +} diff --git a/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlBuilder.cs b/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlBuilder.cs new file mode 100644 index 0000000..3111bd1 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlBuilder.cs @@ -0,0 +1,89 @@ +using Sa.Classes; + +namespace Sa.Partitional.PostgreSql.SqlBuilder; + +internal class SqlBuilder(ITableSettingsStorage storage) : ISqlBuilder +{ + private readonly Dictionary builders = storage + .Tables + .Select(table => new SqlTableBuilder(table)) + .ToDictionary(c => c.FullName); + + + public IReadOnlyCollection Tables => builders.Values; + + public async IAsyncEnumerable MigrateSql(DateTimeOffset[] dates, Func> resolve) + { + foreach (string table in builders.Keys) + { + SqlTableBuilder builder = builders[table] ?? 
throw new KeyNotFoundException(table); + + StrOrNum[][] parValues = await resolve(table); + + if (parValues.Length > 0) + { + foreach (StrOrNum[] parts in parValues) + { + foreach (DateTimeOffset date in dates) + { + string sql = builder.CreateSql(date, parts); + yield return sql; + } + } + } + else + { + foreach (DateTimeOffset date in dates) + { + string sql = builder.CreateSql(date); + yield return sql; + } + } + } + } + + public ISqlTableBuilder? this[string tableName] => Find(tableName ?? throw new ArgumentNullException(nameof(tableName))); + + + public string SelectPartsQualifiedTablesSql(string tableName, StrOrNum[] partValues) + => (Find(tableName) ?? throw new KeyNotFoundException(tableName)).GetPartsSql(partValues); + + public string SelectPartsQualifiedTablesSql(string qualifiedTablesSql) + => SqlTemplate.SelectPartsQualifiedTablesSql(qualifiedTablesSql); + + public string SelectPartsFromDateSql(string tableName) + => (Find(tableName) ?? throw new KeyNotFoundException(tableName)).SelectPartsFromDate; + + public string SelectPartsToDateSql(string tableName) + => (Find(tableName) ?? throw new KeyNotFoundException(tableName)).SelectPartsToDate; + + public string CreatePartSql(string tableName, DateTimeOffset date, StrOrNum[] partValues) + => (Find(tableName) ?? throw new KeyNotFoundException(tableName)).CreateSql(date); + + #region privates + private ISqlTableBuilder? Find(string tableName) + { + ISqlTableBuilder? item = + ( + storage.Schemas.Count == 1 + ? builders.GetValueOrDefault(GetFullName(storage.Schemas.First(), tableName)) + : builders.Values.FirstOrDefault(c => c.FullName == tableName) + ) + ?? 
builders.GetValueOrDefault(tableName); + + if (item != null) return item; + + if (tableName.Contains('"')) + { + tableName = tableName.Replace("\"", ""); + return Find(tableName); + } + return item; + } + + static string GetFullName(string schemaName, string tableName) => $"{schemaName}.{tableName}"; + + + + #endregion +} diff --git a/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlBuilderFactory.cs b/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlBuilderFactory.cs new file mode 100644 index 0000000..21bcaea --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlBuilderFactory.cs @@ -0,0 +1,6 @@ +namespace Sa.Partitional.PostgreSql.SqlBuilder; + +internal class SqlBuilderFactory(ISettingsBuilder configuration) : ISqlBuilderFactory +{ + public ISqlBuilder Create() => new SqlBuilder(configuration.Build()); +} diff --git a/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlPartListBuilder.cs b/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlPartListBuilder.cs new file mode 100644 index 0000000..ccd361e --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlPartListBuilder.cs @@ -0,0 +1,18 @@ +using Sa.Classes; +using Sa.Extensions; + +namespace Sa.Partitional.PostgreSql.SqlBuilder; + +/// +/// public."_outbox_root" +/// +internal class SqlPartListBuilder(ITableSettings settings) +{ + public string CreateSql(StrOrNum[] partValues) + => partValues.Length > 0 + ? 
partValues + .Select((c, i) => settings.CreateNestedSql(partValues[0..(i + 1)])) + .JoinByString(Environment.NewLine) + : string.Empty + ; +} diff --git a/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlPartRangeBuilder.cs b/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlPartRangeBuilder.cs new file mode 100644 index 0000000..4ec9767 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlPartRangeBuilder.cs @@ -0,0 +1,12 @@ +using Sa.Classes; + +namespace Sa.Partitional.PostgreSql.SqlBuilder; + +/// +/// public."_outbox_root__y2024m12d11" +/// +internal class SqlPartRangeBuilder(ITableSettings settings) +{ + public string CreateSql(DateTimeOffset date, StrOrNum[] partValues) + => settings.CreatePartByRangeSql(date, partValues); +} diff --git a/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlRootBuilder.cs b/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlRootBuilder.cs new file mode 100644 index 0000000..ae9abbe --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlRootBuilder.cs @@ -0,0 +1,12 @@ +namespace Sa.Partitional.PostgreSql.SqlBuilder; + + +/// +/// public."_outbox" +/// -- CREATE INDEX IF NOT EXISTS ix__outbox__payload_type ON public."_outbox" (payload_type); +/// +internal class SqlRootBuilder(ITableSettings settings) +{ + private readonly Lazy _sql = new(settings.CreateRootSql); + public string CreateSql() => _sql.Value; +} diff --git a/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlTableBuilder.cs b/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlTableBuilder.cs new file mode 100644 index 0000000..e1b0b58 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlTableBuilder.cs @@ -0,0 +1,47 @@ +using Sa.Classes; + +namespace Sa.Partitional.PostgreSql.SqlBuilder; + +internal class SqlTableBuilder(ITableSettings settings) : ISqlTableBuilder +{ + private readonly SqlRootBuilder rootBuilder = new(settings); + private readonly SqlPartListBuilder partListBuilder = new(settings); + private readonly SqlPartRangeBuilder partRangeBuilder = new(settings); + + 
public string FullName => settings.FullName; + + public ITableSettings Settings => settings; + + + public string CreateSql(DateTimeOffset date, params StrOrNum[] partValues) + { + if (settings.PartByListFieldNames.Length != partValues.Length) + { + return +$""" +-- {date} +{rootBuilder.CreateSql()} + +-- incomplete number of parts +{partListBuilder.CreateSql(partValues)} +"""; + + } + + return +$""" +-- {date} +{rootBuilder.CreateSql()} +{partListBuilder.CreateSql(partValues)} +{partRangeBuilder.CreateSql(date, partValues)} +"""; + } + + public string GetPartsSql(StrOrNum[] partValues) => settings.SelectPartsQualifiedTablesSql(partValues); + + public override string ToString() => $"{FullName} {Settings.PartByListFieldNames}"; + + public string SelectPartsFromDate { get; } = settings.SelectPartsFromDateSql(); + + public string SelectPartsToDate { get; } = settings.SelectPartsToDateSql(); +} diff --git a/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlTemplate.cs b/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlTemplate.cs new file mode 100644 index 0000000..03ba734 --- /dev/null +++ b/src/Sa.Partitional.PostgreSql/SqlBuilder/SqlTemplate.cs @@ -0,0 +1,159 @@ +using Sa.Classes; +using Sa.Extensions; + +namespace Sa.Partitional.PostgreSql.SqlBuilder; + +internal static class SqlTemplate +{ + const string CacheByRangeTableNamePostfix = "$part"; + + public static string CreateRootSql(this ITableSettings settings) + { + string pkList = settings.PartByListFieldNames.Contains(settings.IdFieldName) + ? 
settings.PartByListFieldNames.JoinByString(",") + : new string[] { settings.IdFieldName }.Concat(settings.PartByListFieldNames).JoinByString(",") + ; + + return $""" + +CREATE SCHEMA IF NOT EXISTS {settings.DatabaseSchemaName}; + +CREATE TABLE IF NOT EXISTS {settings.GetQualifiedTableName()} ( + {settings.Fields.JoinByString($",{Environment.NewLine} ")}, + {settings.PartByRangeFieldName} bigint NOT NULL, + CONSTRAINT "{settings.Pk()}" PRIMARY KEY ({pkList},{settings.PartByRangeFieldName}) +) {settings.GetPartitionalSql(0)} +; + +-- post sql +{settings.PostRootSql?.Invoke()} + +"""; + } + + public static string CreateNestedSql(this ITableSettings settings, StrOrNum[] values) => +$""" + + +-- {settings.GetPartitionalSql(values.Length - 1)[18..]} + +CREATE TABLE IF NOT EXISTS {settings.GetQualifiedTableName(values)} +PARTITION OF {settings.GetQualifiedTableName(values[0..^1])} +FOR VALUES IN ({values[^1].Match(s => $"'{s}'", n => n.ToString())}) +{settings.GetPartitionalSql(values.Length)} +; +"""; + + public static string CreatePartByRangeSql(this ITableSettings settings, DateTimeOffset date, StrOrNum[] values) + { + string timeRangeTablename = settings.GetQualifiedTableName(date, values); + LimSection range = settings.PartBy.GetRange(date); + string cacheTablename = settings.GetCacheByRangeTableName(); + + return +$""" + + +-- ({settings.PartByRangeFieldName}) part by: {settings.PartBy} + +CREATE TABLE IF NOT EXISTS {timeRangeTablename} +PARTITION OF {settings.GetQualifiedTableName(values)} +FOR VALUES FROM ({range.Start.ToUnixTimeSeconds()}) TO ({range.End.ToUnixTimeSeconds()}) +; + +-- cache + +CREATE TABLE IF NOT EXISTS {cacheTablename} ( + id TEXT PRIMARY KEY, + root TEXT NOT NULL, + part_values TEXT NOT NULL, + part_by TEXT NOT NULL, + from_date bigint NOT NULL, + to_date bigint NOT NULL +) +; + +INSERT INTO {cacheTablename} (id,root,part_values,part_by,from_date,to_date) +VALUES 
('{timeRangeTablename}','{settings.FullName}','{values.ToJson()}','{settings.PartBy.Name}',{range.Start.ToUnixTimeSeconds()},{range.End.ToUnixTimeSeconds()}) +ON CONFLICT (id) DO NOTHING +; + +""" + ; + } + + public static string SelectPartsFromDateSql(this ITableSettings settings) + { + return +$""" +SELECT id,root,part_values,part_by,from_date +FROM {settings.GetCacheByRangeTableName()} +WHERE root = '{settings.FullName}' AND from_date >= @from_date +ORDER BY from_date DESC +; +"""; + } + + + public static string SelectPartsToDateSql(this ITableSettings settings) + { + return +$""" +SELECT id,root,part_values,part_by,from_date +FROM {settings.GetCacheByRangeTableName()} +WHERE + root = '{settings.FullName}' + AND to_date <= @to_date +ORDER BY from_date ASC +; +"""; + } + + + public static string SelectPartsQualifiedTablesSql(this ITableSettings settings, StrOrNum[] values) + => SelectPartsQualifiedTablesSql(settings.GetQualifiedTableName(values)); + + public static string SelectPartsQualifiedTablesSql(string qualifiedTableName) + => +$""" +WITH pt AS ( + SELECT inhrelid::regclass AS pt + FROM pg_inherits + WHERE inhparent = '{qualifiedTableName}'::regclass +) +SELECT pt::text from pt +; +"""; + + + public static string DropPartSql(this ITableSettings settings, string qualifiedTableName) + { + return $""" +DROP TABLE IF EXISTS {qualifiedTableName}; +DELETE FROM {settings.GetCacheByRangeTableName()} WHERE id='{qualifiedTableName}'; +"""; + } + + public static string GetQualifiedTableName(this ITableSettings settings, DateTimeOffset? date, StrOrNum[] values) + => date != null + ? settings.GetQualifiedTableName([.. values, settings.PartBy.Fmt(date.Value)]) + : settings.GetQualifiedTableName(values); + + + private static string GetCacheByRangeTableName(this ITableSettings settings) => settings.GetQualifiedTableName(CacheByRangeTableNamePostfix); + + + static string GetQualifiedTableName(this ITableSettings settings, params StrOrNum[] values) + => values.Length > 0 + ? 
$"{settings.DatabaseSchemaName}.\"{settings.DatabaseTableName}{settings.SqlPartSeparator}{values.JoinByString(settings.SqlPartSeparator)}\"" + : $"{settings.DatabaseSchemaName}.\"{settings.DatabaseTableName}\"" + ; + + static string GetPartitionalSql(this ITableSettings settings, int partIndex) + => partIndex >= 0 && partIndex < settings.PartByListFieldNames.Length + ? $"PARTITION BY LIST ({settings.PartByListFieldNames[partIndex]})" + : $"PARTITION BY RANGE ({settings.PartByRangeFieldName})" + ; + + static string Pk(this ITableSettings settings) => settings.ConstraintPkSql?.Invoke() ?? $"pk_{settings.DatabaseTableName}"; +} diff --git a/src/Sa.Partitional.Support/Part.cs b/src/Sa.Partitional.Support/Part.cs new file mode 100644 index 0000000..5757441 --- /dev/null +++ b/src/Sa.Partitional.Support/Part.cs @@ -0,0 +1,10 @@ +using Sa.Classes; + +namespace Sa.Partitional.Support; + +public record Part(string Name, PartByRange PartBy) : Enumeration(Name.GetHashCode(), Name) +{ + public const string RootId = "root"; + + public static readonly Part Root = new(RootId, PartByRange.Day); +} diff --git a/src/Sa.Partitional.Support/PartByRange.cs b/src/Sa.Partitional.Support/PartByRange.cs new file mode 100644 index 0000000..ed08c74 --- /dev/null +++ b/src/Sa.Partitional.Support/PartByRange.cs @@ -0,0 +1,8 @@ +namespace Sa.Partitional.Support; + +public enum PartByRange +{ + Day, + Month, + Year +} diff --git a/src/Sa.Partitional.Support/Sa.Partitional.Support.csproj b/src/Sa.Partitional.Support/Sa.Partitional.Support.csproj new file mode 100644 index 0000000..c9a9dd6 --- /dev/null +++ b/src/Sa.Partitional.Support/Sa.Partitional.Support.csproj @@ -0,0 +1,13 @@ + + + + net8.0 + enable + enable + + + + + + + diff --git a/src/Sa.Schedule/Engine/JobContext.cs b/src/Sa.Schedule/Engine/JobContext.cs new file mode 100644 index 0000000..27b1b9d --- /dev/null +++ b/src/Sa.Schedule/Engine/JobContext.cs @@ -0,0 +1,59 @@ +using Microsoft.Extensions.DependencyInjection; +using 
Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Sa.Schedule.Settings; + +namespace Sa.Schedule.Engine; + +public class JobContext(IJobSettings settings) : IJobContext +{ + public Guid JobId => settings.JobId; + + public string JobName => settings.Properties.JobName ?? $"{JobId}"; + + public JobStatus Status { get; set; } + + public IJobSettings Settings => settings; + + public ulong NumIterations { get; set; } + + public ulong FailedIterations { get; set; } + + public ulong CompetedIterations { get; set; } + + public int FailedRetries { get; set; } + + public JobException? LastError { get; set; } + + public DateTimeOffset CreatedAt { get; set; } + + public DateTimeOffset? ExecuteAt { get; set; } + + public ulong NumRuns { get; set; } + + public IServiceProvider JobServices { get; set; } = NullJobServices.Instance; + + public Queue Stack { get; private set; } = new(); + + IEnumerable IJobContext.Stack => Stack.Reverse(); + + public ILogger Logger => JobServices.GetService>() ?? 
NullLogger.Instance; + + public IJobContext Clone() + { + JobContext clone = new(Settings) + { + NumIterations = NumIterations, + FailedRetries = FailedRetries, + FailedIterations = FailedIterations, + LastError = LastError, + CreatedAt = CreatedAt, + ExecuteAt = ExecuteAt, + Status = Status, + NumRuns = NumRuns, + Stack = new Queue(Stack.Select(x => x.Clone())), + }; + + return clone; + } +} diff --git a/src/Sa.Schedule/Engine/JobController.cs b/src/Sa.Schedule/Engine/JobController.cs new file mode 100644 index 0000000..2a89ced --- /dev/null +++ b/src/Sa.Schedule/Engine/JobController.cs @@ -0,0 +1,132 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Sa.Schedule.Settings; +using Sa.Timing.Providers; + +namespace Sa.Schedule.Engine; + + +/// +/// job lifecycly controller with context +/// +internal class JobController(IJobSettings settings, IInterceptorSettings interceptorSettings, IServiceScopeFactory scopeFactory) : IJobController +{ + private readonly JobContext context = new(settings); + + private JobPipeline? _job; + + public IJobContext Context => context; + + public DateTimeOffset UtcNow => context.JobServices.GetService()?.GetUtcNow() + ?? 
DateTimeOffset.UtcNow; + + public async ValueTask WaitToRun(CancellationToken cancellationToken) + { + if (context.NumRuns == 0 && settings.Properties.InitialDelay.HasValue && settings.Properties.InitialDelay.Value != TimeSpan.Zero) + { + context.Status = JobStatus.WaitingToRun; + await Task.Delay(settings.Properties.InitialDelay.Value, cancellationToken); + } + } + + public void Running() + { + _job = new JobPipeline(settings, interceptorSettings, scopeFactory); + + context.JobServices = _job.JobServices; + context.Status = JobStatus.Running; + + if (context.NumRuns == 0) context.CreatedAt = UtcNow; + context.NumRuns++; + } + + public void Stopped(TaskStatus status) + { + switch (status) + { + case TaskStatus.Faulted: context.Status = JobStatus.Failed; break; + case TaskStatus.Canceled: context.Status = JobStatus.Cancelled; break; + case TaskStatus.RanToCompletion: context.Status = JobStatus.Completed; break; + } + + context.JobServices = NullJobServices.Instance; + _job?.Dispose(); + } + + public async ValueTask CanExecute(CancellationToken cancellationToken) + { + if (settings.Properties.IsRunOnce == true && context.NumIterations > 0) + return CanJobExecuteResult.Abort; + + if (context.NumIterations == 0 && settings.Properties.Immediate == true) + return CanJobExecuteResult.Ok; + + IJobTiming? timing = settings.Properties.Timing; + + if (timing != null) + { + DateTimeOffset now = UtcNow; + + DateTimeOffset? next = timing.GetNextOccurrence(now, context); + + if (!next.HasValue) + return CanJobExecuteResult.Abort; + + TimeSpan delay = next.Value - now; + + if (delay.TotalMilliseconds > 0) + { + await Task.Delay(delay, cancellationToken); + } + } + + int stackSize = settings.Properties.ContextStackSize.GetValueOrDefault(); + + if (stackSize > 0) + { + if (context.Stack.Count == stackSize) context.Stack.Dequeue(); + context.Stack.Enqueue(context.Clone()); + } + + return !cancellationToken.IsCancellationRequested + ? 
CanJobExecuteResult.Ok + : CanJobExecuteResult.Abort; + } + + public Task Execute(CancellationToken cancellationToken) + { + context.NumIterations++; + context.ExecuteAt = UtcNow; + return _job!.Execute(Context, cancellationToken); + } + + public void ExecutionCompleted() + { + context.CompetedIterations++; + context.FailedRetries = 0; + } + + public void ExecutionFailed(Exception exception) + { + JobException error = new(context, exception); + context.FailedIterations++; + context.LastError = error; + + IJobErrorHandling errorHandling = settings.ErrorHandling; + + if (errorHandling.HasSuppressError && errorHandling.SuppressError?.Invoke(exception) == true) + { + context.Logger.LogWarning("[{JobName}] the error: “{Error}” on job was suppressed to continue", context.JobName, exception.Message); + return; + } + + if (context.FailedRetries < settings.ErrorHandling.RetryCount) + { + context.FailedRetries++; + context.Logger.LogWarning("[{JobName}] {FailedRetryAttempts} out of {RetryCount} reps when the job failed due to an error: “{Error}”", context.JobName, context.FailedRetries, errorHandling.RetryCount, exception.Message); + return; + } + + context.JobServices.GetService()?.HandleError(Context, error); + } +} diff --git a/src/Sa.Schedule/Engine/JobErrorHandler.cs b/src/Sa.Schedule/Engine/JobErrorHandler.cs new file mode 100644 index 0000000..d80b05b --- /dev/null +++ b/src/Sa.Schedule/Engine/JobErrorHandler.cs @@ -0,0 +1,91 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; + +namespace Sa.Schedule.Engine; + +internal class JobErrorHandler(IScheduleSettings settings, IHostApplicationLifetime? lifetime, ILogger? 
logger) : IJobErrorHandler +{ + public void HandleError(IJobContext context, Exception exception) + { + if (settings.HandleError?.Invoke(context, exception) != true) + { + // default handle + DoHandleError(context, exception); + } + + throw exception; + } + + private void DoHandleError(IJobContext context, Exception exception) + { + switch (context.Settings.ErrorHandling.ThenAction) + { + case ErrorHandlingAction.AbortJob: + AbortJob(context.JobName, exception); + break; + + case ErrorHandlingAction.CloseApplication: + CloseApplication(context.JobName, context.JobServices.GetRequiredService(), exception); + break; + + case ErrorHandlingAction.StopAllJobs: + StopAllJobs(context.JobName, context.JobServices.GetRequiredService(), exception); + break; + + default: + logger?.LogError(exception, "[{JobName}] Unknown error", context.JobName); + break; + } + + throw context.LastError ?? exception; + } + + private void AbortJob(string jobName, Exception exception) + { + logger?.LogError(exception, @" +************ +JOB: {JobName} +ERROR: The job will be aborted, the reason is an error: +{Error} +************ + ", jobName, exception.Message); + } + + private void StopAllJobs(string jobName, IScheduler scheduler, Exception exception) + { + logger?.LogError(exception, @" +************ +JOB: {JobName} +ERROR: The all jobs will be stoped, the reason is an error: +{Error} +************ + ", jobName, exception.Message); + + + if (scheduler == null) throw exception; + scheduler.Stop(); + } + + private void CloseApplication(string jobName, IScheduler scheduler, Exception exception) + { + logger?.LogError(exception, @" +************ +JOB: {JobName} +ERROR: The application will be closed, the reason is an error: +{Error} +************ + ", jobName, exception.Message); + + if (lifetime == null) throw exception; + + if (scheduler.Settings.IsHostedService) + { + lifetime.StopApplication(); + } + else + { + scheduler.Stop().ContinueWith(_ => lifetime.StopApplication()); + } + } +} diff 
--git a/src/Sa.Schedule/Engine/JobFactory.cs b/src/Sa.Schedule/Engine/JobFactory.cs new file mode 100644 index 0000000..be332d3 --- /dev/null +++ b/src/Sa.Schedule/Engine/JobFactory.cs @@ -0,0 +1,13 @@ +using Microsoft.Extensions.DependencyInjection; +using Sa.Schedule.Settings; + +namespace Sa.Schedule.Engine; + +internal class JobFactory(IServiceScopeFactory scopeFactory, IInterceptorSettings interceptorSettings, IJobRunner jobRunner) : IJobFactory +{ + public IJobController CreateJobController(IJobSettings settings) + => new JobController(settings, interceptorSettings, scopeFactory); + + public IJobScheduler CreateJobSchedule(IJobSettings settings) + => new JobScheduler(jobRunner, CreateJobController(settings)); +} diff --git a/src/Sa.Schedule/Engine/JobPipeline.cs b/src/Sa.Schedule/Engine/JobPipeline.cs new file mode 100644 index 0000000..3400709 --- /dev/null +++ b/src/Sa.Schedule/Engine/JobPipeline.cs @@ -0,0 +1,47 @@ +using Microsoft.Extensions.DependencyInjection; +using Sa.Schedule.Settings; + +namespace Sa.Schedule.Engine; + +internal sealed class JobPipeline : IJob, IDisposable +{ + #region proxy + class JobProxy(IJob job, IJobInterceptor interceptor, object? 
key) : IJob + { + public Task Execute(IJobContext context, CancellationToken cancellationToken) + => interceptor.OnHandle( + context + , () => job.Execute(context, cancellationToken) + , key + , cancellationToken); + } + #endregion + + private readonly IServiceScope _scope; + private readonly IJob _job; + + public JobPipeline(IJobSettings settings, IInterceptorSettings interceptorSettings, IServiceScopeFactory scopeFactory) + { + _scope = scopeFactory.CreateScope(); + IJob originalJob = (IJob)_scope.ServiceProvider.GetRequiredKeyedService(settings.JobType, settings.JobId); + + if (interceptorSettings.Interceptors.Count > 0) + { + _job = interceptorSettings + .Interceptors + .Reverse() + .Aggregate(originalJob, (job, s) + => new JobProxy(job, (IJobInterceptor)_scope.ServiceProvider.GetRequiredKeyedService(s.HandlerType, s.Key), s.Key)); + } + else + { + _job = originalJob; + } + } + + public IServiceProvider JobServices => _scope.ServiceProvider; + + public void Dispose() => _scope.Dispose(); + + public Task Execute(IJobContext context, CancellationToken cancellationToken) => _job.Execute(context, cancellationToken); +} diff --git a/src/Sa.Schedule/Engine/JobRunner.cs b/src/Sa.Schedule/Engine/JobRunner.cs new file mode 100644 index 0000000..4dcc155 --- /dev/null +++ b/src/Sa.Schedule/Engine/JobRunner.cs @@ -0,0 +1,44 @@ +using System.Diagnostics; + +namespace Sa.Schedule.Engine; + + +internal class JobRunner() : IJobRunner +{ + public async Task Run(IJobController controller, CancellationToken cancellationToken) + { + await controller.WaitToRun(cancellationToken); + + controller.Running(); + + await RunLoop(controller, cancellationToken) + .ContinueWith(t => controller.Stopped(t.Status), CancellationToken.None); + } + + [StackTraceHidden] + private static async Task RunLoop(IJobController controller, CancellationToken cancellationToken) + { + while (!cancellationToken.IsCancellationRequested) + { + CanJobExecuteResult next = await 
controller.CanExecute(cancellationToken);

            // Abort ends the run loop for good; Skip re-evaluates the schedule
            // without executing this iteration.
            if (next == CanJobExecuteResult.Abort) break;
            if (next == CanJobExecuteResult.Skip) continue;

            try
            {
                await controller.Execute(cancellationToken);
                controller.ExecutionCompleted();
            }
            // Cancellation of *this* run is normal shutdown, not a job failure.
            // NOTE(review): if Execute links tokens internally, the thrown
            // OperationCanceledException may carry a different token and fall
            // through to ExecutionFailed below — confirm that is intended.
            catch (OperationCanceledException ex) when (ex.CancellationToken == cancellationToken)
            {
                // skip
            }
            catch (Exception ex)
            {
                // Retry / suppress / abort policy is decided by the controller.
                controller.ExecutionFailed(ex);
            }
        }
    }
}

diff --git a/src/Sa.Schedule/Engine/JobScheduler.cs b/src/Sa.Schedule/Engine/JobScheduler.cs new file mode 100644 index 0000000..6612427 --- /dev/null +++ b/src/Sa.Schedule/Engine/JobScheduler.cs @@ -0,0 +1,99 @@
using Microsoft.Extensions.Primitives;

namespace Sa.Schedule.Engine;

internal sealed class JobScheduler(IJobRunner runner, IJobController controller) : IJobScheduler, IDisposable, IAsyncDisposable
{
    // Change token that never signals; returned once the scheduler is disposed.
    private readonly static IChangeToken NoneChangeToken = new CancellationChangeToken(CancellationToken.None);

    private readonly object _locked = new();

    private TaskCompletionSource?
_stoppingTask; + + private CancellationTokenSource _stoppingToken = new(); + private CancellationToken _originalToken; + + private bool _disposed; + + public IJobContext Context => controller.Context; + + public bool IsActive => _stoppingTask?.Task.Status == TaskStatus.WaitingForActivation; + + public IChangeToken GetActiveChangeToken() + { + if (_disposed) return NoneChangeToken; + + lock (_locked) + { + return new CancellationChangeToken(_stoppingToken.Token); + } + } + + /// + /// Start all jobs + /// + public bool Start(CancellationToken cancellationToken) + { + if (IsActive) return false; + + lock (_locked) + { + _stoppingToken.Cancel(); + _stoppingToken.Dispose(); + + _originalToken = cancellationToken; + _stoppingToken = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken); + + _stoppingTask = new TaskCompletionSource(); + + Task task = runner.Run(controller, _stoppingToken.Token); + + task.ContinueWith(Done, CancellationToken.None); + + return true; + } + } + + public bool Restart() => Start(_originalToken); + + public Task Stop() + { + if (IsActive) + { + lock (_locked) + { + _stoppingToken?.Cancel(); + } + } + + return _stoppingTask?.Task ?? Task.CompletedTask; + } + + private void Done(Task task) + { + lock (_locked) + { + _stoppingTask?.TrySetResult(); + } + } + + public void Dispose() + { + if (!_disposed) + { + _disposed = true; + _ = Stop(); + _stoppingToken.Dispose(); + } + } + + public async ValueTask DisposeAsync() + { + if (!_disposed) + { + _disposed = true; + await Stop(); + _stoppingToken.Dispose(); + } + } +} diff --git a/src/Sa.Schedule/Engine/JobTiming.cs b/src/Sa.Schedule/Engine/JobTiming.cs new file mode 100644 index 0000000..89e9eb3 --- /dev/null +++ b/src/Sa.Schedule/Engine/JobTiming.cs @@ -0,0 +1,12 @@ +namespace Sa.Schedule.Engine; + +public class JobTiming(Func nextTime, string name) : IJobTiming +{ + public string TimingName => name; + public DateTimeOffset? 
GetNextOccurrence(DateTimeOffset dateTime, IJobContext context) => nextTime(dateTime, context);

    /// <summary>Creates a timing that fires at a fixed interval after each occurrence.</summary>
    public static IJobTiming EveryTime(TimeSpan timeSpan, string? name = null) =>
        new JobTiming((dateTime, _) => dateTime.Add(timeSpan), name ?? $"every {timeSpan}");

    public static IJobTiming Default { get; } = EveryTime(TimeSpan.FromSeconds(1), "default every seconds");
}
diff --git a/src/Sa.Schedule/Engine/ScheduleHost.cs b/src/Sa.Schedule/Engine/ScheduleHost.cs new file mode 100644 index 0000000..0920b6c --- /dev/null +++ b/src/Sa.Schedule/Engine/ScheduleHost.cs @@ -0,0 +1,17 @@
using Microsoft.Extensions.Hosting;

namespace Sa.Schedule.Engine;

/// <summary>
/// Bridges the scheduler into the generic host lifetime:
/// start all jobs on host start, stop them on host shutdown.
/// </summary>
internal sealed class ScheduleHost(IScheduler controller) : IHostedService
{
    public Task StartAsync(CancellationToken cancellationToken)
    {
        controller.Start(cancellationToken);
        return Task.CompletedTask;
    }

    public async Task StopAsync(CancellationToken cancellationToken)
    {
        await controller.Stop();
    }
}
diff --git a/src/Sa.Schedule/Engine/Scheduler.cs b/src/Sa.Schedule/Engine/Scheduler.cs new file mode 100644 index 0000000..8e94f19 --- /dev/null +++ b/src/Sa.Schedule/Engine/Scheduler.cs @@ -0,0 +1,33 @@
namespace Sa.Schedule.Engine;

internal sealed class Scheduler(IScheduleSettings settings, IJobFactory factory) : IScheduler, IDisposable, IAsyncDisposable
{
    private bool _disposed;

    public IScheduleSettings Settings => settings;

    // NOTE(review): the element type was lost in this paste; CreateJobSchedule
    // returns IJobScheduler, so IReadOnlyCollection<IJobScheduler> is assumed — confirm.
    public IReadOnlyCollection<IJobScheduler> Schedules { get; } = settings
        .GetJobSettings()
        .Select(factory.CreateJobSchedule)
        .ToArray();

    /// <summary>
    /// Start all jobs
    /// </summary>
    public int Start(CancellationToken cancellationToken) => Schedules.Count(c => c.Start(cancellationToken));

    public int Restart() => Schedules.Count(c => c.Restart());

    public async Task Stop() => await Task.WhenAll(Schedules.Select(c => c.Stop()));

    public void Dispose()
    {
        if (!_disposed)
        {
            _disposed = true;
            _ = Stop();
        }
    }

    // FIX: mirror Dispose() — mark the instance disposed so a second
    // DisposeAsync/Dispose call is a no-op instead of stopping the jobs again.
    public async ValueTask DisposeAsync()
    {
        if (!_disposed)
        {
            _disposed = true;
            await Stop();
        }
    }
}
diff --git a/src/Sa.Schedule/ErrorHandlingAction.cs b/src/Sa.Schedule/ErrorHandlingAction.cs new file mode 100644 index 0000000..cd168a4 --- /dev/null +++ b/src/Sa.Schedule/ErrorHandlingAction.cs @@ -0,0 +1,20 @@
namespace Sa.Schedule;

/// <summary>
/// Specifies the actions to be taken when an error occurs.
/// </summary>
public enum ErrorHandlingAction
{
    /// <summary>
    /// Closes the entire application.
    /// </summary>
    CloseApplication,
    /// <summary>
    /// Aborts the current job.
    /// </summary>
    AbortJob,
    /// <summary>
    /// Stops all running jobs.
    /// </summary>
    StopAllJobs
}
\ No newline at end of file
diff --git a/src/Sa.Schedule/IJob.cs b/src/Sa.Schedule/IJob.cs new file mode 100644 index 0000000..b188ac0 --- /dev/null +++ b/src/Sa.Schedule/IJob.cs @@ -0,0 +1,15 @@
namespace Sa.Schedule;

/// <summary>
/// Represents a job that can be executed.
/// </summary>
public interface IJob
{
    /// <summary>
    /// Executes the job.
    /// </summary>
    /// <param name="context">The job context.</param>
    /// <param name="cancellationToken">The cancellation token.</param>
    /// <returns>A task representing the execution of the job.</returns>
    Task Execute(IJobContext context, CancellationToken cancellationToken);
}
\ No newline at end of file
diff --git a/src/Sa.Schedule/IJobBuilder.cs b/src/Sa.Schedule/IJobBuilder.cs new file mode 100644 index 0000000..396e0ee --- /dev/null +++ b/src/Sa.Schedule/IJobBuilder.cs @@ -0,0 +1,96 @@
namespace Sa.Schedule;

/// <summary>
/// Defines a builder for creating and configuring jobs.
/// </summary>
public interface IJobBuilder
{
    /// <summary>
    /// Sets the name of the job.
    /// </summary>
    /// <param name="name">The name of the job.</param>
    /// <returns>The current builder instance.</returns>
    IJobBuilder WithName(string name);

    /// <summary>
    /// Starts the job immediately.
    /// </summary>
    /// <returns>The current builder instance.</returns>
    IJobBuilder StartImmediate();

    /// <summary>
    /// Configures the job to run once.
    /// </summary>
    /// <returns>The current builder instance.</returns>
    IJobBuilder RunOnce();

    /// <summary>
    /// Sets the context stack size for the job.
    /// </summary>
    /// <param name="size">The context stack size.</param>
    /// <returns>The current builder instance.</returns>
+ IJobBuilder WithContextStackSize(int size); + + /// + /// job schedule delay before start + /// + /// The delay time span. + /// The current builder instance. + IJobBuilder WithInitialDelay(TimeSpan delay); + + /// + /// Sets the timing for the job. + /// + /// The job timing. + /// The current builder instance. + IJobBuilder WithTiming(IJobTiming timing); + + /// + /// Sets a tag for the job. + /// + /// The job tag. + /// The current builder instance. + IJobBuilder WithTag(object tag); + + /// + /// Configures the job to run at the specified time interval. + /// + /// The time interval. + /// The name of the interval (optional). + /// The current builder instance. + IJobBuilder EveryTime(TimeSpan timeSpan, string? name = null); + + /// + /// Configures the job to run every specified number of seconds. + /// + /// The number of seconds (default is 1). + /// The current builder instance. + IJobBuilder EverySeconds(int seconds = 1) => EveryTime(TimeSpan.FromSeconds(seconds), $"every {seconds} seconds"); + + /// + /// Configures the job to run every specified number of minutes. + /// + /// The number of minutes (default is 1). + /// The current builder instance. + IJobBuilder EveryMinutes(int minutes = 1) => EveryTime(TimeSpan.FromMinutes(minutes), $"every {minutes} minutes"); + + /// + /// Merges the specified job properties into the current job configuration. + /// + /// The job properties to merge. + /// The current builder instance. + IJobBuilder Merge(IJobProperties props); + + /// + /// Configures error handling for the job. + /// + /// The error handling configuration action. + /// The current builder instance. + IJobBuilder ConfigureErrorHandling(Action configure); + + /// + /// Disables the job. + /// + /// The current builder instance. 
+ IJobBuilder Disabled(); +} \ No newline at end of file diff --git a/src/Sa.Schedule/IJobContext.cs b/src/Sa.Schedule/IJobContext.cs new file mode 100644 index 0000000..1fd4346 --- /dev/null +++ b/src/Sa.Schedule/IJobContext.cs @@ -0,0 +1,33 @@ +using Microsoft.Extensions.Logging; + +namespace Sa.Schedule; + +/// +/// Provides information about the job context. +/// +/// +/// This interface defines the properties and methods that are available for a job context. +/// +/// +/// +public interface IJobContext +{ + + Guid JobId { get; } + string JobName { get; } + JobStatus Status { get; } + IJobSettings Settings { get; } + ulong NumIterations { get; } + ulong FailedIterations { get; } + ulong CompetedIterations { get; } + int FailedRetries { get; } + DateTimeOffset CreatedAt { get; } + DateTimeOffset? ExecuteAt { get; } + JobException? LastError { get; } + IServiceProvider JobServices { get; } + IEnumerable Stack { get; } + ILogger Logger { get; } + + IJobContext Clone(); + bool Active => Status == JobStatus.Running || Status == JobStatus.WaitingToRun; +} \ No newline at end of file diff --git a/src/Sa.Schedule/IJobController.cs b/src/Sa.Schedule/IJobController.cs new file mode 100644 index 0000000..9d7ca05 --- /dev/null +++ b/src/Sa.Schedule/IJobController.cs @@ -0,0 +1,29 @@ +namespace Sa.Schedule; + + +public enum CanJobExecuteResult +{ + Ok, + Abort, + Skip +} + +/// +/// Defines the contract for a job controller, responsible for managing the lifecycle of a job. 
+/// +public interface IJobController +{ + // scope context + public IJobContext Context { get; } + + // scope events + ValueTask WaitToRun(CancellationToken cancellationToken); + void Running(); + void Stopped(TaskStatus status); + + // iteration events + ValueTask CanExecute(CancellationToken cancellationToken); + Task Execute(CancellationToken cancellationToken); + void ExecutionFailed(Exception exception); + void ExecutionCompleted(); +} \ No newline at end of file diff --git a/src/Sa.Schedule/IJobErrorHandler.cs b/src/Sa.Schedule/IJobErrorHandler.cs new file mode 100644 index 0000000..463b931 --- /dev/null +++ b/src/Sa.Schedule/IJobErrorHandler.cs @@ -0,0 +1,14 @@ +namespace Sa.Schedule; + +/// +/// Defines the contract for handling job errors. +/// +public interface IJobErrorHandler +{ + /// + /// Handles an error that occurred during job execution. + /// + /// The job context. + /// The exception that occurred. + void HandleError(IJobContext context, Exception exception); +} \ No newline at end of file diff --git a/src/Sa.Schedule/IJobErrorHandling.cs b/src/Sa.Schedule/IJobErrorHandling.cs new file mode 100644 index 0000000..854eca0 --- /dev/null +++ b/src/Sa.Schedule/IJobErrorHandling.cs @@ -0,0 +1,32 @@ +namespace Sa.Schedule; + +/// +/// Defines the error handling behavior for a job. +/// +public interface IJobErrorHandling +{ + /// + /// Gets the action to take after an error occurs. + /// + ErrorHandlingAction ThenAction { get; } + + /// + /// Gets the number of retry attempts. + /// + int RetryCount { get; } + + /// + /// Gets a function that determines whether to suppress an error. + /// + Func? SuppressError { get; } + + /// + /// Gets a value indicating whether an error suppression function is defined. + /// + bool HasSuppressError => SuppressError != null; + + /// + /// Gets a value indicating whether retry attempts are enabled. 
+ /// + bool HasRetryAttempts => RetryCount > 0; +} diff --git a/src/Sa.Schedule/IJobErrorHandlingBuilder.cs b/src/Sa.Schedule/IJobErrorHandlingBuilder.cs new file mode 100644 index 0000000..d1020c4 --- /dev/null +++ b/src/Sa.Schedule/IJobErrorHandlingBuilder.cs @@ -0,0 +1,36 @@ +namespace Sa.Schedule; + +public interface IJobErrorHandlingBuilder +{ + /// + /// Specifies the retry policy for the job in case of an error. + /// + /// The number of times to retry the job. If null, the job will not be retried. + /// The current IJobErrorHandlingBuilder instance. + IJobErrorHandlingBuilder IfErrorRetry(int? count = null); + + /// + /// Specifies that the application should be closed if an error occurs. + /// + /// The current IJobErrorHandlingBuilder instance. + IJobErrorHandlingBuilder ThenCloseApplication(); + + /// + /// Specifies that all jobs should be stopped if an error occurs. + /// + /// The current IJobErrorHandlingBuilder instance. + IJobErrorHandlingBuilder ThenStopAllJobs(); + + /// + /// Specifies that the current job should be stopped if an error occurs. + /// + /// The current IJobErrorHandlingBuilder instance. + IJobErrorHandlingBuilder ThenStopJob(); + + /// + /// Specifies a custom error suppression policy. + /// + /// A function that determines whether an error should be suppressed. + /// The current IJobErrorHandlingBuilder instance. + IJobErrorHandlingBuilder DoSuppressError(Func? 
suppressError = null); +} \ No newline at end of file diff --git a/src/Sa.Schedule/IJobFactory.cs b/src/Sa.Schedule/IJobFactory.cs new file mode 100644 index 0000000..057641f --- /dev/null +++ b/src/Sa.Schedule/IJobFactory.cs @@ -0,0 +1,7 @@ +namespace Sa.Schedule; + +public interface IJobFactory +{ + IJobController CreateJobController(IJobSettings settings); + IJobScheduler CreateJobSchedule(IJobSettings settings); +} \ No newline at end of file diff --git a/src/Sa.Schedule/IJobInterceptor.cs b/src/Sa.Schedule/IJobInterceptor.cs new file mode 100644 index 0000000..3cddffd --- /dev/null +++ b/src/Sa.Schedule/IJobInterceptor.cs @@ -0,0 +1,17 @@ +namespace Sa.Schedule; + +/// +/// Defines an interface for job interceptors, allowing for custom logic to be executed before or after a job is handled. +/// +public interface IJobInterceptor +{ + /// + /// Called when a job is being handled, allowing the interceptor to perform custom logic before or after the job is executed. + /// + /// The context of the job being handled. + /// A function that represents the next step in the job handling pipeline. + /// An optional key associated with the job. + /// A token that can be used to cancel the job handling operation. + /// A task that represents the result of the job handling operation. + Task OnHandle(IJobContext context, Func next, object? key, CancellationToken cancellationToken); +} \ No newline at end of file diff --git a/src/Sa.Schedule/IJobProperties.cs b/src/Sa.Schedule/IJobProperties.cs new file mode 100644 index 0000000..47cdbce --- /dev/null +++ b/src/Sa.Schedule/IJobProperties.cs @@ -0,0 +1,47 @@ +namespace Sa.Schedule; + +/// +/// Represents the properties of a job. +/// +public interface IJobProperties +{ + /// + /// Gets the name of the job. + /// + string? JobName { get; } + + /// + /// Gets a value indicating whether the job should be executed immediately. + /// + bool? 
Immediate { get; } + + /// + /// Gets a value indicating whether the job should run only once. + /// + bool? IsRunOnce { get; } + + /// + /// Gets a value indicating whether the job is disabled. + /// + bool? Disabled { get; } + + /// + /// Gets the delay before the job starts. + /// + TimeSpan? InitialDelay { get; } + + /// + /// Gets the timing configuration for the job. + /// + IJobTiming? Timing { get; } + + /// + /// Gets the size of the context stack for the job. + /// + int? ContextStackSize { get; } + + /// + /// Gets an optional tag associated with the job. + /// + object? Tag { get; } +} diff --git a/src/Sa.Schedule/IJobRunner.cs b/src/Sa.Schedule/IJobRunner.cs new file mode 100644 index 0000000..788ee28 --- /dev/null +++ b/src/Sa.Schedule/IJobRunner.cs @@ -0,0 +1,6 @@ +namespace Sa.Schedule; + +public interface IJobRunner +{ + Task Run(IJobController controller, CancellationToken cancellationToken); +} diff --git a/src/Sa.Schedule/IJobScheduler.cs b/src/Sa.Schedule/IJobScheduler.cs new file mode 100644 index 0000000..c917c2f --- /dev/null +++ b/src/Sa.Schedule/IJobScheduler.cs @@ -0,0 +1,41 @@ +using Microsoft.Extensions.Primitives; + +namespace Sa.Schedule; + + +/// +/// This individual task scheduler is responsible for managing specific tasks. +/// +public interface IJobScheduler +{ + /// + /// Gets a value indicating whether the job scheduler is currently active. + /// + bool IsActive { get; } + + /// + /// Gets the context associated with the job scheduler. + /// + IJobContext Context { get; } + + /// + /// Gets a change token that can be used to track changes to the active state of the scheduler. + /// + IChangeToken GetActiveChangeToken(); + + /// + /// Starts the job scheduler asynchronously. + /// + bool Start(CancellationToken cancellationToken); + + /// + /// Restarts the job scheduler asynchronously. + /// + bool Restart(); + + /// + /// Stops the job scheduler asynchronously. 
+ /// + Task Stop(); +} + diff --git a/src/Sa.Schedule/IJobSettings.cs b/src/Sa.Schedule/IJobSettings.cs new file mode 100644 index 0000000..a98427f --- /dev/null +++ b/src/Sa.Schedule/IJobSettings.cs @@ -0,0 +1,24 @@ +namespace Sa.Schedule; + +/// +/// Defines the settings for a job. +/// +public interface IJobSettings +{ + /// + /// Gets the unique identifier of the job. + /// + Guid JobId { get; } + /// + /// Gets the type of the job. + /// + Type JobType { get; } + /// + /// Gets the properties of the job. + /// + IJobProperties Properties { get; } + /// + /// Gets the error handling settings for the job. + /// + IJobErrorHandling ErrorHandling { get; } +} \ No newline at end of file diff --git a/src/Sa.Schedule/IJobTiming.cs b/src/Sa.Schedule/IJobTiming.cs new file mode 100644 index 0000000..54e3056 --- /dev/null +++ b/src/Sa.Schedule/IJobTiming.cs @@ -0,0 +1,20 @@ +namespace Sa.Schedule; + +/// +/// Represents a job timing interface. +/// +public interface IJobTiming +{ + /// + /// Gets the name of the timing. + /// + string TimingName { get; } + + /// + /// Gets the next occurrence of the job timing. + /// + /// The current date and time. + /// The job context. + /// The next occurrence of the job timing, or null if no next occurrence is found. + DateTimeOffset? GetNextOccurrence(DateTimeOffset dateTime, IJobContext context); +} \ No newline at end of file diff --git a/src/Sa.Schedule/IScheduleBuilder.cs b/src/Sa.Schedule/IScheduleBuilder.cs new file mode 100644 index 0000000..936ddb6 --- /dev/null +++ b/src/Sa.Schedule/IScheduleBuilder.cs @@ -0,0 +1,50 @@ +namespace Sa.Schedule; + +public interface IScheduleBuilder +{ + /// + /// Adds a job of type to the schedule. + /// + /// The type of job to add. + /// The ID of the job. If not specified, a new ID will be generated. + /// A builder for the added job. + IJobBuilder AddJob(Guid? jobId = null) where T : class, IJob; + + /// + /// Adds a job with the specified action to the schedule. 
+ /// + /// The action to execute when the job is run. + /// The ID of the job. If not specified, a new ID will be generated. + /// A builder for the added job. + IJobBuilder AddJob(Func action, Guid? jobId = null); + + /// + /// Adds a job of type to the schedule and configures it using the specified action. + /// + /// The type of job to add. + /// An action to configure the job. + /// The ID of the job. If not specified, a new ID will be generated. + /// The schedule builder. + IScheduleBuilder AddJob(Action configure, Guid? jobId = null) where T : class, IJob; + + /// + /// Adds an interceptor of type to the schedule. + /// + /// The type of interceptor to add. + /// The key to use for the interceptor. If not specified, a default key will be used. + /// The schedule builder. + IScheduleBuilder AddInterceptor(object? key = null) where T : class, IJobInterceptor; + + /// + /// Configures the schedule to use a hosted service. + /// + /// The schedule builder. + IScheduleBuilder UseHostedService(); + + /// + /// Adds an error handler to the schedule. + /// + /// A function to handle errors that occur during job execution. + /// The schedule builder. + IScheduleBuilder AddErrorHandler(Func handler); +} diff --git a/src/Sa.Schedule/IScheduleSettings.cs b/src/Sa.Schedule/IScheduleSettings.cs new file mode 100644 index 0000000..6accf9a --- /dev/null +++ b/src/Sa.Schedule/IScheduleSettings.cs @@ -0,0 +1,22 @@ +namespace Sa.Schedule; + +/// +/// Defines the settings for a schedule. +/// +public interface IScheduleSettings +{ + /// + /// Gets a value indicating whether the schedule is hosted as a service. + /// + bool IsHostedService { get; } + + /// + /// Gets a function that handles errors that occur during job execution. + /// + Func? HandleError { get; } + + /// + /// Gets a collection of job settings. 
+ /// + IEnumerable GetJobSettings(); +} diff --git a/src/Sa.Schedule/IScheduler.cs b/src/Sa.Schedule/IScheduler.cs new file mode 100644 index 0000000..5a24d13 --- /dev/null +++ b/src/Sa.Schedule/IScheduler.cs @@ -0,0 +1,36 @@ +namespace Sa.Schedule; + +/// +/// This scheduler that manages multiple job schedulers. +/// +public interface IScheduler +{ + /// + /// Gets the schedule settings. + /// + IScheduleSettings Settings { get; } + + /// + /// Gets the collection of job schedulers. + /// + IReadOnlyCollection Schedules { get; } + + /// + /// Starts the scheduler. + /// + /// The cancellation token. + /// The number of jobs started. + int Start(CancellationToken cancellationToken); + + /// + /// Restarts the scheduler. + /// + /// The number of jobs restarted. + int Restart(); + + /// + /// Stops the scheduler. + /// + /// A task representing the asynchronous operation. + Task Stop(); +} \ No newline at end of file diff --git a/src/Sa.Schedule/JobException.cs b/src/Sa.Schedule/JobException.cs new file mode 100644 index 0000000..eac6df3 --- /dev/null +++ b/src/Sa.Schedule/JobException.cs @@ -0,0 +1,7 @@ +namespace Sa.Schedule; + +public class JobException(IJobContext context, Exception? innerException) + : Exception($"[{context.JobName}] job error", innerException) +{ + public IJobContext JobContext { get; } = context.Clone(); +} diff --git a/src/Sa.Schedule/JobInterceptorSettings.cs b/src/Sa.Schedule/JobInterceptorSettings.cs new file mode 100644 index 0000000..60a03a9 --- /dev/null +++ b/src/Sa.Schedule/JobInterceptorSettings.cs @@ -0,0 +1,3 @@ +namespace Sa.Schedule; + +public record JobInterceptorSettings(Type HandlerType, object? 
Key = null); diff --git a/src/Sa.Schedule/JobStatus.cs b/src/Sa.Schedule/JobStatus.cs new file mode 100644 index 0000000..bb67c90 --- /dev/null +++ b/src/Sa.Schedule/JobStatus.cs @@ -0,0 +1,10 @@ +namespace Sa.Schedule; + +public enum JobStatus +{ + WaitingToRun = 2, + Running = 3, + Completed = 5, + Cancelled = 6, + Failed = 7 +} diff --git a/src/Sa.Schedule/Readme.md b/src/Sa.Schedule/Readme.md new file mode 100644 index 0000000..cadb6ad --- /dev/null +++ b/src/Sa.Schedule/Readme.md @@ -0,0 +1,113 @@ +# Sa.Schedule + +Библиотека Sa.Schedule предоставляет способ настройки и выполнения задач по расписанию. Позволяет управлять набором задач, которые будут выполняться в определенное время или по определенной периодичности. + + +## Пример использования + +### Конфигурирование расписания + +```csharp +Services.AddSchedule(b => +{ + b.AddJob((sp, builder) => + { + builder + .EveryTime(TimeSpan.FromMilliseconds(100)) + .RunOnce() + .StartImmediate(); + }); +}); +``` + +### Задание (Job) + +Задание реализует интерфейс IJob и метод Execute, который выполняет основную логику + +```csharp +class SomeJob : IJob +{ + // IJobContext предоставляет доступ к контексту выполнения + public async Task Execute(IJobContext context, CancellationToken cancellationToken) + { + await Task.Delay(10, cancellationToken); // Имитация работы + } +} +``` + + +## Управление расписаниями + +Осуществлеятся посредством интерфейсов IScheduler и IJobScheduler. + +```csharp +/// +/// This scheduler that manages multiple job schedulers. +/// +public interface IScheduler +{ + /// + /// Gets the schedule settings. + /// + IScheduleSettings Settings { get; } + + /// + /// Gets the collection of job schedulers. + /// + IReadOnlyCollection Schedules { get; } + + /// + /// Starts the scheduler. + /// + /// The cancellation token. + /// The number of jobs started. + int Start(CancellationToken cancellationToken); + + /// + /// Restarts the scheduler. + /// + int Restart(); + + /// + /// Stops the scheduler. 
+ /// + Task Stop(); +} + +/// +/// This individual task scheduler is responsible for managing specific tasks. +/// +public interface IJobScheduler +{ + /// + /// Gets a value indicating whether the job scheduler is currently active. + /// + bool IsActive { get; } + + /// + /// Gets the context associated with the job scheduler. + /// + IJobContext Context { get; } + + /// + /// Gets a change token that can be used to track changes to the active state of the scheduler. + /// + IChangeToken GetActiveChangeToken(); + + /// + /// Starts the job scheduler asynchronously. + /// + bool Start(CancellationToken cancellationToken); + + /// + /// Restarts the job scheduler. + /// + bool Restart(); + + /// + /// Stops the job scheduler. + /// + Task Stop(); +} + +``` \ No newline at end of file diff --git a/src/Sa.Schedule/Sa.Schedule.csproj b/src/Sa.Schedule/Sa.Schedule.csproj new file mode 100644 index 0000000..c9a9dd6 --- /dev/null +++ b/src/Sa.Schedule/Sa.Schedule.csproj @@ -0,0 +1,13 @@ + + + + net8.0 + enable + enable + + + + + + + diff --git a/src/Sa.Schedule/Settings/IInterceptorSettings.cs b/src/Sa.Schedule/Settings/IInterceptorSettings.cs new file mode 100644 index 0000000..3561ec5 --- /dev/null +++ b/src/Sa.Schedule/Settings/IInterceptorSettings.cs @@ -0,0 +1,7 @@ + +namespace Sa.Schedule.Settings; + +internal interface IInterceptorSettings +{ + IReadOnlyCollection Interceptors { get; } +} \ No newline at end of file diff --git a/src/Sa.Schedule/Settings/InterceptorSettings.cs b/src/Sa.Schedule/Settings/InterceptorSettings.cs new file mode 100644 index 0000000..c014dc8 --- /dev/null +++ b/src/Sa.Schedule/Settings/InterceptorSettings.cs @@ -0,0 +1,19 @@ +namespace Sa.Schedule.Settings; + +internal class InterceptorSettings : IInterceptorSettings +{ + private readonly List _interceptors = []; + + public IReadOnlyCollection Interceptors => _interceptors; + + public InterceptorSettings(IEnumerable items) + { + foreach (JobInterceptorSettings item in items) + { + if 
(!_interceptors.Contains(item)) + { + _interceptors.Add(item); + } + } + } +} diff --git a/src/Sa.Schedule/Settings/Job.cs b/src/Sa.Schedule/Settings/Job.cs new file mode 100644 index 0000000..5d0da4b --- /dev/null +++ b/src/Sa.Schedule/Settings/Job.cs @@ -0,0 +1,7 @@ +namespace Sa.Schedule.Settings; + +internal class Job(Func action) : IJob +{ + public Task Execute(IJobContext context, CancellationToken cancellationToken) + => action(context, cancellationToken); +} diff --git a/src/Sa.Schedule/Settings/JobBuilder.cs b/src/Sa.Schedule/Settings/JobBuilder.cs new file mode 100644 index 0000000..10e6f14 --- /dev/null +++ b/src/Sa.Schedule/Settings/JobBuilder.cs @@ -0,0 +1,71 @@ +namespace Sa.Schedule.Settings; + + +internal class JobBuilder(JobSettings settings) : IJobBuilder +{ + public IJobBuilder ConfigureErrorHandling(Action configure) + { + configure.Invoke(settings.ErrorHandling); + return this; + } + + public IJobBuilder EveryTime(TimeSpan timeSpan, string? name = null) + { + settings.Properties.EveryTime(timeSpan, name); + return this; + } + + public IJobBuilder Merge(IJobProperties props) + { + settings.Properties.Merge(props); + return this; + } + + public IJobBuilder RunOnce() + { + settings.Properties.RunOnce(); + return this; + } + + public IJobBuilder StartImmediate() + { + settings.Properties.StartImmediate(); + return this; + } + + public IJobBuilder WithInitialDelay(TimeSpan delay) + { + settings.Properties.WithInitialDelay(delay); + return this; + } + + public IJobBuilder WithName(string name) + { + settings.Properties.WithName(name); + return this; + } + + public IJobBuilder WithTag(object tag) + { + settings.Properties.WithTag(tag); + return this; + } + + public IJobBuilder WithTiming(IJobTiming timing) + { + settings.Properties.WithTiming(timing); + return this; + } + + public IJobBuilder WithContextStackSize(int size) + { + settings.Properties.WithContextStackSize(size); + return this; + } + + public IJobBuilder Disabled() + { + 
settings.Properties.SetDisabled(); + return this; + } +} diff --git a/src/Sa.Schedule/Settings/JobErrorHandling.cs b/src/Sa.Schedule/Settings/JobErrorHandling.cs new file mode 100644 index 0000000..5da6748 --- /dev/null +++ b/src/Sa.Schedule/Settings/JobErrorHandling.cs @@ -0,0 +1,60 @@ +using Sa.Extensions; + +namespace Sa.Schedule.Settings; + +internal class JobErrorHandling : IJobErrorHandling, IJobErrorHandlingBuilder +{ + public static class Default + { + public const ErrorHandlingAction Action = ErrorHandlingAction.CloseApplication; + public const int RetryCount = 2; + public readonly static Func SuppressError = ex => !ex.IsCritical(); + } + + public ErrorHandlingAction ThenAction { get; private set; } = Default.Action; + + public int RetryCount { get; private set; } + + public Func? SuppressError { get; private set; } + + internal JobErrorHandling Merge(IJobErrorHandling handling) + { + if (handling.ThenAction != Default.Action) { ThenAction = handling.ThenAction; } + if (handling.HasRetryAttempts) { RetryCount = handling.RetryCount; } + if (handling.HasSuppressError) { SuppressError = handling.SuppressError; } + return this; + } + + + public IJobErrorHandlingBuilder IfErrorRetry(int? count = null) + { + RetryCount = count ?? Default.RetryCount; + return this; + } + + public IJobErrorHandlingBuilder ThenCloseApplication() + { + ThenAction = ErrorHandlingAction.CloseApplication; + return this; + } + + public IJobErrorHandlingBuilder ThenStopJob() + { + ThenAction = ErrorHandlingAction.AbortJob; + return this; + } + + public IJobErrorHandlingBuilder ThenStopAllJobs() + { + ThenAction = ErrorHandlingAction.StopAllJobs; + return this; + } + + + public IJobErrorHandlingBuilder DoSuppressError(Func? suppressError = null) + { + SuppressError = suppressError ?? 
Default.SuppressError; + return this; + } + +} diff --git a/src/Sa.Schedule/Settings/JobProperies.cs b/src/Sa.Schedule/Settings/JobProperies.cs new file mode 100644 index 0000000..ea40607 --- /dev/null +++ b/src/Sa.Schedule/Settings/JobProperies.cs @@ -0,0 +1,40 @@ +using Sa.Schedule.Engine; + +namespace Sa.Schedule.Settings; + +internal class JobProperies : IJobProperties +{ + public string? JobName { get; private set; } + public bool? Immediate { get; private set; } + public bool? IsRunOnce { get; private set; } + public TimeSpan? InitialDelay { get; private set; } + public bool? Disabled { get; private set; } + public IJobTiming? Timing { get; private set; } + public object? Tag { get; private set; } + public int? ContextStackSize { get; private set; } + + public void WithName(string name) => JobName = name; + public void RunOnce() => IsRunOnce = true; + public void StartImmediate() => Immediate = true; + public void WithInitialDelay(TimeSpan time) => InitialDelay = time; + public void WithTiming(IJobTiming timing) => Timing = timing; + public void SetDisabled() => Disabled = true; + public void WithContextStackSize(int size) => ContextStackSize = size; + public void WithTag(object tag) => Tag = tag; + + public void EveryTime(TimeSpan timeSpan, string? 
name = null) + => Timing = JobTiming.EveryTime(timeSpan, name); + + internal JobProperies Merge(IJobProperties props) + { + JobName ??= props.JobName; + Immediate ??= props.Immediate; + Disabled ??= props.Disabled; + Timing ??= props.Timing; + IsRunOnce ??= props.IsRunOnce; + InitialDelay ??= props.InitialDelay; + ContextStackSize ??= props.ContextStackSize; + Tag ??= props.Tag; + return this; + } +} diff --git a/src/Sa.Schedule/Settings/JobSettings.cs b/src/Sa.Schedule/Settings/JobSettings.cs new file mode 100644 index 0000000..3bd4cdf --- /dev/null +++ b/src/Sa.Schedule/Settings/JobSettings.cs @@ -0,0 +1,33 @@ + +namespace Sa.Schedule.Settings; + +internal class JobSettings(Type jobType, Guid jobId) : IJobSettings +{ + /// + /// handler id + /// + public Guid JobId { get; } = jobId; + public Type JobType => jobType; + + public JobProperies Properties { get; } = new(); + public JobErrorHandling ErrorHandling { get; } = new(); + + IJobProperties IJobSettings.Properties => Properties; + IJobErrorHandling IJobSettings.ErrorHandling => ErrorHandling; + + internal JobSettings Merge(IJobSettings info) + { + Properties.Merge(info.Properties); + ErrorHandling.Merge(info.ErrorHandling); + return this; + } + + + public static JobSettings Create(Guid jobId) + where T : class, IJob => new(typeof(T), jobId); + + public static JobSettings Create(IJobSettings options) + => new JobSettings(options.JobType, options.JobId).Merge(options); + + public IJobSettings Clone() => new JobSettings(JobType, JobId).Merge(this); +} diff --git a/src/Sa.Schedule/Settings/NullJobServices.cs b/src/Sa.Schedule/Settings/NullJobServices.cs new file mode 100644 index 0000000..9a77618 --- /dev/null +++ b/src/Sa.Schedule/Settings/NullJobServices.cs @@ -0,0 +1,8 @@ +namespace Sa.Schedule.Settings; + +internal class NullJobServices : IServiceProvider +{ + public object? 
GetService(Type serviceType) => null; + + public static IServiceProvider Instance { get; } = new NullJobServices(); +} diff --git a/src/Sa.Schedule/Settings/ScheduleBuilder.cs b/src/Sa.Schedule/Settings/ScheduleBuilder.cs new file mode 100644 index 0000000..d51b877 --- /dev/null +++ b/src/Sa.Schedule/Settings/ScheduleBuilder.cs @@ -0,0 +1,96 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Sa.Schedule.Engine; + +namespace Sa.Schedule.Settings; + +internal class ScheduleBuilder : IScheduleBuilder +{ + private readonly IServiceCollection _services; + + private bool _isHostedService; + + private Func? _handleError; + + public ScheduleBuilder(IServiceCollection services) + { + _services = services; + + _services.TryAddSingleton(sp => + { + IEnumerable jobSettings = sp.GetServices(); + ScheduleSettings settings = ScheduleSettings.Create(jobSettings, _isHostedService, _handleError); + return settings; + }); + + _services.TryAddSingleton(sp => + { + IEnumerable jobSettings = sp.GetServices(); + return new InterceptorSettings(jobSettings); + }); + } + + public IJobBuilder AddJob(Guid? jobId = null) + where T : class, IJob + { + Guid id = GetId(jobId); + _services.TryAddKeyedScoped(id); + + JobSettings jobSettings = JobSettings.Create(id); + _services.AddSingleton(jobSettings); + + return new JobBuilder(jobSettings); + } + + public IScheduleBuilder AddJob(Action configure, Guid? jobId = null) + where T : class, IJob + { + Guid id = GetId(jobId); + _services.TryAddKeyedScoped(id); + + _services.AddSingleton(sp => + { + JobSettings jobSettings = JobSettings.Create(id); + configure.Invoke(sp, new JobBuilder(jobSettings)); + return jobSettings; + }); + + return this; + } + + public IJobBuilder AddJob(Func action, Guid? 
jobId = null) + { + Guid id = GetId(jobId); + _services + .RemoveAllKeyed(jobId) + .AddKeyedScoped(id, (_, __) => new Job(action)); + + JobSettings jobSettings = JobSettings.Create(id); + _services.AddSingleton(jobSettings); + + return new JobBuilder(jobSettings); + } + + public IScheduleBuilder AddErrorHandler(Func handler) + { + _handleError = handler; + return this; + } + + private static Guid GetId(Guid? jobId) => jobId.GetValueOrDefault(Guid.NewGuid()); + + public IScheduleBuilder UseHostedService() + { + _isHostedService = true; + _services.AddHostedService(); + return this; + } + + public IScheduleBuilder AddInterceptor(object? key = null) + where T : class, IJobInterceptor + { + _services.AddSingleton(new JobInterceptorSettings(typeof(T), key)); + _services.TryAddKeyedScoped(typeof(T), key); + return this; + } +} diff --git a/src/Sa.Schedule/Settings/ScheduleSettings.cs b/src/Sa.Schedule/Settings/ScheduleSettings.cs new file mode 100644 index 0000000..fbfc7df --- /dev/null +++ b/src/Sa.Schedule/Settings/ScheduleSettings.cs @@ -0,0 +1,29 @@ +namespace Sa.Schedule.Settings; + +internal class ScheduleSettings : IScheduleSettings +{ + private Dictionary _storage = []; + + public bool IsHostedService { get; private set; } + + public Func? HandleError { get; private set; } + + public virtual IEnumerable GetJobSettings() => _storage.Values.Where(c => c.Properties.Disabled != true); + + public void UseHostedService() => IsHostedService = true; + + internal static ScheduleSettings Create(IEnumerable jobSettings, bool isHostedService, Func? 
handleError) + { + IEnumerable items = jobSettings.GroupBy( + c => (c.JobId, c.JobType) + , (k, items) => items.Aggregate(seed: new JobSettings(k.JobType, k.JobId), (s1, s2) => s1.Merge(s2)) + ); + + return new ScheduleSettings + { + HandleError = handleError, + IsHostedService = isHostedService, + _storage = items.ToDictionary(c => c.JobId) + }; + } +} diff --git a/src/Sa.Schedule/Setup.cs b/src/Sa.Schedule/Setup.cs new file mode 100644 index 0000000..5d49641 --- /dev/null +++ b/src/Sa.Schedule/Setup.cs @@ -0,0 +1,31 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Sa.Schedule.Engine; +using Sa.Schedule.Settings; +using Sa.Timing.Providers; + +namespace Sa.Schedule; + +/// +/// Provides extension methods for setting up the scheduling system. +/// +public static class Setup +{ + /// + /// Adds the scheduling system to the service collection. + /// + /// The service collection to add the scheduling system to. + /// An action to configure the scheduling system. + /// The service collection with the scheduling system added. + public static IServiceCollection AddSchedule(this IServiceCollection services, Action configure) + { + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + + configure.Invoke(new ScheduleBuilder(services)); + return services; + } +} \ No newline at end of file diff --git a/src/Sa.StateMachine/ISettingsBuilder.cs b/src/Sa.StateMachine/ISettingsBuilder.cs new file mode 100644 index 0000000..f8ebe4f --- /dev/null +++ b/src/Sa.StateMachine/ISettingsBuilder.cs @@ -0,0 +1,8 @@ +namespace Sa.StateMachine; + +public interface ISettingsBuilder where TState : SmState +{ + ISettingsBuilder Add(ITransition transition); + ISettingsBuilder Add(TState start, TState[] ends, object? tag = null); + ISettingsBuilder Add(TState start, Func ends, object? 
tag = null) => Add(start, ends(), tag); +} diff --git a/src/Sa.StateMachine/ISmContext.cs b/src/Sa.StateMachine/ISmContext.cs new file mode 100644 index 0000000..340a5ba --- /dev/null +++ b/src/Sa.StateMachine/ISmContext.cs @@ -0,0 +1,9 @@ +namespace Sa.StateMachine; + +public interface ISmContext where TState : IComparable +{ + TState CurrentState { get; } + Exception? Error { get; } + IReadOnlyCollection NextStates { get; } + CancellationToken CancellationToken { get; } +} diff --git a/src/Sa.StateMachine/ISmProcessor.cs b/src/Sa.StateMachine/ISmProcessor.cs new file mode 100644 index 0000000..ff42591 --- /dev/null +++ b/src/Sa.StateMachine/ISmProcessor.cs @@ -0,0 +1,7 @@ +namespace Sa.StateMachine; + +public interface ISmProcessor where TState : IComparable +{ + ValueTask MoveNext(ISmContext context); + ValueTask Finished(ISmContext context); +} diff --git a/src/Sa.StateMachine/ISmSettings.cs b/src/Sa.StateMachine/ISmSettings.cs new file mode 100644 index 0000000..cbfe450 --- /dev/null +++ b/src/Sa.StateMachine/ISmSettings.cs @@ -0,0 +1,9 @@ +namespace Sa.StateMachine; + +public interface ISmSettings where TState : IComparable +{ + TState StartState { get; } + TState ErrorState { get; } + ITransitionGraph Graph { get; } + IReadOnlyDictionary> Transitions { get; } +} diff --git a/src/Sa.StateMachine/ITransition.cs b/src/Sa.StateMachine/ITransition.cs new file mode 100644 index 0000000..224ad7d --- /dev/null +++ b/src/Sa.StateMachine/ITransition.cs @@ -0,0 +1,8 @@ +namespace Sa.StateMachine; + +public interface ITransition where TState : IComparable +{ + TState Start { get; } + TState[] End { get; } + object? 
Tag { get; } +} diff --git a/src/Sa.StateMachine/ITransitionGraph.cs b/src/Sa.StateMachine/ITransitionGraph.cs new file mode 100644 index 0000000..705c489 --- /dev/null +++ b/src/Sa.StateMachine/ITransitionGraph.cs @@ -0,0 +1,17 @@ +namespace Sa.StateMachine; + +public interface ITransitionGraph where TNode : IComparable +{ + IReadOnlyCollection this[TNode start] { get; } + + IReadOnlyCollection Starts { get; } + IReadOnlyCollection Ends { get; } + + IReadOnlyCollection Roots { get; } + IReadOnlyCollection Leaves { get; } + + IReadOnlyCollection Nodes { get; } + + bool IsRootNode(TNode node); + bool IsLeafNode(TNode node); +} diff --git a/src/Sa.StateMachine/Internal/SettingsBulder.cs b/src/Sa.StateMachine/Internal/SettingsBulder.cs new file mode 100644 index 0000000..f9e6e03 --- /dev/null +++ b/src/Sa.StateMachine/Internal/SettingsBulder.cs @@ -0,0 +1,55 @@ +namespace Sa.StateMachine.Internal; + + +class SettingsBulder() : ISettingsBuilder where TState : SmState +{ + #region ITransition + protected class Transition(TState start, TState[] end, object? tag = null) : ITransition + { + public TState Start => start; + public TState[] End => end; + public object? Tag => tag; + } + #endregion + + private readonly Dictionary> _transitions = []; + + public ISettingsBuilder Add(ITransition transition) + { + _transitions[transition.Start] = transition; + return this; + } + + public ISettingsBuilder Add(TState start, TState[] ends, object? tag = null) + => Add(new Transition(start, ends, tag)); + + private TransitionGraph BuildGraph() + => new(_transitions.Values.SelectMany(c => c.End.Select(end => (c.Start, end)))); + + public SmSettings Build() + { + TransitionGraph graph = BuildGraph(); + + bool reload = false; + // bind transition from start + foreach (var cRoot in graph.Roots.Where(c => c.Kind != StateKind.Start)) + { + reload = true; + Add(SmState.Start, [cRoot]); + } + + TState endState = graph.Leaves.FirstOrDefault(c => c.Kind == StateKind.Finish) ?? 
SmState.Finish; + + // bind transition to end + IEnumerable noEnds = graph.Leaves.Where(c => c.Kind != StateKind.Finish); + foreach (TState enode in noEnds) + { + Add(enode, [endState]); + reload = true; + } + + if (reload) graph = BuildGraph(); + + return new(SmState.Start, SmState.Error, _transitions, graph); + } +} diff --git a/src/Sa.StateMachine/Internal/SmSettings.cs b/src/Sa.StateMachine/Internal/SmSettings.cs new file mode 100644 index 0000000..99f57f2 --- /dev/null +++ b/src/Sa.StateMachine/Internal/SmSettings.cs @@ -0,0 +1,8 @@ +namespace Sa.StateMachine.Internal; + +record SmSettings( + TState StartState, + TState ErrorState, + IReadOnlyDictionary> Transitions, + ITransitionGraph Graph +) : ISmSettings where TState : IComparable; diff --git a/src/Sa.StateMachine/Internal/TransitionGraph.cs b/src/Sa.StateMachine/Internal/TransitionGraph.cs new file mode 100644 index 0000000..022883d --- /dev/null +++ b/src/Sa.StateMachine/Internal/TransitionGraph.cs @@ -0,0 +1,78 @@ +using Sa.Classes; + +namespace Sa.StateMachine.Internal; + +public class TransitionGraph : ITransitionGraph + where TNode : IComparable +{ + private readonly Dictionary<(TNode start, TNode end), object?> _transitions; + + private readonly ResetLazy _starts; + private readonly ResetLazy _ends; + + private readonly ResetLazy _nodes; + private readonly ResetLazy _leaves; + + private readonly ResetLazy _roots; + + private readonly ResetLazy> _nexts; + + + public TransitionGraph(IEnumerable<(TNode start, TNode end)> transitions) + { + _transitions = new(transitions.Select(t => KeyValuePair.Create((t.start, t.end), default(object)))); + + _starts = new(() => _transitions.Keys.Select(t => t.start).Distinct().ToArray()); + _ends = new(() => _transitions.Keys.Select(t => t.end).Distinct().ToArray()); + _nodes = new(() => _starts.Value.Concat(_ends.Value).Distinct().ToArray()); + _leaves = new(() => _ends.Value.Except(_starts.Value).Distinct().ToArray()); + + _roots = new(() => _starts.Value + 
.Except(_transitions.Keys + .Where(c => !IsSelfLoop(c)) + .Select(c => c.end)) + .ToArray()); + + _nexts = new(() => _transitions.GroupBy( + t => t.Key.start, + (start, items) => KeyValuePair.Create(start, items.Select(c => c.Key.end).ToArray())) + .ToDictionary()); + } + + public IReadOnlyCollection this[TNode start] => _nexts.Value.GetValueOrDefault(start) ?? []; + + public IReadOnlyCollection Roots => _roots.Value; + public IReadOnlyCollection Ends => _ends.Value; + public IReadOnlyCollection Leaves => _leaves.Value; + public IReadOnlyCollection Nodes => _nodes.Value; + public IReadOnlyCollection Starts => _starts.Value; + + + public TransitionGraph Add(TNode start, TNode[] ends, object? state = null) + { + Reset(); + foreach ((TNode start, TNode end) transit in ends.Select(end => (start, end))) + { + _transitions[transit] = state; + } + return this; + } + + protected void Reset() + { + _starts.Reset(); + _ends.Reset(); + _roots.Reset(); + _nodes.Reset(); + _leaves.Reset(); + _nexts.Reset(); + } + + public static bool IsSelfLoop((TNode start, TNode end) node) => IsEquals(node.start, node.end); + + public static bool IsEquals(TNode start, TNode end) => start.CompareTo(end) == 0; + + + public bool IsRootNode(TNode node) => Roots.Any(c => c.CompareTo(node) == 0); + public bool IsLeafNode(TNode node) => Leaves.Any(c => c.CompareTo(node) == 0); +} diff --git a/src/Sa.StateMachine/Sa.StateMachine.csproj b/src/Sa.StateMachine/Sa.StateMachine.csproj new file mode 100644 index 0000000..c9a9dd6 --- /dev/null +++ b/src/Sa.StateMachine/Sa.StateMachine.csproj @@ -0,0 +1,13 @@ + + + + net8.0 + enable + enable + + + + + + + diff --git a/src/Sa.StateMachine/SmEnumerable.cs b/src/Sa.StateMachine/SmEnumerable.cs new file mode 100644 index 0000000..b9107b7 --- /dev/null +++ b/src/Sa.StateMachine/SmEnumerable.cs @@ -0,0 +1,77 @@ +namespace Sa.StateMachine; + + +public abstract class SmEnumerable(ISmSettings settings) : IAsyncEnumerable> + where TState : SmState +{ + + sealed class 
SmEnumerator(ISmSettings settings, ISmProcessor processor, CancellationToken cancellationToken) + : IAsyncEnumerator>, ISmContext + { + public TState CurrentState { get; private set; } = settings.StartState; + public Exception? Error { get; private set; } + public IReadOnlyCollection NextStates { get; private set; } = []; + public CancellationToken CancellationToken => cancellationToken; + + ISmContext IAsyncEnumerator>.Current => this; + + public override string ToString() => $"{CurrentState}"; + + async ValueTask IAsyncEnumerator>.MoveNextAsync() + { + if (cancellationToken.IsCancellationRequested) return false; + + try + { + NextStates = settings.Graph[CurrentState]; + + TState nextState = await processor.MoveNext(this); + + if (!NextStates.Contains(nextState)) + throw new ArgumentException($"Expected {NextStates} but found {nextState}"); + + if (nextState.Kind == StateKind.Finish || settings.Graph.IsLeafNode(nextState)) + { + await processor.Finished(this); + return false; + } + + CurrentState = nextState; + return true; + } + catch (Exception error) + { + Error = error; + CurrentState = settings.ErrorState; + } + + return true; + } + + ValueTask IAsyncDisposable.DisposeAsync() => ValueTask.CompletedTask; + } + + protected abstract ISmProcessor CreateProcessor(); + + public virtual IAsyncEnumerator> GetAsyncEnumerator(CancellationToken cancellationToken = default) + => new SmEnumerator(settings, CreateProcessor(), cancellationToken); + + + public async virtual ValueTask<(TState state, Exception? 
error)> Run(CancellationToken cancellationToken = default) + { + var inumerator = GetAsyncEnumerator(cancellationToken); + try + { + while (await inumerator.MoveNextAsync()) + { + // not used + } + + return (inumerator.Current.CurrentState, inumerator.Current.Error); + } + finally + { + await inumerator.DisposeAsync(); + } + } +} diff --git a/src/Sa.StateMachine/SmLongProcess.cs b/src/Sa.StateMachine/SmLongProcess.cs new file mode 100644 index 0000000..fe3b0b6 --- /dev/null +++ b/src/Sa.StateMachine/SmLongProcess.cs @@ -0,0 +1,52 @@ +namespace Sa.StateMachine; + + +public abstract class SmLongProcess : SmEnumerable +{ + public record State : SmState + { + public static readonly State WaitingToRun = Create(SmStateId.WaitingToRun, nameof(WaitingToRun)); + public static readonly State Running = Create(SmStateId.Running, nameof(Running)); + public static readonly State Succeed = Create(SmStateId.Succeed, nameof(Succeed)); + + + public static readonly ISmSettings Settings = BuildSettings(builder => builder + .Add(Start, [WaitingToRun]) + .Add(WaitingToRun, [WaitingToRun, Running]) + .Add(Running, [Succeed, Error]) + .Add(Succeed, [Finish]) + .Add(Error, [Finish]) + ); + + public State(int id, string name, StateKind state) + : base(id, name, state) + { + } + } + + protected SmLongProcess() : base(State.Settings) + { + } + + protected override ISmProcessor CreateProcessor() => new Processor(); + + public class Processor : ISmProcessor + { + public ValueTask Finished(ISmContext context) + { + return ValueTask.CompletedTask; + } + + public virtual ValueTask MoveNext(ISmContext context) + { + return context.CurrentState.Id switch + { + SmStateId.Start => ValueTask.FromResult(State.WaitingToRun), + SmStateId.WaitingToRun => ValueTask.FromResult(State.Running), + SmStateId.Running => ValueTask.FromResult(State.Succeed), + SmStateId.Succeed => ValueTask.FromResult(State.Finish), + _ => throw new NotImplementedException(), + }; + } + } +} diff --git 
a/src/Sa.StateMachine/SmState.cs b/src/Sa.StateMachine/SmState.cs new file mode 100644 index 0000000..5d8f8f6 --- /dev/null +++ b/src/Sa.StateMachine/SmState.cs @@ -0,0 +1,45 @@ +using Sa.Classes; +using Sa.StateMachine.Internal; + +namespace Sa.StateMachine; + + +public abstract record SmState : Enumeration + where TState : SmState +{ + + public static readonly TState Start = CreateStart(); + public static readonly TState Error = CreateError(); + public static readonly TState Finish = CreateFinish(); + + protected SmState(int id, string name, StateKind kind = StateKind.Default) + : base(id, name) + { + Kind = kind; + } + + public StateKind Kind { get; } = StateKind.Default; + + public static ISmSettings BuildSettings(Action> configure) + { + SettingsBulder sb = new(); + configure.Invoke(sb); + return sb.Build(); + } + + public static TState Create(int id, string name) + { + id = id < 100 ? throw new ArgumentException("id must be greater than `99`") : id; + name = name ?? throw new ArgumentNullException(nameof(name)); + return (TState)Activator.CreateInstance(typeof(TState), id, name, StateKind.Default)!; + } + + private static TState CreateStart() + => (TState)Activator.CreateInstance(typeof(TState), SmStateId.Start, nameof(StateKind.Start), StateKind.Start)!; + + private static TState CreateFinish() + => (TState)Activator.CreateInstance(typeof(TState), SmStateId.Finish, nameof(StateKind.Finish), StateKind.Finish)!; + + private static TState CreateError() + => (TState)Activator.CreateInstance(typeof(TState), SmStateId.Error, nameof(StateKind.Error), StateKind.Error)!; +} diff --git a/src/Sa.StateMachine/SmStateId.cs b/src/Sa.StateMachine/SmStateId.cs new file mode 100644 index 0000000..e327139 --- /dev/null +++ b/src/Sa.StateMachine/SmStateId.cs @@ -0,0 +1,13 @@ +namespace Sa.StateMachine; + +public static class SmStateId +{ + // sys ids + public const int Start = 1; + public const int Finish = 0; + public const int Error = -1; + // user ids + public const int 
WaitingToRun = 101; + public const int Running = 102; + public const int Succeed = 200; +} diff --git a/src/Sa.StateMachine/StateKind.cs b/src/Sa.StateMachine/StateKind.cs new file mode 100644 index 0000000..e80545d --- /dev/null +++ b/src/Sa.StateMachine/StateKind.cs @@ -0,0 +1,9 @@ +namespace Sa.StateMachine; + +public enum StateKind +{ + Default, + Start, + Error, + Finish +} diff --git a/src/Sa.sln b/src/Sa.sln new file mode 100644 index 0000000..2a49a71 --- /dev/null +++ b/src/Sa.sln @@ -0,0 +1,201 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.11.35312.102 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sa", "Sa\Sa.csproj", "{BB3A8ECF-5D33-4C41-AF4E-BD384528AAC0}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sa.Schedule", "Sa.Schedule\Sa.Schedule.csproj", "{D8098DBA-E1D4-4005-A062-541FAC08C5E6}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Samples", "Samples", "{610D8708-2CB6-403A-B865-3C1FE0115519}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Schedule.Console", "Samples\Schedule.Console\Schedule.Console.csproj", "{175E26B3-95F8-44C5-AE18-F7F310A3D04B}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sa.StateMachine", "Sa.StateMachine\Sa.StateMachine.csproj", "{6FC2E5C6-41E6-4C64-9BF5-912E40BB8E02}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "StateMachine.Console", "Samples\StateMachine.Console\StateMachine.Console.csproj", "{F8C07ACF-8036-4045-B429-850A2AF8674D}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tests", "Tests", "{BDD5208F-0CF4-4C6A-B443-6527514C89E8}" + ProjectSection(SolutionItems) = preProject + Tests\Host.Test.Properties.xml = Tests\Host.Test.Properties.xml + EndProjectSection +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sa.Outbox", "Sa.Outbox\Sa.Outbox.csproj", 
"{958CAA75-B8D6-4356-B4AA-B75658C17374}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sa.Outbox.PostgreSql", "Sa.Outbox.PostgreSql\Sa.Outbox.PostgreSql.csproj", "{2EA016FD-E9EA-4231-89F1-63737868EAB2}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Outbox", "Outbox", "{6197ECD9-AA02-4EF1-AAD0-D84D428A7352}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Schedule", "Schedule", "{272C3FEB-29B5-4619-8CAC-9A750E55604A}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StateMachine", "StateMachine", "{2719F62C-D1D5-469A-BA1D-EB2999244158}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Data", "Data", "{F9621075-6836-47F5-9160-F9C4743F6147}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sa.Data.PostgreSql", "Sa.Data.PostgreSql\Sa.Data.PostgreSql.csproj", "{F899211C-2E6E-4DF8-839A-11D5CB9B04D3}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sa.Outbox.Support", "Sa.Outbox.Attributes\Sa.Outbox.Support.csproj", "{CC86704D-43C7-44E7-BCF5-E59140CE41DE}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sa.Data.Cache", "Sa.Data.Cache\Sa.Data.Cache.csproj", "{ABE20E99-6F54-4418-8577-E532DCBB462C}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Cache", "Cache", "{0BF06FFE-437E-4B1F-98C9-9E8D39AE902E}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "PostgreSql", "PostgreSql", "{96FCE737-A321-445B-8EC1-05D8863461EA}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Media", "Media", "{7B85862F-5BDB-43DF-AD55-F850DE04120E}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sa.Media", "Sa.Media\Sa.Media.csproj", "{FFA9194D-D6B2-4350-BE28-B2288A26637A}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sa.MediaTests", "Tests\Sa.MediaTests\Sa.MediaTests.csproj", "{F776F33F-E44B-4FF5-BFB6-43C98667FD66}" +EndProject 
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sa.Data.PostgreSql.Migration", "Sa.Data.PostgreSql.Migration\Sa.Data.PostgreSql.Migration.csproj", "{075F4DEC-4E00-4982-A634-BDEBD7F63A70}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Fixtures", "Fixtures", "{6FAA2520-3F94-4BD7-A67E-87A9961E094E}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{161802B9-8D73-45AF-B52B-62A416CD9FBC}" + ProjectSection(SolutionItems) = preProject + Common.Properties.xml = Common.Properties.xml + Tests\Host.Test.Properties.xml = Tests\Host.Test.Properties.xml + EndProjectSection +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sa.Data.PostgreSqlTests", "Tests\Sa.Data.PostgreSqlTests\Sa.Data.PostgreSqlTests.csproj", "{0A171BD6-3200-4FE8-9C19-B44E3B84529F}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sa.Fixture", "Tests\Fixtures\Sa.Fixture\Sa.Fixture.csproj", "{8D998D31-07BA-4185-B273-03EC3C68DA35}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sa.Data.PostgreSql.Fixture", "Tests\Fixtures\Sa.Data.PostgreSql.Fixture\Sa.Data.PostgreSql.Fixture.csproj", "{944004BF-81CA-4953-B482-AAE275CE601D}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sa.Outbox.PostgreSqlTests", "Tests\Sa.Outbox.PostgreSqlTests\Sa.Outbox.PostgreSqlTests.csproj", "{52F64196-9FA5-4B50-AE71-CF6C4DF4B23E}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SaTests", "Tests\SaTests\SaTests.csproj", "{4C199038-F002-45B6-9158-2FD27A7392DC}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Partitional", "Partitional", "{71B86B2B-03B6-4637-BC64-5E09BAF88192}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sa.Partitional.PostgreSql", "Sa.Partitional.PostgreSql\Sa.Partitional.PostgreSql.csproj", "{23D36989-1D57-40AE-970D-085B18283D44}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = 
"Sa.Partitional.PostgreSqlTests", "Tests\Sa.Partitional.PostgreSqlTests\Sa.Partitional.PostgreSqlTests.csproj", "{F35C5D81-7632-46A2-82C0-CAC9AC65AF30}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sa.ScheduleTests", "Tests\Sa.ScheduleTests\Sa.ScheduleTests.csproj", "{4FEF2F15-D4DC-416A-8640-66152F42CDFE}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {BB3A8ECF-5D33-4C41-AF4E-BD384528AAC0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BB3A8ECF-5D33-4C41-AF4E-BD384528AAC0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BB3A8ECF-5D33-4C41-AF4E-BD384528AAC0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BB3A8ECF-5D33-4C41-AF4E-BD384528AAC0}.Release|Any CPU.Build.0 = Release|Any CPU + {D8098DBA-E1D4-4005-A062-541FAC08C5E6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D8098DBA-E1D4-4005-A062-541FAC08C5E6}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D8098DBA-E1D4-4005-A062-541FAC08C5E6}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D8098DBA-E1D4-4005-A062-541FAC08C5E6}.Release|Any CPU.Build.0 = Release|Any CPU + {175E26B3-95F8-44C5-AE18-F7F310A3D04B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {175E26B3-95F8-44C5-AE18-F7F310A3D04B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {175E26B3-95F8-44C5-AE18-F7F310A3D04B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {175E26B3-95F8-44C5-AE18-F7F310A3D04B}.Release|Any CPU.Build.0 = Release|Any CPU + {6FC2E5C6-41E6-4C64-9BF5-912E40BB8E02}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6FC2E5C6-41E6-4C64-9BF5-912E40BB8E02}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6FC2E5C6-41E6-4C64-9BF5-912E40BB8E02}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6FC2E5C6-41E6-4C64-9BF5-912E40BB8E02}.Release|Any CPU.Build.0 = Release|Any CPU + {F8C07ACF-8036-4045-B429-850A2AF8674D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{F8C07ACF-8036-4045-B429-850A2AF8674D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F8C07ACF-8036-4045-B429-850A2AF8674D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F8C07ACF-8036-4045-B429-850A2AF8674D}.Release|Any CPU.Build.0 = Release|Any CPU + {958CAA75-B8D6-4356-B4AA-B75658C17374}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {958CAA75-B8D6-4356-B4AA-B75658C17374}.Debug|Any CPU.Build.0 = Debug|Any CPU + {958CAA75-B8D6-4356-B4AA-B75658C17374}.Release|Any CPU.ActiveCfg = Release|Any CPU + {958CAA75-B8D6-4356-B4AA-B75658C17374}.Release|Any CPU.Build.0 = Release|Any CPU + {2EA016FD-E9EA-4231-89F1-63737868EAB2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2EA016FD-E9EA-4231-89F1-63737868EAB2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2EA016FD-E9EA-4231-89F1-63737868EAB2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2EA016FD-E9EA-4231-89F1-63737868EAB2}.Release|Any CPU.Build.0 = Release|Any CPU + {F899211C-2E6E-4DF8-839A-11D5CB9B04D3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F899211C-2E6E-4DF8-839A-11D5CB9B04D3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F899211C-2E6E-4DF8-839A-11D5CB9B04D3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F899211C-2E6E-4DF8-839A-11D5CB9B04D3}.Release|Any CPU.Build.0 = Release|Any CPU + {CC86704D-43C7-44E7-BCF5-E59140CE41DE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {CC86704D-43C7-44E7-BCF5-E59140CE41DE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {CC86704D-43C7-44E7-BCF5-E59140CE41DE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {CC86704D-43C7-44E7-BCF5-E59140CE41DE}.Release|Any CPU.Build.0 = Release|Any CPU + {ABE20E99-6F54-4418-8577-E532DCBB462C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {ABE20E99-6F54-4418-8577-E532DCBB462C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {ABE20E99-6F54-4418-8577-E532DCBB462C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {ABE20E99-6F54-4418-8577-E532DCBB462C}.Release|Any CPU.Build.0 = Release|Any CPU + {FFA9194D-D6B2-4350-BE28-B2288A26637A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{FFA9194D-D6B2-4350-BE28-B2288A26637A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FFA9194D-D6B2-4350-BE28-B2288A26637A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FFA9194D-D6B2-4350-BE28-B2288A26637A}.Release|Any CPU.Build.0 = Release|Any CPU + {F776F33F-E44B-4FF5-BFB6-43C98667FD66}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F776F33F-E44B-4FF5-BFB6-43C98667FD66}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F776F33F-E44B-4FF5-BFB6-43C98667FD66}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F776F33F-E44B-4FF5-BFB6-43C98667FD66}.Release|Any CPU.Build.0 = Release|Any CPU + {075F4DEC-4E00-4982-A634-BDEBD7F63A70}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {075F4DEC-4E00-4982-A634-BDEBD7F63A70}.Debug|Any CPU.Build.0 = Debug|Any CPU + {075F4DEC-4E00-4982-A634-BDEBD7F63A70}.Release|Any CPU.ActiveCfg = Release|Any CPU + {075F4DEC-4E00-4982-A634-BDEBD7F63A70}.Release|Any CPU.Build.0 = Release|Any CPU + {0A171BD6-3200-4FE8-9C19-B44E3B84529F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0A171BD6-3200-4FE8-9C19-B44E3B84529F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0A171BD6-3200-4FE8-9C19-B44E3B84529F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0A171BD6-3200-4FE8-9C19-B44E3B84529F}.Release|Any CPU.Build.0 = Release|Any CPU + {8D998D31-07BA-4185-B273-03EC3C68DA35}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8D998D31-07BA-4185-B273-03EC3C68DA35}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8D998D31-07BA-4185-B273-03EC3C68DA35}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8D998D31-07BA-4185-B273-03EC3C68DA35}.Release|Any CPU.Build.0 = Release|Any CPU + {944004BF-81CA-4953-B482-AAE275CE601D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {944004BF-81CA-4953-B482-AAE275CE601D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {944004BF-81CA-4953-B482-AAE275CE601D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {944004BF-81CA-4953-B482-AAE275CE601D}.Release|Any CPU.Build.0 = Release|Any CPU + {52F64196-9FA5-4B50-AE71-CF6C4DF4B23E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{52F64196-9FA5-4B50-AE71-CF6C4DF4B23E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {52F64196-9FA5-4B50-AE71-CF6C4DF4B23E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {52F64196-9FA5-4B50-AE71-CF6C4DF4B23E}.Release|Any CPU.Build.0 = Release|Any CPU + {4C199038-F002-45B6-9158-2FD27A7392DC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4C199038-F002-45B6-9158-2FD27A7392DC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4C199038-F002-45B6-9158-2FD27A7392DC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4C199038-F002-45B6-9158-2FD27A7392DC}.Release|Any CPU.Build.0 = Release|Any CPU + {23D36989-1D57-40AE-970D-085B18283D44}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {23D36989-1D57-40AE-970D-085B18283D44}.Debug|Any CPU.Build.0 = Debug|Any CPU + {23D36989-1D57-40AE-970D-085B18283D44}.Release|Any CPU.ActiveCfg = Release|Any CPU + {23D36989-1D57-40AE-970D-085B18283D44}.Release|Any CPU.Build.0 = Release|Any CPU + {F35C5D81-7632-46A2-82C0-CAC9AC65AF30}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F35C5D81-7632-46A2-82C0-CAC9AC65AF30}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F35C5D81-7632-46A2-82C0-CAC9AC65AF30}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F35C5D81-7632-46A2-82C0-CAC9AC65AF30}.Release|Any CPU.Build.0 = Release|Any CPU + {4FEF2F15-D4DC-416A-8640-66152F42CDFE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4FEF2F15-D4DC-416A-8640-66152F42CDFE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4FEF2F15-D4DC-416A-8640-66152F42CDFE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4FEF2F15-D4DC-416A-8640-66152F42CDFE}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {D8098DBA-E1D4-4005-A062-541FAC08C5E6} = {272C3FEB-29B5-4619-8CAC-9A750E55604A} + {175E26B3-95F8-44C5-AE18-F7F310A3D04B} = {610D8708-2CB6-403A-B865-3C1FE0115519} + {6FC2E5C6-41E6-4C64-9BF5-912E40BB8E02} = {2719F62C-D1D5-469A-BA1D-EB2999244158} + 
{F8C07ACF-8036-4045-B429-850A2AF8674D} = {610D8708-2CB6-403A-B865-3C1FE0115519} + {958CAA75-B8D6-4356-B4AA-B75658C17374} = {6197ECD9-AA02-4EF1-AAD0-D84D428A7352} + {2EA016FD-E9EA-4231-89F1-63737868EAB2} = {6197ECD9-AA02-4EF1-AAD0-D84D428A7352} + {F899211C-2E6E-4DF8-839A-11D5CB9B04D3} = {96FCE737-A321-445B-8EC1-05D8863461EA} + {CC86704D-43C7-44E7-BCF5-E59140CE41DE} = {6197ECD9-AA02-4EF1-AAD0-D84D428A7352} + {ABE20E99-6F54-4418-8577-E532DCBB462C} = {0BF06FFE-437E-4B1F-98C9-9E8D39AE902E} + {0BF06FFE-437E-4B1F-98C9-9E8D39AE902E} = {F9621075-6836-47F5-9160-F9C4743F6147} + {96FCE737-A321-445B-8EC1-05D8863461EA} = {F9621075-6836-47F5-9160-F9C4743F6147} + {FFA9194D-D6B2-4350-BE28-B2288A26637A} = {7B85862F-5BDB-43DF-AD55-F850DE04120E} + {F776F33F-E44B-4FF5-BFB6-43C98667FD66} = {BDD5208F-0CF4-4C6A-B443-6527514C89E8} + {075F4DEC-4E00-4982-A634-BDEBD7F63A70} = {96FCE737-A321-445B-8EC1-05D8863461EA} + {6FAA2520-3F94-4BD7-A67E-87A9961E094E} = {BDD5208F-0CF4-4C6A-B443-6527514C89E8} + {0A171BD6-3200-4FE8-9C19-B44E3B84529F} = {BDD5208F-0CF4-4C6A-B443-6527514C89E8} + {8D998D31-07BA-4185-B273-03EC3C68DA35} = {6FAA2520-3F94-4BD7-A67E-87A9961E094E} + {944004BF-81CA-4953-B482-AAE275CE601D} = {6FAA2520-3F94-4BD7-A67E-87A9961E094E} + {52F64196-9FA5-4B50-AE71-CF6C4DF4B23E} = {BDD5208F-0CF4-4C6A-B443-6527514C89E8} + {4C199038-F002-45B6-9158-2FD27A7392DC} = {BDD5208F-0CF4-4C6A-B443-6527514C89E8} + {23D36989-1D57-40AE-970D-085B18283D44} = {71B86B2B-03B6-4637-BC64-5E09BAF88192} + {F35C5D81-7632-46A2-82C0-CAC9AC65AF30} = {BDD5208F-0CF4-4C6A-B443-6527514C89E8} + {4FEF2F15-D4DC-416A-8640-66152F42CDFE} = {BDD5208F-0CF4-4C6A-B443-6527514C89E8} + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {968372C4-E303-4F1F-A924-7568CCDE3E7C} + EndGlobalSection +EndGlobal diff --git a/src/Sa/Classes/ArrayPooler.cs b/src/Sa/Classes/ArrayPooler.cs new file mode 100644 index 0000000..29500e7 --- /dev/null +++ b/src/Sa/Classes/ArrayPooler.cs @@ -0,0 +1,28 @@ +using 
System.Buffers; + +namespace Sa.Classes; + + +public interface IArrayPooler +{ + T[] Rent(int minimumLength); + void Return(T[] array, bool clearArray = false); +} + +public interface IArrayPoolFactory +{ + IArrayPooler Create(); +} + + + +internal class ArrayPooler : IArrayPooler +{ + public T[] Rent(int minimumLength) => ArrayPool.Shared.Rent(minimumLength); + public void Return(T[] array, bool clearArray = false) => ArrayPool.Shared.Return(array, clearArray); +} + +internal class ArrayPoolFactory : IArrayPoolFactory +{ + public IArrayPooler Create() => new ArrayPooler(); +} diff --git a/src/Sa/Classes/Enumeration.cs b/src/Sa/Classes/Enumeration.cs new file mode 100644 index 0000000..50eeed8 --- /dev/null +++ b/src/Sa/Classes/Enumeration.cs @@ -0,0 +1,67 @@ +using Sa.Infrastructure; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Reflection; + +namespace Sa.Classes; + +/// +/// https://josef.codes/enumeration-class-in-c-sharp-using-records/ +/// +/// +public record Enumeration(int Id, string Name) : IHasId, IComparable + where T : Enumeration +{ + + private static readonly Lazy> AllItems = new(() => + { + return typeof(T) + .GetFields(BindingFlags.Public | BindingFlags.Static | BindingFlags.DeclaredOnly) + .Where(x => x.FieldType == typeof(T)) + .Select(x => x.GetValue(null)) + .Cast() + .ToDictionary(x => x.Id, x => x); + }); + + private static readonly Lazy> AllItemsByName = new(() => + { + Dictionary items = new(AllItems.Value.Count); + foreach (T? value in AllItems.Value.Select(c => c.Value)) + { + items.TryAdd(value.Name, value); + } + return items; + }); + + [DebuggerStepThrough] + public static IEnumerable GetAll() => AllItems.Value.Values; + + [DebuggerStepThrough] + public static int DiffId(Enumeration firstId, Enumeration secondId) + => Math.Abs(firstId.Id - secondId.Id); + + [DebuggerStepThrough] + public static T FromId(int id) + => TryFromId(id, out var matchingItem) + ? 
matchingItem + : throw new InvalidOperationException($"'{id}' is not a valid value in {typeof(T)}"); + + [DebuggerStepThrough] + public static T FromName(string name) + => (TryFromName(name, out var matchingItem)) + ? matchingItem + : throw new InvalidOperationException($"'{name}' is not a valid display name in {typeof(T)}"); + + [DebuggerStepThrough] + public static bool TryFromName(string name, [MaybeNullWhen(false)] out T item) + => AllItemsByName.Value.TryGetValue(name, out item); + + [DebuggerStepThrough] + public static bool TryFromId(int id, [MaybeNullWhen(false)] out T item) + => AllItems.Value.TryGetValue(id, out item); + + [DebuggerStepThrough] + public int CompareTo(T? other) => Id.CompareTo(other!.Id); + + public override string ToString() => Name; +} diff --git a/src/Sa/Classes/KeepLocker.cs b/src/Sa/Classes/KeepLocker.cs new file mode 100644 index 0000000..1272ebc --- /dev/null +++ b/src/Sa/Classes/KeepLocker.cs @@ -0,0 +1,41 @@ +namespace Sa.Classes; + +public static class KeepLocker +{ + public static IDisposable KeepLocked(TimeSpan lockExpiration, Func extendLocked, bool blockImmediately = false, CancellationToken cancellationToken = default) + { + var timer = new PeriodicTimer(lockExpiration); + var task = Task.Run(async () => + { + try + { + if (blockImmediately) + { + await extendLocked(cancellationToken); + } + + while (await timer.WaitForNextTickAsync(cancellationToken)) + { + await extendLocked(cancellationToken); + } + } + catch + { + // ignore + } + }, cancellationToken); + + IDisposable keeper = new DisposableTimer(timer, task); + + return keeper; + } + + private sealed class DisposableTimer(PeriodicTimer timer, Task task) : IDisposable + { + public void Dispose() + { + timer.Dispose(); + task.Wait(); // Ожидание завершения задачи перед освобождением ресурсов + } + } +} \ No newline at end of file diff --git a/src/Sa/Classes/MurmurHash3.cs b/src/Sa/Classes/MurmurHash3.cs new file mode 100644 index 0000000..cf0a8cc --- /dev/null +++ 
b/src/Sa/Classes/MurmurHash3.cs @@ -0,0 +1,57 @@ +using System.Runtime.CompilerServices; + +namespace Sa.Classes; + +/// +/// https://github.com/odinmillion/MurmurHash.Net/blob/master/src/MurmurHash.Net/MurmurHash3.cs +/// +public static class MurmurHash3 +{ + [System.Diagnostics.CodeAnalysis.SuppressMessage("Major Code Smell", "S907:\"goto\" statement should not be used", Justification = "")] + public static uint Hash32(ReadOnlySpan bytes, uint seed) + { + var length = bytes.Length; + var h1 = seed; + var remainder = length & 3; + var position = length - remainder; + for (var start = 0; start < position; start += 4) + h1 = (uint)((int)RotateLeft(h1 ^ RotateLeft(BitConverter.ToUInt32(bytes.Slice(start, 4)) * 3432918353U, 15) * 461845907U, 13) * 5 - 430675100); + + if (remainder > 0) + { + uint num = 0; + switch (remainder) + { + case 1: + num ^= (uint)bytes[position]; + break; + case 2: + num ^= (uint)bytes[position + 1] << 8; + goto case 1; + case 3: + num ^= (uint)bytes[position + 2] << 16; + goto case 2; + } + + h1 ^= RotateLeft(num * 3432918353U, 15) * 461845907U; + } + + h1 = FMix(h1 ^ (uint)length); + + return h1; + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static uint RotateLeft(uint x, byte r) + { + return x << (int)r | x >> 32 - (int)r; + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static uint FMix(uint h) + { + h = (uint)(((int)h ^ (int)(h >> 16)) * -2048144789); + h = (uint)(((int)h ^ (int)(h >> 13)) * -1028477387); + return h ^ h >> 16; + } +} \ No newline at end of file diff --git a/src/Sa/Classes/Quartz.cs b/src/Sa/Classes/Quartz.cs new file mode 100644 index 0000000..336e9fb --- /dev/null +++ b/src/Sa/Classes/Quartz.cs @@ -0,0 +1,120 @@ +using System.Diagnostics; + +namespace Sa.Classes; + +public static partial class Quartz +{ + private static IEnumerable Empty() => []; + + private static void ValidateParameters(TimeSpan delay, int retryCount, string delayParamName) + { + if (delay < TimeSpan.Zero) 
throw new ArgumentOutOfRangeException(delayParamName, delay, "should be >= 0ms"); + if (retryCount < 0) throw new ArgumentOutOfRangeException(nameof(retryCount), retryCount, "should be >= 0"); + } + + [DebuggerStepThrough] + public static IEnumerable GenerateConstant(TimeSpan delay, int retryCount, bool fastFirst = false) + { + ValidateParameters(delay, retryCount, nameof(delay)); + return retryCount == 0 ? Empty() : Generator.GenConstant(delay, retryCount, fastFirst); + } + + [DebuggerStepThrough] + public static IEnumerable GenerateLinear(TimeSpan initialDelay, int retryCount, double factor = 1.0, bool fastFirst = true) + { + ValidateParameters(initialDelay, retryCount, nameof(initialDelay)); + if (factor < 0) throw new ArgumentOutOfRangeException(nameof(factor), factor, "should be >= 0"); + + return retryCount == 0 ? Empty() : Generator.GenLinear(initialDelay, retryCount, factor, fastFirst); + } + + [DebuggerStepThrough] + public static IEnumerable GenerateExponential(TimeSpan initialDelay, int retryCount, double factor = 2.0, bool fastFirst = true) + { + ValidateParameters(initialDelay, retryCount, nameof(initialDelay)); + if (factor < 1.0) throw new ArgumentOutOfRangeException(nameof(factor), factor, "should be >= 1.0"); + + return retryCount == 0 ? Empty() : Generator.GenExponential(initialDelay, retryCount, factor, fastFirst); + } + + [DebuggerStepThrough] + public static IEnumerable GenerateJitter(TimeSpan medianFirstRetryDelay, int retryCount, bool fastFirst = true) + { + ValidateParameters(medianFirstRetryDelay, retryCount, nameof(medianFirstRetryDelay)); + return retryCount == 0 ? Empty() : Generator.GenJitter(medianFirstRetryDelay, retryCount, fastFirst); + } + + + static class Generator + { + + public static IEnumerable GenConstant(TimeSpan delay, int retryCount, bool fastFirst) + { + if (fastFirst) + { + yield return TimeSpan.Zero; + } + + for (int i = fastFirst ? 
1 : 0; i < retryCount; i++) + { + yield return delay; + } + } + + public static IEnumerable GenLinear(TimeSpan initialDelay, int retryCount, double factor, bool fastFirst) + { + if (fastFirst) + { + yield return TimeSpan.Zero; + } + + double ms = initialDelay.TotalMilliseconds; + double increment = factor * ms; + + for (int i = fastFirst ? 1 : 0; i < retryCount; i++, ms += increment) + { + yield return TimeSpan.FromMilliseconds(ms); + } + } + + public static IEnumerable GenExponential(TimeSpan initialDelay, int retryCount, double factor, bool fastFirst) + { + if (fastFirst) + { + yield return TimeSpan.Zero; + } + + double ms = initialDelay.TotalMilliseconds; + + for (int i = fastFirst ? 1 : 0; i < retryCount; i++, ms *= factor) + { + yield return TimeSpan.FromMilliseconds(ms); + } + } + + public static IEnumerable GenJitter(TimeSpan medianFirstRetryDelay, int retryCount, bool fastFirst) + { + const double pFactor = 4.0; + const double rpScalingFactor = 1 / 1.4d; + double maxTimeSpanDouble = (double)TimeSpan.MaxValue.Ticks - 1000; + + if (fastFirst) + { + yield return TimeSpan.Zero; + } + + long targetTicksFirstDelay = medianFirstRetryDelay.Ticks; + double prev = 0.0; + + for (int i = fastFirst ? 1 : 0; i < retryCount; i++) + { + double t = i + Random.Shared.NextDouble(); + double next = Math.Pow(2, t) * Math.Tanh(Math.Sqrt(pFactor * t)); + double formulaIntrinsicValue = next - prev; + + yield return TimeSpan.FromTicks((long)Math.Min(formulaIntrinsicValue * rpScalingFactor * targetTicksFirstDelay, maxTimeSpanDouble)); + prev = next; + } + } + } +} diff --git a/src/Sa/Classes/ResetLazy.cs b/src/Sa/Classes/ResetLazy.cs new file mode 100644 index 0000000..6514ef0 --- /dev/null +++ b/src/Sa/Classes/ResetLazy.cs @@ -0,0 +1,105 @@ +using System.Diagnostics; + +namespace Sa.Classes; + +public interface IResetLazy +{ + object? 
Value { get; } + void Reset(); + void Load(); +} + + +/// +/// Provides support for lazy initialization with reset +/// +/// The type of object that is being lazily initialized. +[DebuggerStepThrough] +public sealed class ResetLazy(Func valueFactory, LazyThreadSafetyMode mode = LazyThreadSafetyMode.PublicationOnly, Action? valueReset = null) : IResetLazy +{ + record Box(T Value); + + private readonly Func _valueFactory = valueFactory ?? throw new ArgumentNullException(nameof(valueFactory)); + + private readonly object _syncLock = new(); + + private Box? _box; + + public T Value + { + [DebuggerStepThrough] + get + { + Box? b1 = _box; + if (b1 != null) + return b1.Value; + + if (mode == LazyThreadSafetyMode.ExecutionAndPublication) + { + lock (_syncLock) + { + Box? b2 = _box; + if (b2 != null) + return b2.Value; + + _box = new Box(CreateValue()); + + return _box.Value; + } + } + else if (mode == LazyThreadSafetyMode.PublicationOnly) + { + T newValue = CreateValue(); + + lock (_syncLock) + { + Box? b2 = _box; + if (b2 != null) + return b2.Value; + + _box = new Box(newValue); + + return _box.Value; + } + } + else + { + Box? b = new(CreateValue()); + _box = b; + return b.Value; + } + } + } + + private T CreateValue() => _valueFactory(); + + public void Load() => _ = Value; + + public bool IsValueCreated => _box != null; + + object? IResetLazy.Value => Value; + + public void Reset() + { + if (mode != LazyThreadSafetyMode.None) + { + lock (_syncLock) + { + ResetBox(); + } + } + else + { + ResetBox(); + } + } + + private void ResetBox() + { + if (IsValueCreated) + { + valueReset?.Invoke(_box!.Value); + _box = null; + } + } +} diff --git a/src/Sa/Classes/Retry.cs b/src/Sa/Classes/Retry.cs new file mode 100644 index 0000000..7477e90 --- /dev/null +++ b/src/Sa/Classes/Retry.cs @@ -0,0 +1,227 @@ +using Sa.Extensions; +using System.Diagnostics; + +namespace Sa.Classes; + + + +public static class Retry +{ + /// + /// For example: 500ms, 500ms, 500ms ... 
/// <summary>
/// Retries with a fixed pause between attempts, e.g. 500ms, 500ms, 500ms ...
/// </summary>
[DebuggerStepThrough]
public static ValueTask<T> Constant<I, T>(
    Func<I, CancellationToken, ValueTask<T>> fun,
    I input,
    int retryCount = 3,
    int waitTime = 500,
    Func<Exception, int, bool>? next = null,
    CancellationToken cancellationToken = default)
    => Quartz
        .GenerateConstant(TimeSpan.FromMilliseconds(waitTime), retryCount, fastFirst: true)
        .WaitAndRetry(fun, input, next, cancellationToken: cancellationToken);

/// <summary>
/// Retries with a fixed pause between attempts, e.g. 500ms, 500ms, 500ms ...
/// </summary>
[DebuggerStepThrough]
public static ValueTask<T> Constant<T>(
    Func<CancellationToken, ValueTask<T>> fun,
    int retryCount = 3,
    int waitTime = 500,
    Func<Exception, int, bool>? next = null,
    CancellationToken cancellationToken = default)
    => Quartz
        .GenerateConstant(TimeSpan.FromMilliseconds(waitTime), retryCount, fastFirst: true)
        .WaitAndRetry(fun, next, cancellationToken: cancellationToken);

/// <summary>
/// Retries with delays doubling each attempt, e.g. 100ms, 200ms, 400ms, 800ms, ...
/// </summary>
[DebuggerStepThrough]
public static ValueTask<T> Exponential<I, T>(
    Func<I, CancellationToken, ValueTask<T>> fun,
    I input,
    int retryCount = 3,
    int initialDelay = 100,
    Func<Exception, int, bool>? next = null,
    CancellationToken cancellationToken = default)
    => Quartz
        .GenerateExponential(TimeSpan.FromMilliseconds(initialDelay), retryCount, fastFirst: true)
        .WaitAndRetry(fun, input, next, cancellationToken: cancellationToken);

/// <summary>
/// Retries with delays doubling each attempt, e.g. 100ms, 200ms, 400ms, 800ms, ...
/// </summary>
[DebuggerStepThrough]
public static ValueTask<T> Exponential<T>(
    Func<CancellationToken, ValueTask<T>> fun,
    int retryCount = 3,
    int initialDelay = 100,
    Func<Exception, int, bool>? next = null,
    CancellationToken cancellationToken = default)
    => Quartz
        .GenerateExponential(TimeSpan.FromMilliseconds(initialDelay), retryCount, fastFirst: true)
        .WaitAndRetry(fun, next, cancellationToken: cancellationToken);

/// <summary>
/// Retries with linearly growing delays, e.g. 100ms, 200ms, 300ms, 400ms, ...
/// </summary>
[DebuggerStepThrough]
public static ValueTask<T> Linear<I, T>(
    Func<I, CancellationToken, ValueTask<T>> fun,
    I input,
    int retryCount = 3,
    int initialDelay = 100,
    Func<Exception, int, bool>? next = null,
    CancellationToken cancellationToken = default)
    => Quartz
        .GenerateLinear(TimeSpan.FromMilliseconds(initialDelay), retryCount, fastFirst: true)
        .WaitAndRetry(fun, input, next, cancellationToken: cancellationToken);

/// <summary>
/// Retries with linearly growing delays, e.g. 100ms, 200ms, 300ms, 400ms, ...
/// </summary>
[DebuggerStepThrough]
public static ValueTask<T> Linear<T>(
    Func<CancellationToken, ValueTask<T>> fun,
    int retryCount = 3,
    int initialDelay = 100,
    Func<Exception, int, bool>? next = null,
    CancellationToken cancellationToken = default)
    => Quartz
        .GenerateLinear(TimeSpan.FromMilliseconds(initialDelay), retryCount, fastFirst: true)
        .WaitAndRetry(fun, next, cancellationToken: cancellationToken);

/// <summary>
/// Retries with randomized (jittered) delays, e.g. 850ms, 1455ms, 3060ms.
/// </summary>
[DebuggerStepThrough]
public static ValueTask<T> Jitter<I, T>(
    Func<I, CancellationToken, ValueTask<T>> fun,
    I input,
    int retryCount = 3,
    int initialDelay = 530,
    Func<Exception, int, bool>? next = null,
    CancellationToken cancellationToken = default)
    => Quartz
        .GenerateJitter(TimeSpan.FromMilliseconds(initialDelay), retryCount, fastFirst: true)
        .WaitAndRetry(fun, input, next, cancellationToken: cancellationToken);

/// <summary>
/// Retries with randomized (jittered) delays, e.g. 850ms, 1455ms, 3060ms.
/// </summary>
[DebuggerStepThrough]
public static ValueTask<T> Jitter<T>(
    Func<CancellationToken, ValueTask<T>> fun,
    int retryCount = 3,
    int initialDelay = 530,
    Func<Exception, int, bool>? next = null,
    CancellationToken cancellationToken = default)
    => Quartz
        .GenerateJitter(TimeSpan.FromMilliseconds(initialDelay), retryCount, fastFirst: true)
        .WaitAndRetry(fun, next, cancellationToken: cancellationToken);
/// <summary>
/// Runs <paramref name="fun"/> until it succeeds, pausing between attempts with the
/// given delays. The last delay is reserved for one final, unguarded attempt whose
/// failure always surfaces to the caller.
/// </summary>
/// <param name="timeSpans">Pauses between attempts: count - 1 guarded attempts plus a final one.</param>
/// <param name="fun">The operation to execute.</param>
/// <param name="input">State passed to every attempt.</param>
/// <param name="next">Optional filter; return false for an (exception, attempt) pair to stop retrying and rethrow.</param>
/// <param name="cancellationToken">Stops further guarded attempts; the final attempt still runs.</param>
[DebuggerStepThrough]
public static async ValueTask<T> WaitAndRetry<I, T>(
    this IEnumerable<TimeSpan> timeSpans,
    Func<I, CancellationToken, ValueTask<T>> fun,
    I input,
    Func<Exception, int, bool>? next = null,
    CancellationToken cancellationToken = default)
{
    TimeSpan[] delays = timeSpans.ToArray();

    for (int attempt = 0; attempt < delays.Length - 1; attempt++)
    {
        if (cancellationToken.IsCancellationRequested) break;

        try
        {
            return await fun(input, cancellationToken);
        }
        catch (TaskCanceledException)
        {
            // Cancellation of the work ends the guarded loop early.
            break;
        }
        catch (Exception e) when (!e.IsCritical() && (next == null || next(e, attempt)))
        {
            // Retryable failure: sleep, then loop for the next attempt.
            await Wait(delays[attempt], cancellationToken);
        }
    }

    if (delays.Length > 0)
    {
        await Wait(delays[^1], cancellationToken);
    }

    // Final attempt is deliberately unguarded.
    return await fun(input, cancellationToken);
}

/// <summary>
/// Input-less variant of <see cref="WaitAndRetry{I,T}(IEnumerable{TimeSpan},Func{I,CancellationToken,ValueTask{T}},I,Func{Exception,int,bool}?,CancellationToken)"/>.
/// </summary>
[DebuggerStepThrough]
public static async ValueTask<T> WaitAndRetry<T>(
    this IEnumerable<TimeSpan> timeSpans,
    Func<CancellationToken, ValueTask<T>> fun,
    Func<Exception, int, bool>? next = null,
    CancellationToken cancellationToken = default)
{
    TimeSpan[] delays = timeSpans.ToArray();

    for (int attempt = 0; attempt < delays.Length - 1; attempt++)
    {
        if (cancellationToken.IsCancellationRequested) break;

        try
        {
            return await fun(cancellationToken);
        }
        catch (TaskCanceledException)
        {
            break;
        }
        catch (Exception e) when (!e.IsCritical() && (next == null || next(e, attempt)))
        {
            await Wait(delays[attempt], cancellationToken);
        }
    }

    if (delays.Length > 0)
    {
        await Wait(delays[^1], cancellationToken);
    }

    return await fun(cancellationToken);
}

// Sleeps for the given delay; a cancellation simply ends the pause early.
private static async Task Wait(TimeSpan delay, CancellationToken cancellationToken)
{
    if (cancellationToken.IsCancellationRequested) return;
    try
    {
        await Task.Delay(delay, cancellationToken);
    }
    catch (TaskCanceledException)
    {
        // ignore: treated as "pause finished"
    }
}
/// <summary>
/// A segment with an explicit start and end plus a flag telling whether the end
/// point itself belongs to the range (closed vs half-open on the right).
/// </summary>
/// <param name="Start">Inclusive start of the range.</param>
/// <param name="End">End of the range.</param>
/// <param name="HasEnd">Indicates whether the value at the end of the range is included.</param>
[DebuggerStepThrough]
public record LimSection<T>(T Start, T End, bool HasEnd = false)
    : Section<T>(Start, End) where T : IComparable<T>
{
    public static readonly new LimSection<T> Empty = new(default!, default!, false);
}

/// <summary>
/// A half-line (ray): a concrete start and an optional end. A null end means the
/// range extends to infinity; <paramref name="HasEnd"/> tells whether a present
/// end point is included.
/// </summary>
/// <param name="Start">Inclusive start of the range.</param>
/// <param name="End">End of the range, or null for an unbounded ray.</param>
/// <param name="HasEnd">Indicates whether the value at the end of the range is included.</param>
[DebuggerStepThrough]
public record HalfSection<T>(T Start, T? End, bool HasEnd = false)
{
    public static readonly HalfSection<T> Empty = new(default!, default, false);
}
/// <summary>Reverses a limited section, keeping its end-inclusion flag.</summary>
public static LimSection<T> Reverse<T>(this LimSection<T> range) where T : IComparable<T>
    => range.End.RangeTo(range.Start, range.HasEnd);

/// <summary>Returns the section oriented so that Start is not greater than End.</summary>
public static Section<T> Normalize<T>(this Section<T> range) where T : IComparable<T>
    => range.IsPositive() ? range : Reverse(range);

/// <summary>Returns the limited section oriented so that Start is not greater than End.</summary>
public static LimSection<T> Normalize<T>(this LimSection<T> range) where T : IComparable<T>
    => range.IsPositive() ? range : Reverse(range);

/// <summary>True when <paramref name="value"/> lies within the closed range [Start, End].</summary>
public static bool InRange<T>(this Section<T> range, T value) where T : IComparable<T>
    => range.Start.CompareTo(value) <= 0 && range.End.CompareTo(value) >= 0;

/// <summary>
/// True when <paramref name="value"/> lies within the range; the end point counts
/// only when <c>HasEnd</c> is set.
/// </summary>
public static bool InRange<T>(this LimSection<T> range, T value) where T : IComparable<T>
    => range.Start.CompareTo(value) <= 0
       && (range.HasEnd ? range.End.CompareTo(value) >= 0 : range.End.CompareTo(value) > 0);

/// <summary>
/// Finds the gaps inside <paramref name="range"/> that are not covered by any of
/// <paramref name="busyIntervals"/>.
/// </summary>
/// <param name="range">The interval to inspect.</param>
/// <param name="busyIntervals">Occupied sub-intervals, in any order; they may overlap.</param>
/// <returns>The uncovered sub-intervals, in ascending order.</returns>
public static List<Section<T>> FindEmptyIntervals<T>(this Section<T> range, IEnumerable<Section<T>> busyIntervals)
    where T : IComparable<T>
{
    List<Section<T>> emptyIntervals = [];

    // Process busy intervals in ascending order of their start.
    Section<T>[] sortedBusyIntervals = busyIntervals.ToArray();
    if (sortedBusyIntervals.Length > 1)
    {
        Array.Sort(sortedBusyIntervals, (a, b) => a.Start.CompareTo(b.Start));
    }

    T currentStart = range.Start;
    foreach (Section<T> interval in sortedBusyIntervals)
    {
        if (interval.Start.CompareTo(currentStart) > 0)
        {
            emptyIntervals.Add(new Section<T>(currentStart, interval.Start));
        }

        // BUGFIX: with overlapping busy intervals the cursor previously jumped
        // backwards (currentStart = interval.End unconditionally), reporting
        // already-covered space as free. The cursor must only move forward.
        if (interval.End.CompareTo(currentStart) > 0)
        {
            currentStart = interval.End;
        }
    }

    if (range.End.CompareTo(currentStart) > 0)
    {
        emptyIntervals.Add(new Section<T>(currentStart, range.End));
    }

    return emptyIntervals;
}
/// <summary>
/// Intersection of two sections, or null when they do not overlap
/// (closed-interval semantics: touching endpoints count as overlap).
/// </summary>
public static Section<T>? FindIntersections<T>(this Section<T> self, Section<T> other)
    where T : IComparable<T>
{
    if (other.End.CompareTo(self.Start) >= 0 && self.End.CompareTo(other.Start) >= 0)
    {
        T start = self.Start.CompareTo(other.Start) >= 0 ? self.Start : other.Start;
        T end = self.End.CompareTo(other.End) < 0 ? self.End : other.End;
        return new Section<T>(start, end);
    }

    return null;
}

/// <summary>
/// Merges <paramref name="self"/> together with <paramref name="intervals"/> into a
/// minimal set of non-overlapping sections.
/// </summary>
public static List<Section<T>> MergeIntervals<T>(this Section<T> self, IEnumerable<Section<T>> intervals)
    where T : IComparable<T>
{
    var list = new List<Section<T>>(intervals)
    {
        self
    };

    return MergeIntervals(list);
}

/// <summary>
/// Merges overlapping or touching sections into a minimal ascending set.
/// </summary>
/// <returns>Merged sections; empty when <paramref name="intervals"/> is empty.</returns>
public static List<Section<T>> MergeIntervals<T>(IEnumerable<Section<T>> intervals)
    where T : IComparable<T>
{
    List<Section<T>> sortedList = new(intervals);

    // BUGFIX: an empty input used to throw ArgumentOutOfRangeException when
    // indexing sortedList[0]; an empty merge result is the correct answer.
    if (sortedList.Count == 0)
    {
        return sortedList;
    }

    if (sortedList.Count > 1)
    {
        sortedList.Sort((a, b) => a.Start.CompareTo(b.Start)); // sort by start point
    }

    List<Section<T>> mergedIntervals = [];

    Section<T> currentInterval = sortedList[0];

    foreach (Section<T> interval in sortedList)
    {
        if (currentInterval.End.CompareTo(interval.Start) >= 0) // intervals overlap or touch
        {
            var currentEnd = currentInterval.End.CompareTo(interval.End) >= 0
                ? currentInterval.End
                : interval.End;

            currentInterval = new Section<T>(currentInterval.Start, currentEnd);
        }
        else
        {
            mergedIntervals.Add(currentInterval);
            currentInterval = interval;
        }
    }

    mergedIntervals.Add(currentInterval); // flush the last interval

    return mergedIntervals;
}
/// <summary>
/// Walks from Start towards End, using <paramref name="next"/> to produce each
/// step; stops when the cursor reaches End or <paramref name="next"/> yields null.
/// </summary>
public static IEnumerable<T> Enumerate<T>(this Section<T> self, Func<T, T?> next)
    where T : IComparable<T>
{
    T? cursor = self.Start;
    while (cursor is not null && self.End.CompareTo(cursor) > 0)
    {
        yield return cursor;
        cursor = next(cursor);
    }
}

/// <summary>Splits the section at each of the given points (params overload).</summary>
public static List<Section<T>> SplitInterval<T>(this Section<T> self, params T[] points)
    where T : IComparable<T> => SplitInterval(self, points.AsEnumerable());

/// <summary>
/// Splits the section at every point that falls strictly inside it; points on or
/// outside the boundaries are ignored.
/// </summary>
public static List<Section<T>> SplitInterval<T>(this Section<T> self, IEnumerable<T> points)
    where T : IComparable<T>
{
    T[] cutPoints = points.ToArray();
    if (cutPoints.Length > 1) Array.Sort(cutPoints);

    List<Section<T>> parts = [];

    T previous = self.Start;
    foreach (T point in cutPoints)
    {
        // Only cut strictly inside the section.
        if (point.CompareTo(self.Start) > 0 && self.End.CompareTo(point) > 0)
        {
            parts.Add(previous.RangeTo(point));
            previous = point;
        }
    }

    parts.Add(previous.RangeTo(self.End));
    return parts;
}

/// <summary>Splits the section at the steps produced by <paramref name="next"/>.</summary>
public static List<Section<T>> SplitInterval<T>(this Section<T> self, Func<T, T?> next)
    where T : IComparable<T> => SplitInterval(self, Enumerate(self, next).Skip(1));

/// <summary>True when <paramref name="other"/> lies entirely inside <paramref name="self"/>.</summary>
public static bool IsIntervalIncluded<T>(this Section<T> self, Section<T> other)
    where T : IComparable<T> => other.Start.CompareTo(self.Start) >= 0 && self.End.CompareTo(other.End) >= 0;

/// <summary>True when the section collapses to a single point.</summary>
public static bool IsPoint<T>(this Section<T> self) where T : IComparable<T>
    => self.Start.CompareTo(self.End) == 0;

public static bool IsEmpty<T>(this Section<T> self) where T : IComparable<T> => self == Section<T>.Empty;
public static bool IsEmpty<T>(this LimSection<T> self) where T : IComparable<T> => self == LimSection<T>.Empty;
public static bool IsEmpty<T>(this HalfSection<T> self) where T : IComparable<T> => self == HalfSection<T>.Empty;

/// <summary>Length of a date section; both ends are converted to UTC first.</summary>
public static TimeSpan GetLength(this Section<DateTime> range) => range.End.ToUniversalTime() - range.Start.ToUniversalTime();
public static int GetLength(this Section<int> range) => range.End - range.Start;
public static long GetLength(this Section<long> range) => range.End - range.Start;
range) => range.End - range.Start; + public static double GetLength(this Section range) => range.End - range.Start; +} diff --git a/src/Sa/Classes/StrOrNum.cs b/src/Sa/Classes/StrOrNum.cs new file mode 100644 index 0000000..acfd4aa --- /dev/null +++ b/src/Sa/Classes/StrOrNum.cs @@ -0,0 +1,83 @@ +using Sa.Extensions; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace Sa.Classes; + +/// +///StrOrNum +/// +/// +/// StrOrNum val = 10; +/// StrOrNum val_1 = "привет"; +/// string v = val.Match( +/// onChoiceNum: item => $"long: {item}", +/// onChoiceStr: item => $"string: {item}" +/// ); +/// +/// +/// +/// +[JsonConverter(typeof(StrOrNumConverter))] +public abstract record StrOrNum +{ + public record ChoiceStr(string Item) : StrOrNum + { + public override string ToString() => Item; + } + + public record ChoiceNum(long Item) : StrOrNum + { + public override string ToString() => Item.ToString(); + } + + public U Match(Func onChoiceStr, Func onChoiceNum) + => Match(onChoiceStr, onChoiceNum, this); + + + public static implicit operator StrOrNum(string item) => new ChoiceStr(item); + + public static implicit operator StrOrNum(int item) => new ChoiceNum(item); + public static implicit operator StrOrNum(long item) => new ChoiceNum(item); + public static implicit operator StrOrNum(short item) => new ChoiceNum(item); + + public static explicit operator string(StrOrNum choice) => choice.Match(c1 => c1, c2 => c2.ToString()); + public static explicit operator long(StrOrNum choice) => choice.Match(c1 => c1.StrToLong() ?? 0, c2 => c2); + public static explicit operator int(StrOrNum choice) => choice.Match(c1 => c1.StrToInt() ?? 0, c2 => (int)c2); + public static explicit operator short(StrOrNum choice) => choice.Match(c1 => c1.StrToShort() ?? 
0, c2 => (short)c2); + + private static U Match(Func onChoiceStr, Func onChoiceNum, StrOrNum choice) + { + U result = choice switch + { + ChoiceStr c => onChoiceStr(c.Item), + ChoiceNum c => onChoiceNum(c.Item), + _ => throw new ArgumentOutOfRangeException(nameof(choice)) + }; + + return result; + } + + public override string ToString() => Match(str => str, num => num.ToString()); + + private StrOrNum() { } +} + + + +public class StrOrNumConverter : JsonConverter +{ + public override StrOrNum Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + string? str = reader.GetString(); + + if (str == null) return new StrOrNum.ChoiceStr(string.Empty); + + return str[0] == 'n' + ? new StrOrNum.ChoiceNum(str[2..].StrToLong() ?? 0) + : new StrOrNum.ChoiceStr(str[2..]); + } + + public override void Write(Utf8JsonWriter writer, StrOrNum value, JsonSerializerOptions options) + => writer.WriteStringValue(value.Match(s => $"s:{s}", n => $"n:{n}")); +} diff --git a/src/Sa/Classes/WorkQueue.cs b/src/Sa/Classes/WorkQueue.cs new file mode 100644 index 0000000..6a8cbb1 --- /dev/null +++ b/src/Sa/Classes/WorkQueue.cs @@ -0,0 +1,150 @@ +using System.Collections.Concurrent; +using System.Diagnostics.CodeAnalysis; +using System.Threading.Channels; + +namespace Sa.Classes; + + +public interface IWork +{ + Task Execute(TModel model, CancellationToken cancellationToken); +} + + +public interface IWorkWithHandleError : IWork +{ + Task HandelError(Exception exception, TModel model, CancellationToken cancellationToken); +} + +public interface IWorkQueue : IAsyncDisposable + where TModel : notnull + where TWork : IWork +{ + WorkQueue Enqueue([NotNull] TModel model, TWork work, CancellationToken cancellationToken = default); + Task Stop(TModel model); +} + + +/// +/// +/// +/// +/// +/// +public sealed class WorkQueue : IWorkQueue where TModel : notnull + where TWork : IWork +{ + private readonly ConcurrentDictionary _workPools = new(); + + + public WorkQueue 
Enqueue([NotNull] TModel model, TWork work, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(model); + _workPools.GetOrAdd(model, WorkQueue.StartNewWorkPool(model, cancellationToken)).Enqueue(work); + return this; + } + + private static WorkPool StartNewWorkPool(TModel model, CancellationToken cancellationToken) + { + var newWorkPool = new WorkPool(model); + newWorkPool.Start(cancellationToken); + return newWorkPool; + } + + public async Task Stop(TModel model) + { + if (_workPools.TryRemove(model, out WorkPool? workPool)) + { + await workPool.DisposeAsync(); + } + } + + public async ValueTask DisposeAsync() + { + foreach (TModel model in _workPools.Keys) + { + await Stop(model); + } + } + + private sealed class WorkPool : IAsyncDisposable + { + private readonly Channel _channel; + private readonly TModel _model; + private Task? _worker; + private readonly CancellationTokenSource _stoppedTokenSource = new(); + + public WorkPool([NotNullWhen(true)] TModel model) + { + _model = model; + _channel = Channel.CreateUnbounded(new UnboundedChannelOptions + { + SingleReader = true, + SingleWriter = false, + AllowSynchronousContinuations = false + }); + } + + public void Start(CancellationToken cancellationToken) + { + _worker = Task.Run(() => Loop(cancellationToken), cancellationToken); + } + + public void Enqueue(TWork work) + { + _channel.Writer.TryWrite(work); + } + + private async Task Loop(CancellationToken cancellationToken) + { + while (!cancellationToken.IsCancellationRequested && await _channel.Reader.WaitToReadAsync(cancellationToken).ConfigureAwait(false)) + { + while (_channel.Reader.TryRead(out TWork? 
work)) + { + using var stoppingTokenSource = CancellationTokenSource.CreateLinkedTokenSource(_stoppedTokenSource.Token, cancellationToken); + CancellationToken token = stoppingTokenSource.Token; + try + { + Task task = work.Execute(_model, token); + if (!task.IsCompleted) + { + await task.ConfigureAwait(false); + } + } + catch (OperationCanceledException ex) when (ex.CancellationToken == token) + { + // ignore + } + catch (Exception error) + { + if (work is IWorkWithHandleError handler) + { + await handler.HandelError(error, _model, CancellationToken.None); + } + } + } + } + } + + public async ValueTask DisposeAsync() + { + _channel.Writer.Complete(); + await _stoppedTokenSource.CancelAsync(); + + if (_worker != null) + { + try + { + await _worker; + } + catch (OperationCanceledException) + { + // ignore + } + } + + _stoppedTokenSource.Dispose(); + } + } +} + diff --git a/src/Sa/Extensions/DateTimeExtensions.cs b/src/Sa/Extensions/DateTimeExtensions.cs new file mode 100644 index 0000000..15957c9 --- /dev/null +++ b/src/Sa/Extensions/DateTimeExtensions.cs @@ -0,0 +1,29 @@ +using System.Diagnostics; + +namespace Sa.Extensions; + +public static class DateTimeExtensions +{ + /// + /// unix timestamp + /// + [DebuggerStepThrough] + public static long ToUnixTimestamp(this DateTime dateTime, bool isInMilliseconds = false) + { + TimeSpan ts = dateTime.ToUniversalTime().Subtract(DateTime.UnixEpoch); + return isInMilliseconds ? 
/// <summary>
/// Converts the date to a Unix timestamp relative to the Unix epoch:
/// seconds by default, milliseconds when <paramref name="isInMilliseconds"/> is true.
/// </summary>
[DebuggerStepThrough]
public static long ToUnixTimestamp(this DateTime dateTime, bool isInMilliseconds = false)
{
    TimeSpan sinceEpoch = dateTime.ToUniversalTime().Subtract(DateTime.UnixEpoch);
    return isInMilliseconds ? (long)sinceEpoch.TotalMilliseconds : (long)sinceEpoch.TotalSeconds;
}

/// <summary>Midnight at the start of the given day, preserving the offset.</summary>
[DebuggerStepThrough]
public static DateTimeOffset StartOfDay(this DateTimeOffset dateTime)
    => new(dateTime.Year, dateTime.Month, dateTime.Day, 0, 0, 0, 0, dateTime.Offset);

/// <summary>Exclusive end of the day: midnight at the start of the NEXT day.</summary>
[DebuggerStepThrough]
public static DateTimeOffset EndOfDay(this DateTimeOffset dateTime)
    => dateTime.StartOfDay().AddDays(1);

/// <summary>Midnight on the first day of the month, preserving the offset.</summary>
[DebuggerStepThrough]
public static DateTimeOffset StartOfMonth(this DateTimeOffset dateTime)
    => new(dateTime.Year, dateTime.Month, 1, 0, 0, 0, 0, dateTime.Offset);

/// <summary>Exclusive end of the month: midnight on the first day of the NEXT month.</summary>
[DebuggerStepThrough]
public static DateTimeOffset EndOfMonth(this DateTimeOffset dateTime)
    => dateTime.StartOfMonth().AddMonths(1);

/// <summary>Midnight on January 1st of the year, preserving the offset.</summary>
[DebuggerStepThrough]
public static DateTimeOffset StartOfYear(this DateTimeOffset dateTime)
    => new(dateTime.Year, 1, 1, 0, 0, 0, 0, dateTime.Offset);

/// <summary>Exclusive end of the year: midnight on January 1st of the NEXT year.</summary>
[DebuggerStepThrough]
public static DateTimeOffset EndOfYear(this DateTimeOffset dateTime)
    => dateTime.StartOfYear().AddYears(1);
/// <summary>
/// Tells whether an exception signals process-level corruption that retry or
/// swallow-and-continue logic must never handle.
/// </summary>
[DebuggerStepThrough]
public static bool IsCritical(this Exception ex) => ex
    is OutOfMemoryException
    or StackOverflowException
    or AppDomainUnloadedException
    or BadImageFormatException
    or CannotUnloadAppDomainException
    or InvalidProgramException
    or ThreadAbortException;

/// <summary>
/// Flattens the messages of an exception and all of its inner exceptions, one
/// message per line (each nesting level contributes a trailing blank line).
/// </summary>
[DebuggerStepThrough]
public static string GetErrorMessages(this Exception exception)
{
    // Recursion is kept deliberately: each level appends its own trailing
    // newline, which is part of the method's observable output format.
    StringBuilder sb = new();
    sb.AppendLine(exception.Message);
    if (exception.InnerException is not null)
    {
        sb.AppendLine(GetErrorMessages(exception.InnerException));
    }
    return sb.ToString();
}
options = null) + { + return JsonSerializer.Deserialize(value, options); + } +} diff --git a/src/Sa/Extensions/NumericExtensions.cs b/src/Sa/Extensions/NumericExtensions.cs new file mode 100644 index 0000000..a782b7a --- /dev/null +++ b/src/Sa/Extensions/NumericExtensions.cs @@ -0,0 +1,50 @@ +using System.Diagnostics; + +namespace Sa.Extensions; + +public static class NumericExtensions +{ + private static readonly DateTime UnixEpoch = DateTime.UnixEpoch; + private static readonly double MaxUnixSeconds = (DateTime.MaxValue - UnixEpoch).TotalSeconds; + + [DebuggerStepThrough] + public static DateTime ToDateTimeFromUnixTimestamp(this uint timestamp) + => (timestamp > MaxUnixSeconds + ? UnixEpoch.AddMilliseconds(timestamp) + : UnixEpoch.AddSeconds(timestamp)).ToUniversalTime(); + + + [DebuggerStepThrough] + public static DateTime? ToDateTimeFromUnixTimestamp(this string timestampString) + => long.TryParse(timestampString, out var result) ? result.ToDateTimeFromUnixTimestamp() : null; + + [DebuggerStepThrough] + public static DateTime ToDateTimeFromUnixTimestamp(this long timestamp) + => (timestamp > MaxUnixSeconds + ? UnixEpoch.AddMilliseconds(timestamp) + : UnixEpoch.AddSeconds(timestamp)).ToUniversalTime(); + + [DebuggerStepThrough] + public static DateTime ToDateTimeFromUnixTimestamp(this ulong timestamp) + => ToDateTimeFromUnixTimestamp((long)timestamp); + + [DebuggerStepThrough] + public static DateTime ToDateTimeFromUnixTimestamp(this double timestamp) + => ToDateTimeFromUnixTimestamp((long)timestamp); + + [DebuggerStepThrough] + public static DateTime? ToDateTimeFromUnixTimestamp(this long? ts) + => ts.HasValue ? ts.Value.ToDateTimeFromUnixTimestamp() : null; + + [DebuggerStepThrough] + public static DateTime? ToDateTimeFromUnixTimestamp(this ulong? ts) + => ts.HasValue ? ts.Value.ToDateTimeFromUnixTimestamp() : null; + + [DebuggerStepThrough] + public static DateTime? ToDateTimeFromUnixTimestamp(this double? ts) + => ts.HasValue ? 
ts.Value.ToDateTimeFromUnixTimestamp() : null; + + [DebuggerStepThrough] + public static DateTimeOffset ToDateTimeOffsetFromUnixTimestamp(this long timestamp) + => ToDateTimeFromUnixTimestamp(timestamp); +} diff --git a/src/Sa/Extensions/SpanExtensions.cs b/src/Sa/Extensions/SpanExtensions.cs new file mode 100644 index 0000000..5437b09 --- /dev/null +++ b/src/Sa/Extensions/SpanExtensions.cs @@ -0,0 +1,126 @@ +using System.Diagnostics; + +namespace Sa.Extensions; + +public static class SpanExtensions +{ + + [DebuggerStepThrough] + public static int Count(this Span items, Func func) + { + int count = 0; + foreach (T message in items) + { + if (func(message)) count++; + } + + return count; + } + + [DebuggerStepThrough] + public static IEnumerable> GetChunks(this Memory arr, int chunkSize) + { + for (int i = 0; i < arr.Length; i += chunkSize) + { + // by slice + Memory chunk = arr[i..Math.Min(i + chunkSize, arr.Length)]; + yield return chunk; + } + } + + /// + /// Combines Select and Where with indexes into a single call for optimal + /// performance. + /// + /// + /// The input sequence to filter and select + /// The transformation with index to apply before filtering. + /// The predicate with index with which to filter result. + /// A sequence transformed and then filtered by selector and predicate with indexes. + public static TResult[] SelectWhere(this Span source, Func selector, Func? predicate = null) + { + if (source == null) throw new ArgumentNullException(nameof(source)); + + + TResult[] result = new TResult[source.Length]; + int idx = 0; + for (int i = 0; i < source.Length; i++) + { + TResult? s = selector(source[i], i); + if (predicate == null || predicate(s, i)) + { + result[idx] = s; + idx++; + } + } + Array.Resize(ref result, idx); + return result; + } + + /// + /// + /// + public static TResult[] SelectWhere(this Span source, Func selector, Func? 
predicate = null) + { + if (source == null) throw new ArgumentNullException(nameof(source)); + + + TResult[] result = new TResult[source.Length]; + int idx = 0; + for (int i = 0; i < source.Length; i++) + { + TResult? s = selector(source[i]); + if (predicate == null || predicate(s)) + { + result[idx] = s; + idx++; + } + } + Array.Resize(ref result, idx); + return result; + } + + + public static TResult[] SelectWhere(this ReadOnlySpan source, Func selector, Func? predicate = null) + { + if (source == null) throw new ArgumentNullException(nameof(source)); + + + TResult[] result = new TResult[source.Length]; + int idx = 0; + for (int i = 0; i < source.Length; i++) + { + TResult? s = selector(source[i], i); + if (predicate == null || predicate(s, i)) + { + result[idx] = s; + idx++; + } + } + Array.Resize(ref result, idx); + return result; + } + + /// + /// + /// + public static TResult[] SelectWhere(this ReadOnlySpan source, Func selector, Func? predicate = null) + { + if (source == null) throw new ArgumentNullException(nameof(source)); + + + TResult[] result = new TResult[source.Length]; + int idx = 0; + for (int i = 0; i < source.Length; i++) + { + TResult? s = selector(source[i]); + if (predicate == null || predicate(s)) + { + result[idx] = s; + idx++; + } + } + Array.Resize(ref result, idx); + return result; + } +} \ No newline at end of file diff --git a/src/Sa/Extensions/StrToExtensions.cs b/src/Sa/Extensions/StrToExtensions.cs new file mode 100644 index 0000000..28c9bb8 --- /dev/null +++ b/src/Sa/Extensions/StrToExtensions.cs @@ -0,0 +1,158 @@ +using System.Diagnostics; +using System.Globalization; +using System.Text; + +namespace Sa.Extensions; + +public static class StrToExtensions +{ + + [DebuggerStepThrough] + public static bool? StrToBool(this string? str) => bool.TryParse(str, out bool result) ? result : null; + + [DebuggerStepThrough] + public static bool? StrToBool(this ReadOnlySpan str) => bool.TryParse(str, out bool result) ? 
/// <summary>Parses a bool from a span, returning null instead of throwing on failure.</summary>
[DebuggerStepThrough]
public static bool? StrToBool(this ReadOnlySpan<char> str) => bool.TryParse(str, out bool parsed) ? parsed : null;

/// <summary>Parses an int, returning null instead of throwing on failure.</summary>
[DebuggerStepThrough]
public static int? StrToInt(this string? str) => int.TryParse(str, out int parsed) ? parsed : null;

[DebuggerStepThrough]
public static int? StrToInt(this ReadOnlySpan<char> str) => int.TryParse(str, out int parsed) ? parsed : null;

/// <summary>Parses a short, returning null instead of throwing on failure.</summary>
[DebuggerStepThrough]
public static short? StrToShort(this string? str) => short.TryParse(str, out short parsed) ? parsed : null;

[DebuggerStepThrough]
public static short? StrToShort(this ReadOnlySpan<char> str) => short.TryParse(str, out short parsed) ? parsed : null;

/// <summary>Parses a ushort, returning null instead of throwing on failure.</summary>
[DebuggerStepThrough]
public static ushort? StrToUShort(this string? str) => ushort.TryParse(str, out ushort parsed) ? parsed : null;

[DebuggerStepThrough]
public static ushort? StrToUShort(this ReadOnlySpan<char> str) => ushort.TryParse(str, out ushort parsed) ? parsed : null;

/// <summary>Parses a long, returning null instead of throwing on failure.</summary>
[DebuggerStepThrough]
public static long? StrToLong(this string? str) => long.TryParse(str, out long parsed) ? parsed : null;

[DebuggerStepThrough]
public static long? StrToLong(this ReadOnlySpan<char> str) => long.TryParse(str, out long parsed) ? parsed : null;

/// <summary>Parses a ulong, returning null instead of throwing on failure.</summary>
[DebuggerStepThrough]
public static ulong? StrToULong(this string? str) => ulong.TryParse(str, out ulong parsed) ? parsed : null;

[DebuggerStepThrough]
public static ulong? StrToULong(this ReadOnlySpan<char> str) => ulong.TryParse(str, out ulong parsed) ? parsed : null;

/// <summary>Encodes the string to bytes; defaults to UTF-8 when no encoding is given.</summary>
[DebuggerStepThrough]
public static byte[] StrToBytes(this string str, Encoding? encoding = null) => (encoding ?? Encoding.UTF8).GetBytes(str);

/// <summary>Parses a Guid, returning null instead of throwing on failure.</summary>
[DebuggerStepThrough]
public static Guid? StrToGuid(this string? str) => Guid.TryParse(str, out Guid parsed) ? parsed : null;

[DebuggerStepThrough]
public static Guid? StrToGuid(this ReadOnlySpan<char> str) => Guid.TryParse(str, out Guid parsed) ? parsed : null;

/// <summary>Parses an enum name (case-insensitive), falling back to <paramref name="defaultValue"/>.</summary>
[DebuggerStepThrough]
public static T StrToEnum<T>(this string? str, T defaultValue) where T : struct
    => Enum.TryParse(str, true, out T parsed) ? parsed : defaultValue;
result : defaultValue; + + + [DebuggerStepThrough] + public static DateTime? StrToDate(this string? str, IFormatProvider? provider = null, DateTimeStyles style = DateTimeStyles.None) + => DateTime.TryParseExact(str, DateFmt.Formats, provider ?? CultureInfo.InvariantCulture, style, out DateTime result) + ? result + : null; + + [DebuggerStepThrough] + public static DateTime? StrToDate(this ReadOnlySpan str, IFormatProvider? provider = null, DateTimeStyles style = DateTimeStyles.None) + => DateTime.TryParseExact(str, DateFmt.Formats, provider ?? CultureInfo.InvariantCulture, style, out DateTime result) + ? result + : null; + +} + +#region Date Fmts +static class DateFmt +{ + public static readonly string[] Formats = + [ + "yyyyMMdd", + "dd.MM.yyyy", + "dd-MM-yyyy", + "yyyy-MM-dd", + "MM/dd/yyyy HH:mm:ss", + "MM/dd/yyyy", + "dd.MM.yy", + "yyyy-MM-dd HH:mm", + "yyyy-MM-dd HH:mm:ss", + "dd.MM.yyyy HH:mm", + "dd-MM-yyyy HH:mm", + "yyyy-MM-ddTHH:mm:ss", + "dd.MM.yyyy HH:mm:ss", + "yyyy-MM-ddK", + "yyyyMMddK", + "yyyy-MM-ddTHH:mm:ss.fffffffK", + "yyyyMMddTHH:mm:ss.fffffffK", + "yyyy-MM-ddTHH:mm:ss,fffffffK", + "yyyyMMddTHH:mm:ss,fffffffK", + "yyyy-MM-ddTHH:mm:ss.ffffffK", + "yyyyMMddTHH:mm:ss.ffffffK", + "yyyy-MM-ddTHH:mm:ss,ffffffK", + "yyyyMMddTHH:mm:ss,ffffffK", + "yyyy-MM-ddTHH:mm:ss.fffffK", + "yyyyMMddTHH:mm:ss.fffffK", + "yyyy-MM-ddTHH:mm:ss,fffffK", + "yyyyMMddTHH:mm:ss,fffffK", + "yyyy-MM-ddTHH:mm:ss.ffffK", + "yyyyMMddTHH:mm:ss.ffffK", + "yyyy-MM-ddTHH:mm:ss,ffffK", + "yyyyMMddTHH:mm:ss,ffffK", + "yyyy-MM-ddTHH:mm:ss.fffK", + "yyyyMMddTHH:mm:ss.fffK", + "yyyy-MM-ddTHH:mm:ss.ffK", + "yyyyMMddTHH:mm:ss.ffK", + "yyyy-MM-ddTHH:mm:ss,ffK", + "yyyyMMddTHH:mm:ss,ffK", + "yyyy-MM-ddTHH:mm:ss.fK", + "yyyyMMddTHH:mm:ss.fK", + "yyyy-MM-ddTHH:mm:ss,fK", + "yyyyMMddTHH:mm:ss,fK", + "yyyy-MM-ddTHH:mm:ssK", + "yyyyMMddTHH:mm:ssK", + "yyyy-MM-ddTHHmmss.fffffffK", + "yyyyMMddTHHmmss.fffffffK", + "yyyy-MM-ddTHHmmss,fffffffK", + "yyyyMMddTHHmmss,fffffffK", + 
"yyyy-MM-ddTHHmmss.ffffffK", + "yyyyMMddTHHmmss.ffffffK", + "yyyy-MM-ddTHHmmss,ffffffK", + "yyyyMMddTHHmmss,ffffffK", + "yyyy-MM-ddTHHmmss.fffffK", + "yyyyMMddTHHmmss.fffffK", + "yyyy-MM-ddTHHmmss,fffffK", + "yyyyMMddTHHmmss,fffffK", + "yyyy-MM-ddTHHmmss.ffffK", + "yyyyMMddTHHmmss.ffffK", + "yyyy-MM-ddTHHmmss,ffffK", + "yyyyMMddTHHmmss,ffffK", + "yyyy-MM-ddTHHmmss.ffK", + "yyyyMMddTHHmmss.ffK", + "yyyy-MM-ddTHHmmss,ffK", + "yyyyMMddTHHmmss,ffK", + "yyyy-MM-ddTHHmmss.fK", + "yyyyMMddTHHmmss.fK", + "yyyy-MM-ddTHHmmss,fK", + "yyyyMMddTHHmmss,fK", + "yyyy-MM-ddTHHmmssK", + "yyyyMMddTHHmmssK", + "yyyy-MM-ddTHH:mmK", + "yyyyMMddTHH:mmK", + "yyyy-MM-ddTHHK", + "yyyyMMddTHHK", + "o" + ]; +} +#endregion \ No newline at end of file diff --git a/src/Sa/Extensions/StringExtensions.cs b/src/Sa/Extensions/StringExtensions.cs new file mode 100644 index 0000000..040dc4d --- /dev/null +++ b/src/Sa/Extensions/StringExtensions.cs @@ -0,0 +1,14 @@ +using Sa.Classes; +using System.Diagnostics; + +namespace Sa.Extensions; + +public static class StringExtensions +{ + [DebuggerStepThrough] + public static string? NullIfEmpty(this string? str) => string.IsNullOrWhiteSpace(str) ? default : str; + + + [DebuggerStepThrough] + public static uint GetMurmurHash3(this string str, uint seed = 0) => MurmurHash3.Hash32(str.StrToBytes(), seed); +} diff --git a/src/Sa/GlobalSuppressions.cs b/src/Sa/GlobalSuppressions.cs new file mode 100644 index 0000000..36a73cd --- /dev/null +++ b/src/Sa/GlobalSuppressions.cs @@ -0,0 +1,8 @@ +// This file is used by Code Analysis to maintain SuppressMessage +// attributes that are applied to this project. +// Project-level suppressions either have no target or are given +// a specific target and scoped to a namespace, type, member, etc. 
+ +using System.Diagnostics.CodeAnalysis; + +[assembly: SuppressMessage("Minor Code Smell", "S3267:Loops should be simplified with \"LINQ\" expressions", Justification = "")] diff --git a/src/Sa/Host/InstanceIdProvider.cs b/src/Sa/Host/InstanceIdProvider.cs new file mode 100644 index 0000000..614e2be --- /dev/null +++ b/src/Sa/Host/InstanceIdProvider.cs @@ -0,0 +1,16 @@ +namespace Sa.Host; + + +public interface IInstanceIdProvider +{ + string GetInstanceId(); +} + + + +public class DefaultInstanceIdProvider : IInstanceIdProvider +{ + private readonly string _instanceId = $"{Environment.MachineName}-{Guid.NewGuid():N}"; + + public string GetInstanceId() => _instanceId; +} diff --git a/src/Sa/Host/MessageTypeResolver/AssemblyQualifiedNameMessageTypeResolver.cs b/src/Sa/Host/MessageTypeResolver/AssemblyQualifiedNameMessageTypeResolver.cs new file mode 100644 index 0000000..73ceb3d --- /dev/null +++ b/src/Sa/Host/MessageTypeResolver/AssemblyQualifiedNameMessageTypeResolver.cs @@ -0,0 +1,71 @@ +namespace Sa.Host.MessageTypeResolver; + +using System.Collections.Concurrent; +using System.Text.RegularExpressions; + +/// +/// that uses the for the message type string passed in the message header. +/// +public class AssemblyQualifiedNameMessageTypeResolver : IMessageTypeResolver +{ + private static readonly Regex s_RedundantAssemblyTokens = new(@"\, (Version|Culture|PublicKeyToken)\=([\w\d.]+)", RegexOptions.None, TimeSpan.FromSeconds(2)); + + /// + /// Determines whether to emit the Version, Culture and PublicKeyToken along with the Assembly name (for strong assembly naming). + /// + public bool EmitAssemblyStrongName { get; set; } = false; + + private readonly ConcurrentDictionary _toNameCache = []; + private readonly ConcurrentDictionary _toTypeCache = []; + + + + private string ToNameInternal(Type messageType) + { + string assemblyQualifiedName = messageType?.AssemblyQualifiedName ?? 
throw new ArgumentNullException(nameof(messageType)); + + if (EmitAssemblyStrongName) + { + return assemblyQualifiedName; + } + + var reducedName = s_RedundantAssemblyTokens.Replace(assemblyQualifiedName, string.Empty); + + return reducedName; + } + + private static Type ToTypeInternal(string name) + => Type.GetType(name ?? throw new ArgumentNullException(nameof(name))) ?? throw new ArgumentException(null, nameof(name)); + + public string ToName(Type messageType) + { + if (!_toNameCache.TryGetValue(messageType, out _)) + { + string typeName = ToNameInternal(messageType); + + if (_toNameCache.TryAdd(messageType, typeName)) + { + _toTypeCache.TryAdd(typeName, messageType); + } + } + + return _toNameCache.GetValueOrDefault(messageType) ?? throw new ArgumentException(null, nameof(messageType)); + } + + public Type ToType(string name) + { + if (!_toTypeCache.TryGetValue(name, out _)) + { + Type? messageType = ToTypeInternal(name); + + if (_toTypeCache.TryAdd(name, messageType)) + { + _toNameCache.TryAdd(messageType, name); + } + + return messageType; + } + + return _toTypeCache.GetValueOrDefault(name) ?? throw new ArgumentException(null, nameof(name)); + } +} \ No newline at end of file diff --git a/src/Sa/Host/MessageTypeResolver/IMessageTypeResolver.cs b/src/Sa/Host/MessageTypeResolver/IMessageTypeResolver.cs new file mode 100644 index 0000000..0d2c891 --- /dev/null +++ b/src/Sa/Host/MessageTypeResolver/IMessageTypeResolver.cs @@ -0,0 +1,8 @@ +namespace Sa.Host.MessageTypeResolver; + +public interface IMessageTypeResolver +{ + Type? 
ToType(string name); + string ToName(Type messageType); + string ToName() => ToName(typeof(T)); +} diff --git a/src/Sa/Infrastructure/IHasId.cs b/src/Sa/Infrastructure/IHasId.cs new file mode 100644 index 0000000..bb8c848 --- /dev/null +++ b/src/Sa/Infrastructure/IHasId.cs @@ -0,0 +1,12 @@ +namespace Sa.Infrastructure; + + +public interface IHasId +{ + object Id { get; } +} + +public interface IHasId +{ + T Id { get; } +} diff --git a/src/Sa/Sa.csproj b/src/Sa/Sa.csproj new file mode 100644 index 0000000..c508489 --- /dev/null +++ b/src/Sa/Sa.csproj @@ -0,0 +1,15 @@ + + + + net8.0 + enable + enable + + + + + + + + + diff --git a/src/Sa/Serialization/Converter/ObjectToInferredTypesConverter.cs b/src/Sa/Serialization/Converter/ObjectToInferredTypesConverter.cs new file mode 100644 index 0000000..1a449e0 --- /dev/null +++ b/src/Sa/Serialization/Converter/ObjectToInferredTypesConverter.cs @@ -0,0 +1,42 @@ +namespace Sa.Serialization.Converter; + +using System.Text.Json; +using System.Text.Json.Serialization; + +/// +/// Converter that infers object to primitive types. See +/// +/// +public class ObjectToInferredTypesConverter : JsonConverter +{ + public override object? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + => ReadValue(ref reader, options); + + + private static object? 
ReadValue(ref Utf8JsonReader reader, JsonSerializerOptions options) => + reader.TokenType switch + { + JsonTokenType.Null => null, + JsonTokenType.True => true, + JsonTokenType.False => false, + JsonTokenType.Number when reader.TryGetInt64(out var l) => l, + JsonTokenType.Number => reader.GetDouble(), + JsonTokenType.String when reader.TryGetDateTime(out var datetime) => datetime, + JsonTokenType.String => reader.GetString()!, + JsonTokenType.StartArray => ParseList(ref reader, options), + _ => JsonDocument.ParseValue(ref reader).RootElement.Clone() + }; + + private static List ParseList(ref Utf8JsonReader reader, JsonSerializerOptions options) + { + List list = []; + while (reader.Read() && reader.TokenType != JsonTokenType.EndArray) + { + list.Add(ReadValue(ref reader, options)); + } + return list; + } + + public override void Write(Utf8JsonWriter writer, object objectToWrite, JsonSerializerOptions options) => + JsonSerializer.Serialize(writer, objectToWrite, objectToWrite.GetType(), options); +} \ No newline at end of file diff --git a/src/Sa/Setup.cs b/src/Sa/Setup.cs new file mode 100644 index 0000000..53b4eaf --- /dev/null +++ b/src/Sa/Setup.cs @@ -0,0 +1,28 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.ObjectPool; +using Microsoft.IO; +using Sa.Classes; +using Sa.Host; +using Sa.Host.MessageTypeResolver; +using Sa.Timing.Providers; + +namespace Sa; + +public static class Setup +{ + public static IServiceCollection AddSaInfrastructure(this IServiceCollection services) + { + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + + services.TryAddSingleton(); + + + services.TryAddSingleton(); + services.TryAddSingleton(typeof(IArrayPooler<>), typeof(ArrayPooler<>)); + return services; + } +} diff --git a/src/Sa/Timing/Providers/CurrentTimeProvider.cs b/src/Sa/Timing/Providers/CurrentTimeProvider.cs new 
file mode 100644 index 0000000..1245d90 --- /dev/null +++ b/src/Sa/Timing/Providers/CurrentTimeProvider.cs @@ -0,0 +1,11 @@ +using System.Diagnostics; + +namespace Sa.Timing.Providers; + +public class CurrentTimeProvider : ICurrentTimeProvider +{ + [DebuggerStepThrough] + public DateTimeOffset GetUtcNow() => DateTimeOffset.UtcNow; + + public override string ToString() => $"current time: {GetUtcNow()}"; +} diff --git a/src/Sa/Timing/Providers/ICurrentTimeProvider.cs b/src/Sa/Timing/Providers/ICurrentTimeProvider.cs new file mode 100644 index 0000000..f9472f3 --- /dev/null +++ b/src/Sa/Timing/Providers/ICurrentTimeProvider.cs @@ -0,0 +1,6 @@ +namespace Sa.Timing.Providers; + +public interface ICurrentTimeProvider +{ + DateTimeOffset GetUtcNow(); +} diff --git a/src/Samples/Schedule.Console/GlobalSuppressions.cs b/src/Samples/Schedule.Console/GlobalSuppressions.cs new file mode 100644 index 0000000..c47cb56 --- /dev/null +++ b/src/Samples/Schedule.Console/GlobalSuppressions.cs @@ -0,0 +1,9 @@ +// This file is used by Code Analysis to maintain SuppressMessage +// attributes that are applied to this project. +// Project-level suppressions either have no target or are given +// a specific target and scoped to a namespace, type, member, etc. 
+ +using System.Diagnostics.CodeAnalysis; + +[assembly: SuppressMessage("Major Code Smell", "S1118:Utility classes should not have public constructors", Justification = "")] +[assembly: SuppressMessage("Major Bug", "S3903:Types should be defined in named namespaces", Justification = "")] diff --git a/src/Samples/Schedule.Console/Program.cs b/src/Samples/Schedule.Console/Program.cs new file mode 100644 index 0000000..555069d --- /dev/null +++ b/src/Samples/Schedule.Console/Program.cs @@ -0,0 +1,109 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Sa.Schedule; +using Sa.Timing.Providers; +using Schedule.Console; + +internal class Program +{ + static async Task Main() + { + Console.Write("As host service (Y/n): "); + + bool isHostService = Console.ReadKey().Key == ConsoleKey.Y; + + Console.WriteLine(); + + // default configure... + IHostBuilder builder = Host.CreateDefaultBuilder(); + + builder.ConfigureServices(services => + { + services.AddSchedule(builder => + { + if (isHostService) builder.UseHostedService(); + + builder.AddJob() + .WithContextStackSize(3) + .EverySeconds(2) + .WithName("Some 2") + .ConfigureErrorHandling(c => c.IfErrorRetry(2).ThenStopJob()) + ; + + builder.AddInterceptor(); + }); + + services.AddLogging(builder => builder.AddConsole()); + }); + + builder.UseConsoleLifetime(); + + var host = builder.Build(); + + var controller = host.Services.GetRequiredService(); + + + if (isHostService) + { + _ = host.RunAsync(); + } + else + { + var cts = new CancellationTokenSource(); + controller.Start(cts.Token); + + _ = Task.Run(async () => + { + await Task.Delay(30000); + await cts.CancelAsync(); + cts.Dispose(); + Console.WriteLine($"cancelled on timeout"); + }); + } + + _ = Task.Run(async () => + { + await Task.Delay(5000); + await controller.Stop(); + Console.WriteLine($"*** stopped & restart after 2 sec"); + await Task.Delay(2000); + controller.Restart(); + }); + + + 
IHostApplicationLifetime applicationLifetime = host.Services.GetRequiredService(); + await host.WaitForShutdownAsync(applicationLifetime.ApplicationStopping); + + Console.WriteLine("*** THE END ***"); + } +} + + +namespace Schedule.Console +{ + public class SomeJob(ICurrentTimeProvider currentTime) : IJob + { + public async Task Execute(IJobContext context, CancellationToken cancellationToken) + { + await Task.Delay(100, cancellationToken); + if (context.NumIterations >= 4 && context.NumIterations < 6) + { + System.Console.WriteLine($"err {context.FailedIterations}"); + throw new ArgumentException("test"); + } + + System.Console.WriteLine($"{currentTime.GetUtcNow()} {context.NumIterations}: {context.JobName}"); + } + } + + public class SomeInterceptor : IJobInterceptor + { + public async Task OnHandle(IJobContext context, Func next, object? key, CancellationToken cancellationToken) + { + System.Console.WriteLine($"<"); + await next(); + System.Console.WriteLine($">"); + } + } +} \ No newline at end of file diff --git a/src/Samples/Schedule.Console/Properties/launchSettings.json b/src/Samples/Schedule.Console/Properties/launchSettings.json new file mode 100644 index 0000000..a6fa13f --- /dev/null +++ b/src/Samples/Schedule.Console/Properties/launchSettings.json @@ -0,0 +1,14 @@ +{ + "profiles": { + "Skit.Console": { + "commandName": "Project" + }, + "Container (Dockerfile)": { + "commandName": "Docker" + }, + "WSL": { + "commandName": "WSL2", + "distributionName": "" + } + } +} \ No newline at end of file diff --git a/src/Samples/Schedule.Console/Schedule.Console.csproj b/src/Samples/Schedule.Console/Schedule.Console.csproj new file mode 100644 index 0000000..8e1b33e --- /dev/null +++ b/src/Samples/Schedule.Console/Schedule.Console.csproj @@ -0,0 +1,28 @@ + + + + Exe + net8.0 + enable + enable + true + true + Linux + true + + + + + + + + + + + + + PreserveNewest + + + + diff --git a/src/Samples/Schedule.Console/appsettings.json 
b/src/Samples/Schedule.Console/appsettings.json new file mode 100644 index 0000000..45fe774 --- /dev/null +++ b/src/Samples/Schedule.Console/appsettings.json @@ -0,0 +1,9 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft": "Warning", + "Microsoft.Hosting.Lifetime": "Information" + } + } +} \ No newline at end of file diff --git a/src/Samples/StateMachine.Console/LongProcess.cs b/src/Samples/StateMachine.Console/LongProcess.cs new file mode 100644 index 0000000..7c8684d --- /dev/null +++ b/src/Samples/StateMachine.Console/LongProcess.cs @@ -0,0 +1,22 @@ +using Sa.StateMachine; + +namespace StateMachine.Console; + +class LongProcess : SmLongProcess +{ + protected override ISmProcessor CreateProcessor() + { + return new MyProcessor(); + } + + class MyProcessor : Processor + { + public async override ValueTask MoveNext(ISmContext context) + { + // some work + await Task.Delay(1000, context.CancellationToken); + System.Console.WriteLine($"process #{context}"); + return await base.MoveNext(context); + } + } +} diff --git a/src/Samples/StateMachine.Console/Program.cs b/src/Samples/StateMachine.Console/Program.cs new file mode 100644 index 0000000..66ca2df --- /dev/null +++ b/src/Samples/StateMachine.Console/Program.cs @@ -0,0 +1,33 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using StateMachine.Console; + + +Console.WriteLine("Hello, World!"); + +LongProcess lp = new(); + +var i = 0; +await foreach (var c in lp) +{ + Console.WriteLine($"{i++}. {c.CurrentState}"); +} + +// default configure... 
+IHostBuilder builder = Host.CreateDefaultBuilder(); + +builder.ConfigureServices(services => +{ + services.AddLogging(builder => builder.AddConsole()); +}); + +builder.UseConsoleLifetime(); + +var host = builder.Build(); + + +IHostApplicationLifetime applicationLifetime = host.Services.GetRequiredService(); +await host.WaitForShutdownAsync(applicationLifetime.ApplicationStopping); + +Console.WriteLine("*** THE END ***"); diff --git a/src/Samples/StateMachine.Console/StateMachine.Console.csproj b/src/Samples/StateMachine.Console/StateMachine.Console.csproj new file mode 100644 index 0000000..8703ff5 --- /dev/null +++ b/src/Samples/StateMachine.Console/StateMachine.Console.csproj @@ -0,0 +1,18 @@ + + + + Exe + net8.0 + enable + enable + + + + + + + + + + + diff --git a/src/Tests/Fixtures/Sa.Data.PostgreSql.Fixture/PgDataSourceCollection.cs b/src/Tests/Fixtures/Sa.Data.PostgreSql.Fixture/PgDataSourceCollection.cs new file mode 100644 index 0000000..f7fef5a --- /dev/null +++ b/src/Tests/Fixtures/Sa.Data.PostgreSql.Fixture/PgDataSourceCollection.cs @@ -0,0 +1,7 @@ +namespace Sa.Data.PostgreSql.Fixture; + +/// +/// +/// +[CollectionDefinition(nameof(PgDataSourceFixture))] +public class PgDataSourceCollection : ICollectionFixture; diff --git a/src/Tests/Fixtures/Sa.Data.PostgreSql.Fixture/PgDataSourceFixture.cs b/src/Tests/Fixtures/Sa.Data.PostgreSql.Fixture/PgDataSourceFixture.cs new file mode 100644 index 0000000..43680a8 --- /dev/null +++ b/src/Tests/Fixtures/Sa.Data.PostgreSql.Fixture/PgDataSourceFixture.cs @@ -0,0 +1,38 @@ +namespace Sa.Data.PostgreSql.Fixture; + + +public class PgDataSourceFixture : PostgreSqlFixture + where TSub : notnull +{ + private readonly Lazy _dataSource; + + protected PgDataSourceFixture(PostgreSqlFixtureSettings? settings) + : base(settings ?? 
PostgreSqlFixtureSettings.Instance) + => _dataSource = new(() + => IPgDataSource.Create(this.ConnectionString)); + + public PgDataSourceFixture() : this(null) { } + + public IPgDataSource DataSource => _dataSource.Value; + + public async override Task DisposeAsync() + { + if (_dataSource.IsValueCreated && _dataSource.Value is IAsyncDisposable disposable) + { + await disposable.DisposeAsync(); + } + + await base.DisposeAsync(); + } + + public async Task CheckTable(string tablename) + { + await DataSource.ExecuteNonQuery($"SELECT '{tablename}'::regclass;"); + } +} + + +public class PgDataSourceFixture : PgDataSourceFixture +{ + +} \ No newline at end of file diff --git a/src/Tests/Fixtures/Sa.Data.PostgreSql.Fixture/PostgreSqlFixture.cs b/src/Tests/Fixtures/Sa.Data.PostgreSql.Fixture/PostgreSqlFixture.cs new file mode 100644 index 0000000..3c3941e --- /dev/null +++ b/src/Tests/Fixtures/Sa.Data.PostgreSql.Fixture/PostgreSqlFixture.cs @@ -0,0 +1,42 @@ +using Sa.Fixture; +using Testcontainers.PostgreSql; + +namespace Sa.Data.PostgreSql.Fixture; + + +/// +/// +/// +public abstract class PostgreSqlFixture : SaFixture + where TSettings : PostgreSqlFixtureSettings + where TSub : notnull +{ + private readonly PostgreSqlContainer container; + + protected PostgreSqlFixture(TSettings settings) + : base(settings) + { + var builder = new PostgreSqlBuilder() + .WithImage(settings.DockerImage) + ; + + settings.Configure?.Invoke(builder); + container = builder.Build(); + } + + public virtual string ConnectionString => container.GetConnectionString(); + + public string ContainerId => $"{container.Id}"; + + public override async Task InitializeAsync() + { + await base.InitializeAsync(); + await container.StartAsync(); + } + + public override async Task DisposeAsync() + { + await container.DisposeAsync(); + await base.DisposeAsync(); + } +} diff --git a/src/Tests/Fixtures/Sa.Data.PostgreSql.Fixture/PostgreSqlFixtureSettings.cs 
b/src/Tests/Fixtures/Sa.Data.PostgreSql.Fixture/PostgreSqlFixtureSettings.cs new file mode 100644 index 0000000..8098ba9 --- /dev/null +++ b/src/Tests/Fixtures/Sa.Data.PostgreSql.Fixture/PostgreSqlFixtureSettings.cs @@ -0,0 +1,11 @@ +using Testcontainers.PostgreSql; + +namespace Sa.Data.PostgreSql.Fixture; + +public record PostgreSqlFixtureSettings +{ + public string DockerImage { get; set; } = "postgres:15-alpine"; + public Action? Configure { get; set; } + + public readonly static PostgreSqlFixtureSettings Instance = new(); +} diff --git a/src/Tests/Fixtures/Sa.Data.PostgreSql.Fixture/Sa.Data.PostgreSql.Fixture.csproj b/src/Tests/Fixtures/Sa.Data.PostgreSql.Fixture/Sa.Data.PostgreSql.Fixture.csproj new file mode 100644 index 0000000..ffb4708 --- /dev/null +++ b/src/Tests/Fixtures/Sa.Data.PostgreSql.Fixture/Sa.Data.PostgreSql.Fixture.csproj @@ -0,0 +1,16 @@ + + + + + + + + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + diff --git a/src/Tests/Fixtures/Sa.Fixture/Sa.Fixture.csproj b/src/Tests/Fixtures/Sa.Fixture/Sa.Fixture.csproj new file mode 100644 index 0000000..0e15e2f --- /dev/null +++ b/src/Tests/Fixtures/Sa.Fixture/Sa.Fixture.csproj @@ -0,0 +1,3 @@ + + + diff --git a/src/Tests/Fixtures/Sa.Fixture/SaFixture.cs b/src/Tests/Fixtures/Sa.Fixture/SaFixture.cs new file mode 100644 index 0000000..015c5a0 --- /dev/null +++ b/src/Tests/Fixtures/Sa.Fixture/SaFixture.cs @@ -0,0 +1,63 @@ +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; + +namespace Sa.Fixture; + + +public abstract class SaFixture : IAsyncLifetime +{ + private readonly Lazy _serviceProvider; + + protected IServiceCollection Services { get; } + = new ServiceCollection() + .AddSingleton(typeof(ILogger<>), typeof(NullLogger<>)) + .AddSingleton(); + + protected IConfigurationRoot Configuration { get; } + = new 
ConfigurationBuilder().AddJsonFile("appsettings.json", true).Build(); + + public Action? SetupServices { get; set; } + + protected SaFixture(TSettings settings) + { + Settings = settings; + _serviceProvider = new Lazy(() => + { + SetupServices?.Invoke(Services, Configuration); + return Services.BuildServiceProvider(); + }); + } + + public TSettings Settings { get; } + + public IServiceProvider ServiceProvider => _serviceProvider.Value; + + public virtual Task InitializeAsync() => Task.CompletedTask; + + public async virtual Task DisposeAsync() + { + if (_serviceProvider.IsValueCreated) await _serviceProvider.Value.DisposeAsync(); + } +} + + +public abstract class SaFixture : SaFixture + where TSub : notnull +{ + private readonly Lazy sub; + + protected SaFixture(TSettings settings) : base(settings) + { + sub = new Lazy(() => ServiceProvider.GetRequiredService()); + } + + public TSub Sub => sub.Value; +} + + +public abstract class SaSubFixture() : SaFixture(new()) + where TSub : notnull +{ +} \ No newline at end of file diff --git a/src/Tests/Host.Test.Properties.xml b/src/Tests/Host.Test.Properties.xml new file mode 100644 index 0000000..1533971 --- /dev/null +++ b/src/Tests/Host.Test.Properties.xml @@ -0,0 +1,34 @@ + + + + + + + net8.0 + enable + enable + + false + + + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + + + + + diff --git a/src/Tests/Sa.Data.PostgreSqlTests/PgDataSourceTests.cs b/src/Tests/Sa.Data.PostgreSqlTests/PgDataSourceTests.cs new file mode 100644 index 0000000..719f009 --- /dev/null +++ b/src/Tests/Sa.Data.PostgreSqlTests/PgDataSourceTests.cs @@ -0,0 +1,68 @@ +using Microsoft.Extensions.DependencyInjection; +using Sa.Data.PostgreSql; +using Sa.Data.PostgreSql.Fixture; + +namespace Sa.Data.PostgreSqlTests; + +public class PgDataSourceTests(PgDataSourceFixture fixture) : IClassFixture +{ + [Fact()] + public async Task 
ExecuteNonQueryTest() + { + const int expected = -1; + var actual = await fixture.DataSource.ExecuteNonQuery("SELECT 2"); + Assert.Equal(expected, actual); + } + + [Fact()] + public async Task ExecuteNonQueryWithParamsTest() + { + const int expected = 1; + var actual = await fixture.DataSource.ExecuteNonQuery(""" + CREATE TABLE IF NOT EXISTS users ( + name text, + age int + ); + + INSERT INTO users (name, age) VALUES (@p1, @p2); + """, + [ + new("p1", "Tom"), + new("p2", 18) + ]) + ; + Assert.Equal(expected, actual); + } + + + [Fact()] + public async Task ExecuteReaderTest() + { + Console.WriteLine(fixture.ConnectionString); + + const int expected = 1; + int actual = 0; + + await fixture.DataSource.ExecuteReader("SELECT 1", (reader, i) => actual = reader.GetInt32(0)); + + Assert.Equal(expected, actual); + } + + + [Fact()] + public async Task DiDataSourceTest() + { + ServiceCollection services = new(); + services.AddPgDataSource(b => b.WithConnectionString(fixture.ConnectionString)); + using var serviceProvider = services.BuildServiceProvider(); + + IPgDataSource dataSource = serviceProvider.GetRequiredService(); + + const int expected = 1; + int actual = 0; + + await dataSource.ExecuteReader("SELECT 1", (reader, i) => actual = reader.GetInt32(0)); + + Assert.Equal(expected, actual); + } +} diff --git a/src/Tests/Sa.Data.PostgreSqlTests/Sa.Data.PostgreSqlTests.csproj b/src/Tests/Sa.Data.PostgreSqlTests/Sa.Data.PostgreSqlTests.csproj new file mode 100644 index 0000000..2988071 --- /dev/null +++ b/src/Tests/Sa.Data.PostgreSqlTests/Sa.Data.PostgreSqlTests.csproj @@ -0,0 +1,14 @@ + + + + + + true + + + + + + + + diff --git a/src/Tests/Sa.MediaTests/Data/m1.wav b/src/Tests/Sa.MediaTests/Data/m1.wav new file mode 100644 index 0000000..453a854 Binary files /dev/null and b/src/Tests/Sa.MediaTests/Data/m1.wav differ diff --git a/src/Tests/Sa.MediaTests/Sa.MediaTests.csproj b/src/Tests/Sa.MediaTests/Sa.MediaTests.csproj new file mode 100644 index 0000000..3a254e2 --- 
/dev/null +++ b/src/Tests/Sa.MediaTests/Sa.MediaTests.csproj @@ -0,0 +1,21 @@ + + + + + + true + + + + + + + + + + + PreserveNewest + + + + diff --git a/src/Tests/Sa.MediaTests/WavFileTests.cs b/src/Tests/Sa.MediaTests/WavFileTests.cs new file mode 100644 index 0000000..9e68e54 --- /dev/null +++ b/src/Tests/Sa.MediaTests/WavFileTests.cs @@ -0,0 +1,34 @@ +using Sa.Media.Wav; + +namespace Sa.MediaTests; + +public class WavFileTests +{ + [Theory] + [InlineData("Data/m1.wav")] + public void ReadHeaderTest(string filename) + { + using WavFile wavFile = new WavFile() + .WithFileName(filename) + .ReadHeader(); + + Assert.NotNull(wavFile); + Assert.True(wavFile.IsPcmWave); + } + + + [Theory] + [InlineData("Data/m1.wav")] + public void WriteChannelTest(string filename) + { + using WavFile wavFile = new WavFile().WithFileName(filename); + + var file = Path.Combine(Path.GetDirectoryName(filename)!, Path.GetFileNameWithoutExtension(filename) + "_02.wav"); + + long len = wavFile.WriteChannel(file, 1); + + Assert.True(len > 0); + + File.Delete(file); + } +} \ No newline at end of file diff --git a/src/Tests/Sa.Outbox.PostgreSqlTests/Delivery/DeliveryLongProcessorTests.cs b/src/Tests/Sa.Outbox.PostgreSqlTests/Delivery/DeliveryLongProcessorTests.cs new file mode 100644 index 0000000..1bdef35 --- /dev/null +++ b/src/Tests/Sa.Outbox.PostgreSqlTests/Delivery/DeliveryLongProcessorTests.cs @@ -0,0 +1,68 @@ +using Microsoft.Extensions.DependencyInjection; +using Sa.Outbox.Delivery; + +namespace Sa.Outbox.PostgreSqlTests.Delivery; + +public class DeliveryLongProcessorTests(DeliveryLongProcessorTests.Fixture fixture) : IClassFixture +{ + public class TestMessageConsumer : IConsumer + { + public async ValueTask Consume(IReadOnlyCollection> outboxMessages, CancellationToken cancellationToken) + { + Console.WriteLine(outboxMessages.Count); + await Task.Delay(1000, cancellationToken); + } + } + + + public class Fixture : OutboxPostgreSqlFixture + { + public Fixture() : base() + { + 
Services.AddOutbox(builder + => builder + .WithPartitioningSupport((_, sp) + => sp.GetTenantIds = t => Task.FromResult([1, 2]) + ) + .WithDeliveries(builder + => builder.AddDelivery() + ) + ) + ; + } + + public IOutboxMessagePublisher Publisher => ServiceProvider.GetRequiredService(); + } + + + private IDeliveryProcessor Sub => fixture.Sub; + + + [Fact] + public async Task Deliver_LongProcess_MustBe_Work() + { + Console.Write(fixture.ConnectionString); + + List messages = + [ + new TestMessage { PayloadId = "11", Content = "Message 1", TenantId = 1}, + new TestMessage { PayloadId = "12", Content = "Message 2", TenantId = 2} + ]; + + var cnt = await fixture.Publisher.Publish(messages); + Assert.True(cnt > 0); + + var settings = new OutboxDeliverySettings(Guid.NewGuid()) + { + ExtractSettings = + { + LockDuration = TimeSpan.FromMilliseconds(300), + LockRenewal = TimeSpan.FromMilliseconds(100), + ForEachTenant = true, + } + }; + + var result = await Sub.ProcessMessages(settings, CancellationToken.None); + Assert.True(result > 0); + } +} diff --git a/src/Tests/Sa.Outbox.PostgreSqlTests/Delivery/DeliveryPermanentErrorTests.cs b/src/Tests/Sa.Outbox.PostgreSqlTests/Delivery/DeliveryPermanentErrorTests.cs new file mode 100644 index 0000000..3d93407 --- /dev/null +++ b/src/Tests/Sa.Outbox.PostgreSqlTests/Delivery/DeliveryPermanentErrorTests.cs @@ -0,0 +1,82 @@ +using Microsoft.Extensions.DependencyInjection; +using Sa.Outbox.Delivery; + +namespace Sa.Outbox.PostgreSqlTests.Delivery; + +public class DeliveryPermanentErrorTests(DeliveryPermanentErrorTests.Fixture fixture) : IClassFixture +{ + + public class TestException(string message) : Exception(message) + { + } + + + public class TestMessageConsumer : IConsumer + { + private static readonly TestException s_err = new("test permanent error"); + + public async ValueTask Consume(IReadOnlyCollection> outboxMessages, CancellationToken cancellationToken) + { + await Task.Delay(100, cancellationToken); + foreach (IOutboxContext 
msg in outboxMessages) + { + msg.PermanentError(s_err, "test"); + } + } + } + + + public class Fixture : OutboxPostgreSqlFixture + { + public Fixture() : base() + { + Services + .AddOutbox(builder + => builder.WithPartitioningSupport((_, sp) + => sp.GetTenantIds = t => Task.FromResult([1, 2]) + ) + .WithDeliveries(builder + => builder.AddDelivery() + ) + ); + } + + public IOutboxMessagePublisher Publisher => ServiceProvider.GetRequiredService(); + } + + + private IDeliveryProcessor Sub => fixture.Sub; + + + [Fact] + public async Task Deliver_ErrorProcess_MustBe_Logged() + { + Console.Write(fixture.ConnectionString); + + List messages = + [ + new TestMessage { PayloadId = "11", Content = "Message 1", TenantId = 1}, + new TestMessage { PayloadId = "12", Content = "Message 2", TenantId = 2} + ]; + + var cnt = await fixture.Publisher.Publish(messages); + Assert.True(cnt > 0); + + var settings = new OutboxDeliverySettings(Guid.NewGuid()) + { + ExtractSettings = + { + ForEachTenant = true, + } + }; + + var result = await Sub.ProcessMessages(settings, CancellationToken.None); + + Assert.Equal(0, result); + + + int errCount = await fixture.DataSource.ExecuteReaderFirst("select count(error_id) from outbox__$error"); + + Assert.Equal(1, errCount); + } +} diff --git a/src/Tests/Sa.Outbox.PostgreSqlTests/Delivery/DeliveryProcessorTests.cs b/src/Tests/Sa.Outbox.PostgreSqlTests/Delivery/DeliveryProcessorTests.cs new file mode 100644 index 0000000..2a33338 --- /dev/null +++ b/src/Tests/Sa.Outbox.PostgreSqlTests/Delivery/DeliveryProcessorTests.cs @@ -0,0 +1,65 @@ +using Microsoft.Extensions.DependencyInjection; +using Sa.Outbox.Delivery; + +namespace Sa.Outbox.PostgreSqlTests.Delivery; + +public class DeliveryProcessorTests(DeliveryProcessorTests.Fixture fixture) : IClassFixture +{ + public class TestMessageConsumer : IConsumer + { + public async ValueTask Consume(IReadOnlyCollection> outboxMessages, CancellationToken cancellationToken) + { + 
Console.WriteLine(outboxMessages.Count); + await Task.Delay(100, cancellationToken); + } + } + + + public class Fixture : OutboxPostgreSqlFixture + { + public Fixture() : base() + { + Services + .AddOutbox(builder + => builder.WithPartitioningSupport((_, ps) + => ps.GetTenantIds = t => Task.FromResult([1, 2]) + ) + .WithDeliveries(builder + => builder.AddDelivery(null, instanceCount: 0) + ) + ); + } + + public IOutboxMessagePublisher Publisher => ServiceProvider.GetRequiredService(); + } + + + private IDeliveryProcessor Sub => fixture.Sub; + + + [Fact] + public async Task Deliver_Process_MustBe_Work() + { + Console.Write(fixture.ConnectionString); + + List messages = + [ + new TestMessage { PayloadId = "11", Content = "Message 1", TenantId = 1}, + new TestMessage { PayloadId = "12", Content = "Message 2", TenantId = 2} + ]; + + var cnt = await fixture.Publisher.Publish(messages); + Assert.True(cnt > 0); + + var settings = new OutboxDeliverySettings(Guid.NewGuid()) + { + ExtractSettings = + { + ForEachTenant = true, + } + }; + + var result = await Sub.ProcessMessages(settings, CancellationToken.None); + Assert.True(result > 0); + } +} diff --git a/src/Tests/Sa.Outbox.PostgreSqlTests/Delivery/DeliveryRetryErrorTests.cs b/src/Tests/Sa.Outbox.PostgreSqlTests/Delivery/DeliveryRetryErrorTests.cs new file mode 100644 index 0000000..37bb6f6 --- /dev/null +++ b/src/Tests/Sa.Outbox.PostgreSqlTests/Delivery/DeliveryRetryErrorTests.cs @@ -0,0 +1,101 @@ +using Microsoft.Extensions.DependencyInjection; +using Sa.Outbox.Delivery; + +namespace Sa.Outbox.PostgreSqlTests.Delivery; + +public class DeliveryRetryErrorTests(DeliveryRetryErrorTests.Fixture fixture) : IClassFixture +{ + + public class TestException(string message) : Exception(message) + { + } + + + public class TestMessageConsumer : IConsumer + { + private static readonly TestException s_err = new("test same error"); + + public async ValueTask Consume(IReadOnlyCollection> outboxMessages, CancellationToken 
cancellationToken) + { + await Task.Delay(100, cancellationToken); + foreach (IOutboxContext msg in outboxMessages) + { + msg.Error(s_err, "test"); + } + } + } + + + public class Fixture : OutboxPostgreSqlFixture + { + public Fixture() : base() + { + Services + .AddOutbox(builder => builder + .WithPartitioningSupport((_, sp) + => sp.GetTenantIds = t => Task.FromResult([1, 2]) + ) + .WithDeliveries(builder + => builder.AddDelivery() + ) + ); + } + + public IOutboxMessagePublisher Publisher => ServiceProvider.GetRequiredService(); + } + + + private IDeliveryProcessor Sub => fixture.Sub; + + + [Fact] + public async Task Deliver_RetriesOnErrorProcess_MustBe_Logged_501() + { + Console.Write(fixture.ConnectionString); + + List messages = [new TestMessage { PayloadId = "11", Content = "Message 1", TenantId = 1 }]; + + ulong cnt = await fixture.Publisher.Publish(messages); + Assert.True(cnt > 0); + + const int MaxDeliveryAttempts = 2; + + var settings = new OutboxDeliverySettings(Guid.NewGuid()) + { + ExtractSettings = + { + ForEachTenant = true, + LockDuration = TimeSpan.FromMilliseconds(0), + LockRenewal = TimeSpan.FromMinutes(10) + }, + ConsumeSettings = + { + MaxDeliveryAttempts = MaxDeliveryAttempts + } + }; + + cnt = 0; + while (cnt < MaxDeliveryAttempts) + { + await Task.Delay(300); + + await Sub.ProcessMessages(settings, CancellationToken.None); + int attempt = await GetDeliveries(); + if (attempt > MaxDeliveryAttempts) + { + cnt++; + } + } + + int errCount = await fixture.DataSource.ExecuteReaderFirst("select count(error_id) from outbox__$error"); + Assert.Equal(1, errCount); + + string delivery_id = await fixture.DataSource.ExecuteReaderFirst("select delivery_id from outbox__$delivery where delivery_status_code = 501"); + Assert.NotEmpty(delivery_id); + + string outbox_delivery_id = await fixture.DataSource.ExecuteReaderFirst("SELECT outbox_delivery_id FROM public.outbox WHERE outbox_delivery_status_code = 501"); + Assert.Equal(delivery_id, outbox_delivery_id); + 
} + + private Task GetDeliveries() => fixture.DataSource.ExecuteReaderFirst("select count(delivery_id) from outbox__$delivery"); +} diff --git a/src/Tests/Sa.Outbox.PostgreSqlTests/OutboxParallelMessagingTests.cs b/src/Tests/Sa.Outbox.PostgreSqlTests/OutboxParallelMessagingTests.cs new file mode 100644 index 0000000..e638ac1 --- /dev/null +++ b/src/Tests/Sa.Outbox.PostgreSqlTests/OutboxParallelMessagingTests.cs @@ -0,0 +1,178 @@ +using Microsoft.Extensions.DependencyInjection; +using Sa.Data.PostgreSql.Fixture; +using Sa.Outbox.PostgreSql; +using Sa.Outbox.Support; +using Sa.Partitional.PostgreSql; +using Sa.Schedule; + +namespace Sa.Outbox.PostgreSqlTests; + +public class OutboxParallelMessagingTests(OutboxParallelMessagingTests.Fixture fixture) : IClassFixture +{ + static class GenMessageRange + { + + public const int Threads = 5; + const int From = 100; + const int To = 1000; + + public static int GetMessageCount() => Random.Shared.Next(From, To); + } + + + [OutboxMessage(part: "multi_1")] + public class SomeMessage1 : IOutboxPayloadMessage + { + public string PayloadId { get; set; } = Guid.NewGuid().ToString(); + public int TenantId { get; set; } = Random.Shared.Next(1, 2); + } + + [OutboxMessage(part: "multi_2")] + public class SomeMessage2 : IOutboxPayloadMessage + { + public string PayloadId { get; set; } = Guid.NewGuid().ToString(); + public int TenantId { get; set; } = Random.Shared.Next(1, 2); + } + + + public static class CommonCounter + { + internal static int Counter = 0; + + public static void Add(int count) + { + Interlocked.Add(ref Counter, count); + } + } + + public class SomeMessageConsumer1 : IConsumer + { + public ValueTask Consume(IReadOnlyCollection> outboxMessages, CancellationToken cancellationToken) + { + CommonCounter.Add(outboxMessages.Count); + return ValueTask.CompletedTask; + } + } + + + public class SomeMessageConsumer2 : IConsumer + { + public ValueTask Consume(IReadOnlyCollection> outboxMessages, CancellationToken cancellationToken) 
+ { + CommonCounter.Add(outboxMessages.Count); + return ValueTask.CompletedTask; + } + } + + public class Fixture : PgDataSourceFixture + { + public Fixture() : base() + { + Services + .AddOutbox(builder => + { + builder + .WithPartitioningSupport((_, sp) => + { + sp.ForEachTenant = true; + sp.GetTenantIds = t => Task.FromResult([1, 2]); + }) + .WithDeliveries(builder => builder + .AddDelivery((_, settings) => + { + settings.ScheduleSettings.ExecutionInterval = TimeSpan.FromMilliseconds(500); + settings.ScheduleSettings.InitialDelay = TimeSpan.Zero; + settings.ExtractSettings.MaxBatchSize = 1024; + }) + .AddDelivery((_, settings) => + { + settings.ScheduleSettings.ExecutionInterval = TimeSpan.FromMilliseconds(500); + settings.ScheduleSettings.InitialDelay = TimeSpan.Zero; + settings.ExtractSettings.MaxBatchSize = 1024; + }) + ); + builder.PublishSettings.MaxBatchSize = 1024; + }) + .AddOutboxUsingPostgreSql(cfg => + { + cfg.AddDataSource(c => c.WithConnectionString(_ => this.ConnectionString)); + cfg.WithPgOutboxSettings((_, settings) => + { + settings.TableSettings.DatabaseSchemaName = "parallel"; + settings.CleanupSettings.DropPartsAfterRetention = TimeSpan.FromDays(1); + }); + }); + } + } + + IServiceProvider ServiceProvider => fixture.ServiceProvider; + + [Fact] + public async Task ParallelMessaging_MustBeProcessed() + { + Console.WriteLine(fixture.ConnectionString); + + // start cron schedules + IScheduler scheduler = ServiceProvider.GetRequiredService(); + int i = scheduler.Start(CancellationToken.None); + Assert.True(i > 0); + + // start delivery message + var publisher = ServiceProvider.GetRequiredService(); + + List> tasks = [ + RunPublish(publisher) + , RunPublish(publisher) + ]; + + + await Task.WhenAll(tasks); + + long total = tasks.Select(c => c.Result) + .DefaultIfEmpty() + .Aggregate((t1, t2) => t1 + t2); + + var migrationService = ServiceProvider.GetRequiredService(); + + bool r = await migrationService.WaitMigration(TimeSpan.FromSeconds(3)); + 
Assert.True(r, "none migration"); + + // delay for consume + while (CommonCounter.Counter < (int)total) + { + // delay for consume + await Task.Delay(300); + } + + await scheduler.Stop(); + + Assert.True(CommonCounter.Counter > 0); + } + + private static async Task RunPublish(IOutboxMessagePublisher publisher) + where T : IOutboxPayloadMessage, new() + { + long total = 0; + List nodes = Enumerable.Range(1, GenMessageRange.Threads).ToList(); + ParallelLoopResult loop = Parallel.ForEach(nodes, async node => + { + List messages = []; + int messageCount = GenMessageRange.GetMessageCount(); + for (int j = 0; j < messageCount; j++) + { + Interlocked.Increment(ref total); + messages.Add(new T()); + } + + await publisher.Publish(messages); + }); + + + while (!loop.IsCompleted) + { + await Task.Delay(300); + } + + return total; + } +} \ No newline at end of file diff --git a/src/Tests/Sa.Outbox.PostgreSqlTests/OutboxPostgreSqlFixture.cs b/src/Tests/Sa.Outbox.PostgreSqlTests/OutboxPostgreSqlFixture.cs new file mode 100644 index 0000000..5f3287f --- /dev/null +++ b/src/Tests/Sa.Outbox.PostgreSqlTests/OutboxPostgreSqlFixture.cs @@ -0,0 +1,15 @@ +using Sa.Data.PostgreSql.Fixture; +using Sa.Outbox.PostgreSql; + +namespace Sa.Outbox.PostgreSqlTests; + +public class OutboxPostgreSqlFixture : PgDataSourceFixture + where TSub : notnull +{ + public OutboxPostgreSqlFixture() + { + Services.AddOutboxUsingPostgreSql(builder + => builder.AddDataSource(b + => b.WithConnectionString(sp => ConnectionString))); + } +} diff --git a/src/Tests/Sa.Outbox.PostgreSqlTests/OutboxTests.cs b/src/Tests/Sa.Outbox.PostgreSqlTests/OutboxTests.cs new file mode 100644 index 0000000..f239f44 --- /dev/null +++ b/src/Tests/Sa.Outbox.PostgreSqlTests/OutboxTests.cs @@ -0,0 +1,105 @@ +using Microsoft.Extensions.DependencyInjection; +using Sa.Data.PostgreSql.Fixture; +using Sa.Outbox.PostgreSql; +using Sa.Outbox.Support; +using Sa.Partitional.PostgreSql; +using Sa.Schedule; + +namespace Sa.Outbox.PostgreSqlTests; 
+ +public class OutBoxTests(OutBoxTests.Fixture fixture) : IClassFixture +{ + + [OutboxMessage(part: "some")] + public class SomeMessage : IOutboxPayloadMessage + { + public string PayloadId { get; set; } = default!; + public int TenantId { get; set; } + } + + public class SomeMessageConsumer : IConsumer + { + internal static int Counter = 0; + + public async ValueTask Consume(IReadOnlyCollection> outboxMessages, CancellationToken cancellationToken) + { + Interlocked.Add(ref Counter, outboxMessages.Count); + await Task.Delay(100, cancellationToken); + } + } + + public class Fixture : PgDataSourceFixture + { + public Fixture() : base() + { + Services + .AddOutbox(builder => builder + .WithPartitioningSupport((_, sp) => + { + sp.ForEachTenant = true; + sp.GetTenantIds = t => Task.FromResult([1, 2]); + }) + .WithDeliveries(builder => builder + .AddDelivery((_, settings) => + { + settings.ScheduleSettings.ExecutionInterval = TimeSpan.FromMilliseconds(100); + settings.ScheduleSettings.InitialDelay = TimeSpan.Zero; + settings.ExtractSettings.MaxBatchSize = 1; + }) + ) + ) + .AddOutboxUsingPostgreSql(cfg => + { + cfg.AddDataSource(c => c.WithConnectionString(_ => this.ConnectionString)); + cfg.WithPgOutboxSettings((_, settings) => + { + settings.TableSettings.DatabaseSchemaName = "test"; + settings.CleanupSettings.DropPartsAfterRetention = TimeSpan.FromDays(1); + }); + }) + ; + } + } + + + IServiceProvider ServiceProvider => fixture.ServiceProvider; + + + [Fact] + public async Task OutBoxTest() + { + Console.WriteLine(fixture.ConnectionString); + + // start cron schedules + var scheduler = ServiceProvider.GetRequiredService(); + int i = scheduler.Start(CancellationToken.None); + Assert.True(i > 0); + + // start delivery message + var publisher = ServiceProvider.GetRequiredService(); + + ulong total = await publisher.Publish([ + new SomeMessage { TenantId = 1 } + , new SomeMessage { TenantId = 1 } + , new SomeMessage { TenantId = 1 } + , new SomeMessage { TenantId = 1 } + 
]); + + var migrationService = ServiceProvider.GetRequiredService(); + + bool r = await migrationService.WaitMigration(TimeSpan.FromSeconds(3)); + Assert.True(r, "none migration"); + + // delay for consume + int j = 0; + while (SomeMessageConsumer.Counter < (int)total && j++ < 10) + { + // delay for consume + await Task.Delay(300); + } + + await scheduler.Stop(); + + Assert.True(SomeMessageConsumer.Counter > 0); + } +} diff --git a/src/Tests/Sa.Outbox.PostgreSqlTests/Publisher/OutboxPublisherTests.cs b/src/Tests/Sa.Outbox.PostgreSqlTests/Publisher/OutboxPublisherTests.cs new file mode 100644 index 0000000..8cb79c2 --- /dev/null +++ b/src/Tests/Sa.Outbox.PostgreSqlTests/Publisher/OutboxPublisherTests.cs @@ -0,0 +1,34 @@ +namespace Sa.Outbox.PostgreSqlTests.Publisher; + + +public class OutboxPublisherTests(OutboxPublisherTests.Fixture fixture) : IClassFixture +{ + public class Fixture : OutboxPostgreSqlFixture + { + } + + private IOutboxMessagePublisher Sub => fixture.Sub; + + + [Fact] + public async Task Publish_MultipleMessages_ReturnsExpectedResult() + { + Console.Write(fixture.ConnectionString); + + // Arrange + List messages = + [ + new TestMessage { PayloadId = "1", Content = "Message 1", TenantId = 1}, + new TestMessage { PayloadId = "2", Content = "Message 2", TenantId = 2} + ]; + + // Act + ulong result = await Sub.Publish(messages); + + // Assert + Assert.Equal(2, (int)result); + + int count = await fixture.DataSource.ExecuteReaderFirst("select count(*) from outbox"); + Assert.Equal(2, count); + } +} diff --git a/src/Tests/Sa.Outbox.PostgreSqlTests/Sa.Outbox.PostgreSqlTests.csproj b/src/Tests/Sa.Outbox.PostgreSqlTests/Sa.Outbox.PostgreSqlTests.csproj new file mode 100644 index 0000000..34c5ac6 --- /dev/null +++ b/src/Tests/Sa.Outbox.PostgreSqlTests/Sa.Outbox.PostgreSqlTests.csproj @@ -0,0 +1,16 @@ + + + + + + true + true + 1 + + + + + + + + diff --git a/src/Tests/Sa.Outbox.PostgreSqlTests/Serialization/OutboxMessageSerializerTests.cs 
b/src/Tests/Sa.Outbox.PostgreSqlTests/Serialization/OutboxMessageSerializerTests.cs new file mode 100644 index 0000000..089e206 --- /dev/null +++ b/src/Tests/Sa.Outbox.PostgreSqlTests/Serialization/OutboxMessageSerializerTests.cs @@ -0,0 +1,93 @@ +using Sa.Extensions; +using Sa.Fixture; +using Sa.Outbox.PostgreSql.Serialization; +using System.Text.Json; + +namespace Sa.Outbox.PostgreSqlTests.Serialization; + + +public class OutboxMessageSerializerTests(OutboxMessageSerializerTests.Fixture fixture) : IClassFixture +{ + + public class Fixture : SaSubFixture + { + public Fixture() : base() + { + Services.AddOutboxMessageSerializer(new()); + } + } + + + private readonly IOutboxMessageSerializer sub = fixture.Sub; + + + [Fact] + public async Task Serialize_WithValidObject() + { + // Arrange + var obj = new TestMessage { Id = 1, Content = "Test" }; + + using var stream = new MemoryStream(); + // Act + await sub.SerializeAsync(stream, obj); + + // Assert + Assert.True(stream.Length > 0); + } + + [Fact] + public async Task Serialize_WithNullObject_ThrowsArgumentNullException() + { + using var stream = new MemoryStream(); + // Act & Assert + await Assert.ThrowsAsync(() + => sub.SerializeAsync(stream, default!)); + } + + [Fact] + public async Task Deserialize_WithValid_JsonReturnsObject() + { + // Arrange + var json = "{\"Id\":1,\"Content\":\"Test\"}"; + + // Act + using var stream = new MemoryStream(); + stream.Write(json.StrToBytes()); + stream.Position = 0; + + TestMessage? 
message = await sub.DeserializeAsync(stream); + + // Assert + Assert.NotNull(message); + Assert.Equal(1, message.Id); + Assert.Equal("Test", message.Content); + } + + [Fact] + public async Task Deserialize_WithInvalidJson_ThrowsJsonException() + { + // Arrange + var invalidJson = "invalid json"; + + using var stream = new MemoryStream(); + stream.Write(invalidJson.StrToBytes()); + stream.Position = 0; + + // Act & Assert + await Assert.ThrowsAsync(() => sub.DeserializeAsync(stream)); + } + + [Fact] + public async Task Deserialize_WithNullJson_ThrowsArgumentNullException() + { + // Act & Assert + await Assert.ThrowsAsync(() => sub.DeserializeAsync(default!)); + } + + + private class TestMessage + { + public int Id { get; set; } + public string? Content { get; set; } + } +} \ No newline at end of file diff --git a/src/Tests/Sa.Outbox.PostgreSqlTests/TestMessage.cs b/src/Tests/Sa.Outbox.PostgreSqlTests/TestMessage.cs new file mode 100644 index 0000000..ac5885b --- /dev/null +++ b/src/Tests/Sa.Outbox.PostgreSqlTests/TestMessage.cs @@ -0,0 +1,12 @@ +using Sa.Outbox.Support; + +namespace Sa.Outbox.PostgreSqlTests; + +[OutboxMessage] + +public class TestMessage : IOutboxPayloadMessage +{ + public string PayloadId { get; set; } = default!; + public string? 
Content { get; set; } + public int TenantId { get; set; } +} \ No newline at end of file diff --git a/src/Tests/Sa.Partitional.PostgreSqlTests/Cache/PartCacheTests.cs b/src/Tests/Sa.Partitional.PostgreSqlTests/Cache/PartCacheTests.cs new file mode 100644 index 0000000..398185b --- /dev/null +++ b/src/Tests/Sa.Partitional.PostgreSqlTests/Cache/PartCacheTests.cs @@ -0,0 +1,169 @@ +using Microsoft.Extensions.DependencyInjection; +using Sa.Classes; +using Sa.Data.PostgreSql.Fixture; +using Sa.Partitional.PostgreSql; +using Sa.Partitional.PostgreSql.Cache; + +namespace Sa.Partitional.PostgreSqlTests.Cache; + + +public class PartCacheTests(PartCacheTests.Fixture fixture) : IClassFixture +{ + + public class Fixture : PgDataSourceFixture + { + public Fixture() + { + Services.AddPartitional((_, builder) => + { + builder.AddSchema(schema => + { + schema.AddTable("test_20", + "id INT NOT NULL", + "tenant_id INT NOT NULL", + "part TEXT NOT NULL", + "payload_id TEXT" + ) + .PartByList("tenant_id", "part") + ; + + schema.AddTable("test_21", + "id INT NOT NULL", + "tenant_id INT NOT NULL", + "part TEXT NOT NULL", + "payload_id TEXT" + ) + .PartByList("tenant_id", "part") + ; + + schema.AddTable("test_22", + "id INT NOT NULL", + "tenant_id INT NOT NULL", + "part TEXT NOT NULL", + "payload_id TEXT" + ) + .PartByList("tenant_id", "part") + ; + + + schema.AddTable("test_23", + "id INT NOT NULL" + ) + ; + + schema.AddTable("test_24", + "id INT NOT NULL", + "tenant_id INT NOT NULL", + "part TEXT NOT NULL", + "payload_id TEXT" + ) + .PartByList("tenant_id", "part") + ; + + }); + }) + .AddDataSource(configure => configure.WithConnectionString(_ => this.ConnectionString)) + ; + } + + public IPartRepository PartRepository => ServiceProvider.GetRequiredService(); + } + + + private IPartCache Sub => fixture.Sub; + + + [Fact] + public async Task InCache_TableNotExists_ReturnsFalse() + { + Console.WriteLine(fixture.ConnectionString); + + bool actual = await Sub.InCache("different_table", 
DateTimeOffset.Now, ["p1", 145]); + // Assert + Assert.False(actual); + } + + [Fact] + public async Task InCache_PartExistsInCache_ReturnsTrue() + { + var date = DateTimeOffset.Now; + + await fixture.PartRepository.CreatePart("test_20", date, [1, "some"]); + + // Act + bool result = await Sub.InCache("test_20", date, [1, "some"]); + + // Assert + Assert.True(result); + } + + [Fact] + public async Task InCache_DateNotInCache_ReturnsFalse() + { + await fixture.PartRepository.CreatePart("test_21", DateTimeOffset.Now, [1, "some"]); + + // Act + bool result = await Sub.InCache("test_21", DateTimeOffset.Now.AddDays(1), [1, "some"]); + + // Assert + Assert.False(result); + } + + [Fact] + public async Task InCache_EnsureCache_ReturnsTrue() + { + DateTimeOffset date = DateTimeOffset.Now; + StrOrNum[] parValues = [1, "some1"]; + + // Act + bool result = await Sub.InCache("test_22", date, parValues); + + // Assert + Assert.False(result); + + result = await Sub.EnsureCache("test_22", date, parValues); + + Assert.True(result); + + result = await Sub.InCache("test_22", date, parValues); + + Assert.True(result); + } + + [Fact] + public async Task InCache_RemoveCache() + { + DateTimeOffset date = DateTimeOffset.Now; + + bool result = await Sub.EnsureCache("test_23", date, []); + + Assert.True(result); + + await Sub.RemoveCache("test_23"); + + result = await Sub.InCache("test_23", date, []); + + Assert.True(result); + } + + + [Fact] + public async Task InCache_EnsureCache_DifferentParts() + { + DateTimeOffset date = DateTimeOffset.Now; + StrOrNum[] partValues_1 = [1, "some1"]; + StrOrNum[] partValues_2 = [2, "some1"]; + + var result = await Sub.EnsureCache("test_24", date, partValues_1); + + Assert.True(result); + + result = await Sub.InCache("test_24", date, partValues_2); + + Assert.False(result); + + result = await Sub.EnsureCache("test_24", date, partValues_2); + + Assert.True(result); + } +} diff --git 
a/src/Tests/Sa.Partitional.PostgreSqlTests/Cleaning/PartCleanupServiceTests.cs b/src/Tests/Sa.Partitional.PostgreSqlTests/Cleaning/PartCleanupServiceTests.cs new file mode 100644 index 0000000..f3b871b --- /dev/null +++ b/src/Tests/Sa.Partitional.PostgreSqlTests/Cleaning/PartCleanupServiceTests.cs @@ -0,0 +1,71 @@ +using Microsoft.Extensions.DependencyInjection; +using Sa.Data.PostgreSql.Fixture; +using Sa.Extensions; +using Sa.Partitional.PostgreSql; + +namespace Sa.Partitional.PostgreSqlTests.Cleaning; + +public class PartCleanupServiceTests(PartCleanupServiceTests.Fixture fixture) : IClassFixture +{ + + public class Fixture : PgDataSourceFixture + { + public Fixture() + { + Services.AddPartitional((_, builder) => + { + builder.AddSchema(schema => + { + schema.AddTable("test_70", + "id INT NOT NULL", + "city TEXT NOT NULL", + "tenant_id INT NOT NULL", + "payload_id TEXT NOT NULL" + ) + .PartByList("tenant_id", "city") + .AddMigration(1, ["New York", "London"]) + .AddMigration(2, ["Moscow", "Kazan", "Yekaterinburg"]) + ; + + }); + }) + .AddDataSource(configure => configure.WithConnectionString(_ => this.ConnectionString)) + .AddPartCleanupSchedule() + ; + } + } + + + private IPartCleanupService Sub => fixture.Sub; + + private Task> GetParts() + => GetPartRep().GetPartsFromDate("test_70", DateTimeOffset.Now.StartOfDay()); + + private IPartRepository GetPartRep() => fixture.ServiceProvider.GetRequiredService(); + + private async Task MigrateTest() + { + await GetPartRep().Migrate([DateTimeOffset.Now], CancellationToken.None); + } + + [Fact()] + public async Task CleanTest() + { + Console.WriteLine(fixture.ConnectionString); + + await MigrateTest(); + + List list = await GetParts(); + Assert.NotEmpty(list); + + DateTimeOffset toDate = DateTimeOffset.UtcNow.AddDays(1); + + int i = await Sub.Clean(toDate, CancellationToken.None); + Assert.Equal(5, i); + + int expected = list.Count - i; + + list = await GetParts(); + Assert.Equal(expected, list.Count); + } +} diff --git 
a/src/Tests/Sa.Partitional.PostgreSqlTests/Configuration/ConfigurationPartTests.cs b/src/Tests/Sa.Partitional.PostgreSqlTests/Configuration/ConfigurationPartTests.cs new file mode 100644 index 0000000..b8da7fb --- /dev/null +++ b/src/Tests/Sa.Partitional.PostgreSqlTests/Configuration/ConfigurationPartTests.cs @@ -0,0 +1,67 @@ +using Sa.Fixture; +using Sa.Partitional.PostgreSql; + +namespace Sa.Partitional.PostgreSqlTests.Configuration; + + + + +public class ConfigurationPartTests(ConfigurationPartTests.Fixture fixture) : IClassFixture +{ + public class Fixture : SaSubFixture + { + public Fixture() + { + Services.AddPartitional((_, builder) => builder.AddSchema(schema => + { + + schema + .AddTable("test_1", + "part TEXT NOT NULL", + "lock_instance_id TEXT NOT NULL", + "lock_expires_on bigint NOT NULL", + "payload_id TEXT NOT NULL" + ) + .PartByList("part", "lock_instance_id") + .TimestampAs("date") + ; + + schema + .AddTable("test_2", + "name TEXT NOT NULL" + ) + .PartByList("name") + ; + + })); + + Services.AddPartitional((_, builder) => builder.AddSchema(schema => + { + schema + .AddTable("test_3", + "text TEXT NOT NULL" + ) + ; + })); + } + } + + [Fact] + public void PartitionalPostgreSql_SettingsBuiling() + { + ISettingsBuilder builder = fixture.Sub; + ITableSettingsStorage settings = builder.Build(); + Assert.Equal(3, settings.Tables.Count); + } + + [Fact] + public void PartitionalPostgreSql_CheckIdName_Test() + { + ISettingsBuilder builder = fixture.Sub; + ITableSettingsStorage storage = builder.Build(); + ITableSettings? 
settings = storage.Tables.FirstOrDefault(c => c.FullName == "public.test_1"); + Assert.NotNull(settings); + Assert.NotNull(builder); + Assert.Equal("part", settings.IdFieldName); + } +} diff --git a/src/Tests/Sa.Partitional.PostgreSqlTests/Configuration/SqlBuilderTests.cs b/src/Tests/Sa.Partitional.PostgreSqlTests/Configuration/SqlBuilderTests.cs new file mode 100644 index 0000000..83e3f85 --- /dev/null +++ b/src/Tests/Sa.Partitional.PostgreSqlTests/Configuration/SqlBuilderTests.cs @@ -0,0 +1,196 @@ +using Sa.Fixture; +using Sa.Partitional.PostgreSql; + +namespace Sa.Partitional.PostgreSqlTests.Configuration; + + + +public class SqlBuilderTests(SqlBuilderTests.Fixture fixture) : IClassFixture +{ + public class Fixture : SaSubFixture + { + public Fixture() + { + Services.AddPartitional((_, builder) => + { + builder.AddSchema(schema => + { + schema.AddTable("test_0", + "id CHAR(26) NOT NULL", + "tenant_id INT NOT NULL", + "part TEXT NOT NULL", + "part_1 TEXT NOT NULL", + "payload_id TEXT" + ) + .PartByList("tenant_id", "part", "part_1") + .TimestampAs("date") + ; + + schema.AddTable("test_1", + "id INT NOT NULL", + "part TEXT NOT NULL", + "tenant_id INT NOT NULL", + "lock_expires_on BIGINT NOT NULL", + "payload_id TEXT NOT NULL" + ) + .PartByList("part", "tenant_id") + .TimestampAs("date") + ; + + schema.AddTable("test_2", + "id INT NOT NULL", + "name TEXT NOT NULL" + ) + .PartByList("name") + ; + + }); + }); + + Services.AddPartitional((_, builder) => + { + builder.AddSchema("public", schema => + { + schema.AddTable("test_3", + "id INT NOT NULL", + "text TEXT NOT NULL" + ); + }); + + builder.AddSchema(schema => + { + schema + .CreateTable("test_4"); + + schema + .AddTable("test_5", + "pk_id INT NOT NULL", + "p0 TEXT NOT NULL", + "p1 TEXT NOT NULL", + "p2 TEXT NOT NULL", + "p3 TEXT NOT NULL", + "p4 TEXT NOT NULL", + "p5 TEXT NOT NULL", + "tid INT NOT NULL", + "payload_id TEXT" + ) + .PartByList("tid", "p0", "p1", "p2", "p3", "p4", "p5") + .TimestampAs("dt") + ; + + 
}); + }); + } + } + + + + [Fact] + public void PartitionalPostgreSql_SqlBuiling_Test_0() + { + ISqlBuilder sqlbuilder = fixture.Sub; + + ISqlTableBuilder? build = sqlbuilder["public.test_0"]; + Assert.NotNull(build); + + var expected = ("tenant_id", "part", "part_1"); + var actual = + ( + build.Settings.PartByListFieldNames[0], + build.Settings.PartByListFieldNames[1], + build.Settings.PartByListFieldNames[2] + ); + + Assert.Equal(expected, actual); + + string sql = build.CreateSql(DateTimeOffset.Now, 29, "part1", "part2"); + Assert.NotEmpty(sql); + } + + [Fact] + public void PartitionalPostgreSql_SqlBuiling_Test_1() + { + ISqlBuilder sqlbuilder = fixture.Sub; + + ISqlTableBuilder? tblBuilder = sqlbuilder["public.test_1"]; + Assert.NotNull(tblBuilder); + + string sql = tblBuilder.CreateSql(DateTimeOffset.Now, "some", 27); + Assert.NotEmpty(sql); + } + + [Fact] + public void PartitionalPostgreSql_SqlBuiling_Test_2() + { + ISqlBuilder sqlbuilder = fixture.Sub; + ISqlTableBuilder? tblBuilder = sqlbuilder["test_2"]; + + Assert.NotNull(tblBuilder); + + string sql = tblBuilder.CreateSql(DateTimeOffset.Now, "some_2"); + Assert.NotEmpty(sql); + } + + [Fact] + public void PartitionalPostgreSql_SqlBuiling_Test_3() + { + ISqlBuilder sqlbuilder = fixture.Sub; + ISqlTableBuilder? tblBuilder = sqlbuilder["public.test_3"]; + + Assert.NotNull(tblBuilder); + + ISqlTableBuilder? tblBuilder1 = sqlbuilder["public.\"test_3\""]; + + Assert.NotNull(tblBuilder1); + + Assert.Equal(tblBuilder1, tblBuilder); + + var now = DateTimeOffset.Now; + + string sqlTest = tblBuilder.CreateSql(now); + Assert.NotEmpty(sqlTest); + + string sqlTest1 = tblBuilder1.CreateSql(now); + Assert.Equal(sqlTest, sqlTest1); + } + + [Fact] + public void PartitionalPostgreSql_SqlBuiling_Test_4() + { + ISqlBuilder sqlbuilder = fixture.Sub; + + ISqlTableBuilder? 
builder = sqlbuilder["test_4"]; + Assert.NotNull(builder); + + var now = DateTimeOffset.Now; + string sql = builder.CreateSql(now); + Assert.NotEmpty(sql); + } + + + [Fact] + public void PartitionalPostgreSql_SqlBuiling_Test_5() + { + ISqlBuilder sqlbuilder = fixture.Sub; + + ISqlTableBuilder? builder = sqlbuilder["test_5"]; + + Assert.NotNull(builder); + Assert.Equal(7, builder.Settings.PartByListFieldNames.Length); + + var now = DateTimeOffset.Now; + string sql = builder.CreateSql(now, 7, "s0", "s1", "s2", "s3", "s4", "s5"); + Assert.NotEmpty(sql); + } + + [Fact] + public void PartitionalPostgreSql_CheckIdName_Test() + { + ISqlBuilder sqlbuilder = fixture.Sub; + + ISqlTableBuilder? builder = sqlbuilder["test_5"]; + + Assert.NotNull(builder); + Assert.Equal("pk_id", builder.Settings.IdFieldName); + } +} diff --git a/src/Tests/Sa.Partitional.PostgreSqlTests/PartitionAsJobTests.cs b/src/Tests/Sa.Partitional.PostgreSqlTests/PartitionAsJobTests.cs new file mode 100644 index 0000000..7ecf1fa --- /dev/null +++ b/src/Tests/Sa.Partitional.PostgreSqlTests/PartitionAsJobTests.cs @@ -0,0 +1,62 @@ +using Microsoft.Extensions.DependencyInjection; +using Sa.Data.PostgreSql.Fixture; +using Sa.Extensions; +using Sa.Partitional.PostgreSql; +using Sa.Schedule; + +namespace Sa.Partitional.PostgreSqlTests; + + +public class PartitionAsJobTests(PartitionAsJobTests.Fixture fixture) : IClassFixture +{ + + public class Fixture : PgDataSourceFixture + { + public Fixture() + { + Services.AddPartitional((_, builder) => + { + builder.AddSchema(schema => + { + // Настройка таблицы customers + schema.AddTable("customer", + "id INT NOT NULL", + "country TEXT NOT NULL", + "city TEXT NOT NULL" + ) + // разделить в таблицах меж партиций + .WithPartSeparator("_") + // Партиционирование по country и city (если не задан PartByRange то по дням) + .PartByList("country", "city") + // Миграция партиций каждого тенанта по city + .AddMigration("RU", ["Moscow", "Samara"]) + .AddMigration("USA", ["Alabama", 
"New York"]) + .AddMigration("FR", ["Paris", "Lyon", "Bordeaux"]); + + }); + } + , asJob: true + ) + .AddDataSource(configure => configure.WithConnectionString(_ => this.ConnectionString)) + ; + } + } + + + private IPartRepository Sub => fixture.Sub; + + [Fact] + public async Task MigrateAsJobTest() + { + Console.WriteLine(fixture.ConnectionString); + + int i = fixture.ServiceProvider.GetRequiredService().Start(CancellationToken.None); + Assert.True(i > 0); + + Console.WriteLine(fixture.ConnectionString); + await Task.Delay(800); + + var list = await Sub.GetPartsFromDate("customer", DateTimeOffset.Now.StartOfDay()); + Assert.NotEmpty(list); + } +} diff --git a/src/Tests/Sa.Partitional.PostgreSqlTests/PartitionIndexTests.cs b/src/Tests/Sa.Partitional.PostgreSqlTests/PartitionIndexTests.cs new file mode 100644 index 0000000..11aa43e --- /dev/null +++ b/src/Tests/Sa.Partitional.PostgreSqlTests/PartitionIndexTests.cs @@ -0,0 +1,61 @@ +using Sa.Data.PostgreSql.Fixture; +using Sa.Partitional.PostgreSql; + +namespace Sa.Partitional.PostgreSqlTests; + + +public class PartitionIndexTests(PartitionIndexTests.Fixture fixture) : IClassFixture +{ + public class Fixture : PgDataSourceFixture + { + public Fixture() + { + Services.AddPartitional((_, builder) => + { + builder.AddSchema(schema => + { + schema.AddTable("test_61", + "id INT NOT NULL" + ) + ; + + }); + }) + .AddDataSource(configure => configure.WithConnectionString(_ => this.ConnectionString)) + ; + } + } + + + private IPartitionManager Sub => fixture.Sub; + + + + [Fact] + public async Task InsertingDoubleParts() + { + Console.WriteLine(fixture.ConnectionString); + + DateTimeOffset today = DateTimeOffset.Now; + DateTimeOffset tomorrow = today.AddDays(1); + + // Act + int i = await Sub.Migrate([today, tomorrow]); + + Assert.Equal(2, i); + + long unixTime = today.ToUnixTimeSeconds(); + + i = await fixture.DataSource.ExecuteNonQuery( +$""" +INSERT INTO test_61 + (id,created_at) +VALUES + (1,{unixTime}), + (1,{unixTime + 
1}) +ON CONFLICT DO NOTHING +"""); + + Assert.Equal(2, i); + } +} diff --git a/src/Tests/Sa.Partitional.PostgreSqlTests/PartitionManagerTests.cs b/src/Tests/Sa.Partitional.PostgreSqlTests/PartitionManagerTests.cs new file mode 100644 index 0000000..f58c06e --- /dev/null +++ b/src/Tests/Sa.Partitional.PostgreSqlTests/PartitionManagerTests.cs @@ -0,0 +1,78 @@ +using Sa.Data.PostgreSql.Fixture; +using Sa.Partitional.PostgreSql; + +namespace Sa.Partitional.PostgreSqlTests; + + +public class PartitionManagerTests(PartitionManagerTests.Fixture fixture) : IClassFixture +{ + public class Fixture : PgDataSourceFixture + { + public Fixture() + { + Services.AddPartitional((_, builder) => + { + builder.AddSchema(schema => + { + schema.AddTable("test_41", + "id INT NOT NULL", + "gender TEXT NOT NULL", + "tenant_id INT NOT NULL", + "payload_id TEXT NOT NULL" + ) + .WithPartSeparator("_") + .PartByList("tenant_id", "gender") + .AddMigration(1, ["male", "female"]) + .AddMigration(2, "male") + .AddMigration(2, "female") + .PartByRange(PgPartBy.Day, "created_at") + ; + + schema.AddTable("test_empty", + "id INT NOT NULL" + ) + .AddMigration() + // double check + .AddMigration() + ; + }); + }) + .AddDataSource(configure => configure.WithConnectionString(_ => this.ConnectionString)) + ; + } + } + + + private IPartitionManager Sub => fixture.Sub; + + [Fact] + public async Task MigrateTest() + { + Console.WriteLine(fixture.ConnectionString); + // Act + int i = await Sub.Migrate(); + Assert.True(i > 0); + } + + + [Fact] + public async Task Migrate_WithDates() + { + + DateTimeOffset today = DateTimeOffset.Now.AddDays(7); + DateTimeOffset tomorrow = today.AddDays(1); + + // Act + int i = await Sub.Migrate([today, tomorrow]); + + Assert.NotEqual(0, i); + + string postfix = PgPartBy.Day.Fmt(tomorrow); + + string table = $"public.test_41_1_male_{postfix}"; + + await fixture.CheckTable(table); + + Assert.True(true); + } +} diff --git a/src/Tests/Sa.Partitional.PostgreSqlTests/PgPartByTests.cs 
b/src/Tests/Sa.Partitional.PostgreSqlTests/PgPartByTests.cs new file mode 100644 index 0000000..3414713 --- /dev/null +++ b/src/Tests/Sa.Partitional.PostgreSqlTests/PgPartByTests.cs @@ -0,0 +1,170 @@ +using Sa.Partitional.PostgreSql; + +namespace Sa.Partitional.PostgreSqlTests; + +public class PgPartByTests +{ + + [Fact] + public void Day_Fmt_ReturnsCorrectFormat() + { + // Arrange + var testDate = new DateTimeOffset(2023, 12, 25, 14, 30, 23, TimeSpan.Zero); + + // Act + var name = PgPartBy.Day.Fmt(testDate); + + // Assert + Assert.Equal("y2023m12d25", name); + } + + [Theory] + [InlineData("y2023m12d25", 2023, 12, 25)] + [InlineData("y2024m01d01", 2024, 1, 1)] + [InlineData("_outbox_root__y2021m12d30", 2021, 12, 30)] + public void Day_ParseFmt_ValidStrings(string input, int expectedYear, int expectedMonth, int expectedDay) + { + + // Act + var result = PgPartBy.Day.ParseFmt(input); + + // Assert + Assert.NotNull(result); + Assert.Equal(expectedYear, result.Value.Year); + Assert.Equal(expectedMonth, result.Value.Month); + Assert.Equal(expectedDay, result.Value.Day); + Assert.Equal(0, result.Value.Hour); + Assert.Equal(0, result.Value.Minute); + Assert.Equal(0, result.Value.Second); + Assert.Equal(TimeSpan.Zero, result.Value.Offset); + } + + [Theory] + [InlineData("invalid")] + [InlineData("y2023m13d32")] + [InlineData("")] + public void Day_ParseFmt_ReturnsNull_ForInvalidStrings(string input) + { + try + { + // Act + var result = PgPartBy.Day.ParseFmt(input); + + // Assert + Assert.Null(result); + } + catch + { + Assert.True(true); + } + } + + + [Fact] + public void Month_Fmt_ReturnsCorrectFormat() + { + // Arrange + var testDate = new DateTimeOffset(2023, 12, 25, 14, 30, 23, TimeSpan.Zero); + + // Act + var name = PgPartBy.Month.Fmt(testDate); + + // Assert + Assert.Equal("y2023m12", name); + } + + [Theory] + [InlineData("y2023m12", 2023, 12)] + [InlineData("y2024m01", 2024, 1)] + [InlineData("_outbox_root__y2021m06", 2021, 6)] + public void 
Month_ParseFmt_ValidStrings(string input, int expectedYear, int expectedMonth) + { + + // Act + var result = PgPartBy.Month.ParseFmt(input); + + // Assert + Assert.NotNull(result); + Assert.Equal(expectedYear, result.Value.Year); + Assert.Equal(expectedMonth, result.Value.Month); + Assert.Equal(1, result.Value.Day); + Assert.Equal(0, result.Value.Hour); + Assert.Equal(0, result.Value.Minute); + Assert.Equal(0, result.Value.Second); + Assert.Equal(TimeSpan.Zero, result.Value.Offset); + } + + [Theory] + [InlineData("invalid")] + [InlineData("y2023m13")] + [InlineData("")] + public void Month_ParseFmt_ReturnsNull_ForInvalidStrings(string input) + { + try + { + // Act + var result = PgPartBy.Month.ParseFmt(input); + + // Assert + Assert.Null(result); + } + catch + { + Assert.True(true); + } + } + + [Fact] + public void Year_Fmt_ReturnsCorrectFormat() + { + // Arrange + var testDate = new DateTimeOffset(2023, 12, 25, 14, 30, 23, TimeSpan.Zero); + + // Act + var name = PgPartBy.Year.Fmt(testDate); + + // Assert + Assert.Equal("y2023", name); + } + + [Theory] + [InlineData("y2023", 2023)] + [InlineData("y2024", 2024)] + [InlineData("_outbox_root__y2021", 2021)] + public void Year_Parse_FmtValidStrings(string input, int expectedYear) + { + + // Act + var result = PgPartBy.Year.ParseFmt(input); + + // Assert + Assert.NotNull(result); + Assert.Equal(expectedYear, result.Value.Year); + Assert.Equal(1, result.Value.Month); + Assert.Equal(1, result.Value.Day); + Assert.Equal(0, result.Value.Hour); + Assert.Equal(0, result.Value.Minute); + Assert.Equal(0, result.Value.Second); + Assert.Equal(TimeSpan.Zero, result.Value.Offset); + } + + [Theory] + [InlineData("invalid")] + [InlineData("y202")] + [InlineData("")] + public void Year_ParseFmt_ReturnsNull_ForInvalidStrings(string input) + { + try + { + // Act + var result = PgPartBy.Year.ParseFmt(input); + + // Assert + Assert.Null(result); + } + catch + { + Assert.True(true); + } + } +} \ No newline at end of file diff --git 
a/src/Tests/Sa.Partitional.PostgreSqlTests/Repositories/PgRepositoryTests.cs b/src/Tests/Sa.Partitional.PostgreSqlTests/Repositories/PgRepositoryTests.cs new file mode 100644 index 0000000..f1858d5 --- /dev/null +++ b/src/Tests/Sa.Partitional.PostgreSqlTests/Repositories/PgRepositoryTests.cs @@ -0,0 +1,103 @@ +using Sa.Classes; +using Sa.Data.PostgreSql.Fixture; +using Sa.Partitional.PostgreSql; + +namespace Sa.Partitional.PostgreSqlTests.Repositories; + + +public class PgRepositoryTests(PgRepositoryTests.Fixture fixture) : IClassFixture +{ + public class Fixture : PgDataSourceFixture + { + public Fixture() + { + Services.AddPartitional((_, builder) => + { + builder.AddSchema(schema => + { + schema.AddTable("test_10", + "id INT NOT NULL", + "tenant_id INT NOT NULL", + "part TEXT NOT NULL", + "part_1 TEXT NOT NULL", + "payload_id TEXT" + ) + .PartByList("tenant_id", "part", "part_1") + .TimestampAs("date") + ; + + schema.AddTable("test_11", + "id INT NOT NULL", + "part_str TEXT NOT NULL", + "tenant_id INT NOT NULL", + "payload_id TEXT NOT NULL" + ) + .PartByList("part_str", "tenant_id") + ; + + schema.AddTable("test_12", + "id INT NOT NULL" + ) + ; + + }); + }) + .AddDataSource(configure => configure.WithConnectionString(_ => this.ConnectionString)) + ; + } + } + + + + [Fact()] + public async Task CreatePartTest() + { + Console.WriteLine(fixture.ConnectionString); + + int i = await fixture.Sub.CreatePart("test_11", DateTimeOffset.Now, ["some", 12]); + Assert.NotEqual(0, i); + } + + [Fact()] + public async Task CreatePart_WithEmptyListTest() + { + + int i = await fixture.Sub.CreatePart("test_12", DateTimeOffset.Now, []); + Assert.NotEqual(0, i); + } + + + [Fact()] + public async Task GetPartByRangeListTest() + { + var timeExpected = new DateTimeOffset(2024, 12, 03, 00, 00, 00, TimeSpan.Zero); + + await fixture.Sub.CreatePart("test_10", timeExpected.AddMinutes(22).AddHours(12), [1, "some1", "some2"]); + IReadOnlyCollection list = await 
fixture.Sub.GetPartsFromDate("test_10", timeExpected.AddDays(-3)); + Assert.NotEmpty(list); + PartByRangeInfo item = list.First(c => c.Id == "public.\"test_10__1__some1__some2__y2024m12d03\""); + Assert.NotNull(item); + Assert.Equal(timeExpected, item.FromDate); + Assert.Equal("public.test_10", item.RootTableName); + } + + [Fact()] + public async Task MigrateTest() + { + int i = await fixture.Sub.Migrate([DateTime.Now, DateTime.Now.AddDays(1)], table => + { + StrOrNum[][] result = table switch + { + "public.test_10" => [[1, "part1", "part2"], [2, "part1", "part2"]], + "public.test_11" => [["part1", 1], ["part1", 2], ["part2", 1]], + "public.test_12" => [], + _ => throw new NotImplementedException(), + }; + + return Task.FromResult(result); + }); + + Assert.True(i > 0); + } +} + diff --git a/src/Tests/Sa.Partitional.PostgreSqlTests/Sa.Partitional.PostgreSqlTests.csproj b/src/Tests/Sa.Partitional.PostgreSqlTests/Sa.Partitional.PostgreSqlTests.csproj new file mode 100644 index 0000000..d942516 --- /dev/null +++ b/src/Tests/Sa.Partitional.PostgreSqlTests/Sa.Partitional.PostgreSqlTests.csproj @@ -0,0 +1,15 @@ + + + + + + true + + + + + + + + + diff --git a/src/Tests/Sa.ScheduleTests/Sa.ScheduleTests.csproj b/src/Tests/Sa.ScheduleTests/Sa.ScheduleTests.csproj new file mode 100644 index 0000000..6e9c7d3 --- /dev/null +++ b/src/Tests/Sa.ScheduleTests/Sa.ScheduleTests.csproj @@ -0,0 +1,14 @@ + + + + + + true + + + + + + + + diff --git a/src/Tests/Sa.ScheduleTests/SchedulePostSetupTests.cs b/src/Tests/Sa.ScheduleTests/SchedulePostSetupTests.cs new file mode 100644 index 0000000..ad4c9eb --- /dev/null +++ b/src/Tests/Sa.ScheduleTests/SchedulePostSetupTests.cs @@ -0,0 +1,73 @@ +using Sa.Fixture; +using Sa.Schedule; + +namespace Sa.ScheduleTests; + + +public class SchedulePostSetupTests(SchedulePostSetupTests.Fixture fixture) : IClassFixture +{ + public class Fixture : SaSubFixture + { + static class Counter + { + private static int _count; + public static int Total => _count; 
+ public static void Inc() => Interlocked.Increment(ref _count); + } + + class SomeJob : IJob + { + public async Task Execute(IJobContext context, CancellationToken cancellationToken) + { + Counter.Inc(); + await Task.Delay(10, cancellationToken); + } + } + + public Fixture() + { + Services.AddSchedule(b => + { + b.AddJob((sp, builder) => + { + builder + .EveryTime(TimeSpan.FromMilliseconds(100)) + .RunOnce() + .StartImmediate() + ; + + }); + }); + + Services.AddSchedule(b => + { + b.AddJob((sp, builder) => + { + builder + .EveryTime(TimeSpan.FromMilliseconds(100)) + .RunOnce() + .StartImmediate() + ; + + }); + }); + } + + public static int Count => Counter.Total; + } + + private IScheduler Sub => fixture.Sub; + + + [Fact] + public async Task Check_Executing_RunOnce_ForMultiJobs() + { + int i = Sub.Start(CancellationToken.None); + + Assert.Equal(2, i); + + await Task.Delay(300); + + Assert.Equal(2, Fixture.Count); + } +} diff --git a/src/Tests/Sa.ScheduleTests/ScheduleSetupTests.cs b/src/Tests/Sa.ScheduleTests/ScheduleSetupTests.cs new file mode 100644 index 0000000..75ab891 --- /dev/null +++ b/src/Tests/Sa.ScheduleTests/ScheduleSetupTests.cs @@ -0,0 +1,56 @@ +using Sa.Fixture; +using Sa.Schedule; + +namespace Sa.ScheduleTests; + + +public class ScheduleSetupTests(ScheduleSetupTests.Fixture fixture) : IClassFixture +{ + public class Fixture : SaSubFixture + { + static class Counter + { + private static int _count; + public static int Total => _count; + public static void Inc() => Interlocked.Increment(ref _count); + } + + class SomeJob : IJob + { + public async Task Execute(IJobContext context, CancellationToken cancellationToken) + { + Counter.Inc(); + await Task.Delay(10, cancellationToken); + } + } + + public Fixture() + { + Services.AddSchedule(b => + { + b + .AddJob() + .EveryTime(TimeSpan.FromMilliseconds(100)) + .StartImmediate() + ; + }); + } + + public static int Count => Counter.Total; + } + + private IScheduler Sub => fixture.Sub; + + + [Fact] + public 
async Task Check_ExecuteCounterJob() + { + int i = Sub.Start(CancellationToken.None); + + Assert.NotEqual(0, i); + + await Task.Delay(300); + + Assert.True(Fixture.Count > 0); + } +} diff --git a/src/Tests/SaTests/Classes/KeepLockerTests.cs b/src/Tests/SaTests/Classes/KeepLockerTests.cs new file mode 100644 index 0000000..eef3393 --- /dev/null +++ b/src/Tests/SaTests/Classes/KeepLockerTests.cs @@ -0,0 +1,90 @@ +using Sa.Classes; + +namespace SaTests.Classes; + +public class KeepLockerTests +{ + [Fact] + public async Task KeepLocked_ExtendsLockUntilCancelled() + { + // Arrange + var extensionCount = 0; + var lockExpiration = TimeSpan.FromMilliseconds(50); + using var cancellationTokenSource = new CancellationTokenSource(); + var cancellationToken = cancellationTokenSource.Token; + + async Task extendLocked(CancellationToken token) + { + await Task.Delay(TimeSpan.FromMilliseconds(10), token); // Simulate some work + extensionCount++; + } + + // Act + using (var locker = KeepLocker.KeepLocked(lockExpiration, extendLocked, cancellationToken: cancellationToken)) + { + await Task.Delay(200); // Give it some time to run + cancellationTokenSource.Cancel(); + } + + // Assert + Assert.True(extensionCount > 0, "The lock should have been extended at least once."); + } + + [Fact] + public async Task KeepLocked_BlockImmediately_ExtendsLockImmediately() + { + // Arrange + var extensionCount = 0; + var lockExpiration = TimeSpan.FromMilliseconds(50); + using var cancellationTokenSource = new CancellationTokenSource(); + var cancellationToken = cancellationTokenSource.Token; + + async Task extendLocked(CancellationToken token) + { + await Task.Delay(TimeSpan.FromMilliseconds(10), token); // Simulate some work + extensionCount++; + } + + // Act + using (var locker = KeepLocker.KeepLocked(lockExpiration, extendLocked, blockImmediately: true, cancellationToken: cancellationToken)) + { + await Task.Delay(100); // Give it some time to run + cancellationTokenSource.Cancel(); + } + + // 
Assert + Assert.True(extensionCount > 0, "The lock should have been extended immediately."); + } + + [Fact] + public async Task Dispose_ReleasesResources() + { + // Arrange + var extensionCount = 0; + var lockExpiration = TimeSpan.FromMilliseconds(50); + using var cancellationTokenSource = new CancellationTokenSource(); + var cancellationToken = cancellationTokenSource.Token; + + async Task extendLocked(CancellationToken token) + { + await Task.Delay(TimeSpan.FromMilliseconds(10), token); // Simulate some work + extensionCount++; + } + + // Act + var locker = KeepLocker.KeepLocked(lockExpiration, extendLocked, cancellationToken: cancellationToken); + + await Task.Delay(100); + + locker.Dispose(); + + + var expected = extensionCount; + // Assert + Assert.True(extensionCount > 0, "The lock should have been extended immediately."); + + await Task.Delay(100); + + Assert.Equal(expected, extensionCount); + } +} \ No newline at end of file diff --git a/src/Tests/SaTests/Classes/RetryTests.cs b/src/Tests/SaTests/Classes/RetryTests.cs new file mode 100644 index 0000000..7205a9c --- /dev/null +++ b/src/Tests/SaTests/Classes/RetryTests.cs @@ -0,0 +1,146 @@ +using Sa.Classes; + +namespace SaTests.Classes; + + +public class RetryTests +{ + [Fact] + public async Task Constant_Retry_Succeeds_After_2_Attempts() + { + // Arrange + int attemptCount = 0; + ValueTask func(int input, CancellationToken token) + { + attemptCount++; + if (attemptCount < 3) + { + throw new Exception("Simulated failure"); + } + return new ValueTask(input); + } + + // Act + int result = await Retry.Constant(func, 42, retryCount: 3, waitTime: 10); + + // Assert + Assert.Equal(42, result); + Assert.Equal(3, attemptCount); + } + + [Fact] + public async Task Exponential_Retry_Succeeds_After_2_Attempts() + { + // Arrange + int attemptCount = 0; + ValueTask func(int input, CancellationToken token) + { + attemptCount++; + if (attemptCount < 3) + { + throw new Exception("Simulated failure"); + } + return new 
ValueTask(input); + } + + // Act + int result = await Retry.Exponential(func, 42, retryCount: 3, initialDelay: 10); + + // Assert + Assert.Equal(42, result); + Assert.Equal(3, attemptCount); + } + + [Fact] + public async Task Linear_Retry_Succeeds_After_2_Attempts() + { + // Arrange + int attemptCount = 0; + ValueTask func(int input, CancellationToken token) + { + attemptCount++; + if (attemptCount < 3) + { + throw new Exception("Simulated failure"); + } + return new ValueTask(input); + } + + // Act + int result = await Retry.Linear(func, 42, retryCount: 3, initialDelay: 10); + + // Assert + Assert.Equal(42, result); + Assert.Equal(3, attemptCount); + } + + [Fact] + public async Task DecorrelatedJitter_Retry_Succeeds_After_2_Attempts() + { + // Arrange + int attemptCount = 0; + ValueTask func(int input, CancellationToken token) + { + attemptCount++; + if (attemptCount < 3) + { + throw new Exception("Simulated failure"); + } + return new ValueTask(input); + } + + // Act + int result = await Retry.Jitter(func, 42, retryCount: 3, initialDelay: 10); + + // Assert + Assert.Equal(42, result); + Assert.Equal(3, attemptCount); + } + + [Fact] + public async Task Retry_Throws_Original_Exception_After_Max_Retries() + { + // Arrange + int attemptCount = 0; + ValueTask func(int input, CancellationToken token) + { + attemptCount++; + throw new Exception("Simulated failure"); + } + + // Act and Assert + await Assert.ThrowsAsync(() => Retry.Constant(func, 42, retryCount: 3, waitTime: 10).AsTask()); + Assert.Equal(3, attemptCount); + } + + [Fact] + public async Task Retry_Cancels_After_CancellationToken_Is_Cancelled() + { + // Arrange + int attemptCount = 0; + async ValueTask func(int input, CancellationToken token) + { + if (token.IsCancellationRequested) + { + return 111; + } + + attemptCount++; + await Task.Delay(100, CancellationToken.None); + throw new Exception("Simulated failure"); + } + + using CancellationTokenSource cts = new(); + + _ = Task.Run(async () => + { + await 
Task.Delay(200); + cts.Cancel(); + }); + + int result = await Retry.Constant(func, 42, retryCount: 3, waitTime: 10, cancellationToken: cts.Token); + + Assert.True(attemptCount > 0); + Assert.Equal(111, result); + } +} diff --git a/src/Tests/SaTests/Classes/StrOrNumTests.cs b/src/Tests/SaTests/Classes/StrOrNumTests.cs new file mode 100644 index 0000000..77db8b4 --- /dev/null +++ b/src/Tests/SaTests/Classes/StrOrNumTests.cs @@ -0,0 +1,32 @@ +using Sa.Classes; +using Sa.Extensions; + +namespace SaTests.Classes; + +public class StrOrNumTests +{ + [Fact] + public void StrOrNum_Mustbe_Serialize_json() + { + StrOrNum expected = "{Hi}\""; + + string json = expected.ToJson(); + + Assert.NotEmpty(json); + + StrOrNum actual = json.FromJson()!; + + Assert.Equal(expected, actual); + + + expected = 123; + + json = expected.ToJson(); + + Assert.NotEmpty(json); + + actual = json.FromJson()!; + + Assert.Equal(expected, actual); + } +} \ No newline at end of file diff --git a/src/Tests/SaTests/Classes/WorkQueueTests.cs b/src/Tests/SaTests/Classes/WorkQueueTests.cs new file mode 100644 index 0000000..0e312a7 --- /dev/null +++ b/src/Tests/SaTests/Classes/WorkQueueTests.cs @@ -0,0 +1,158 @@ +using Sa.Classes; + +namespace SaTests.Classes; + +public class WorkQueueTests +{ + public class SampleModel + { + private int _count; + + public int Count => _count; + + public Exception? 
Exception { get; set; } + + public void IncCount() => _count++; + } + + + internal class SampleWork : IWork + { + public async Task Execute(SampleModel model, CancellationToken cancellationToken) + { + model.IncCount(); + await Task.Delay(100, cancellationToken); + } + } + + + internal class SampleFailWork : IWorkWithHandleError + { + public Task Execute(SampleModel model, CancellationToken cancellationToken) + { + throw new Exception("test error"); + } + + public Task HandelError(Exception exception, SampleModel model, CancellationToken cancellationToken) + { + model.Exception = exception; + return Task.CompletedTask; + } + } + + + [Fact] + public async Task Schedule_ExecutesWorkSuccessfully() + { + var workService = new WorkQueue(); + var model = new SampleModel { }; + var work = new SampleWork { }; + + // Act + workService.Enqueue(model, work); + + // Wait for a short time to allow the work to be processed + await Task.Delay(200); + + // Stop the work service + await workService.Stop(model); + + // Assert + Assert.Equal(1, model.Count); + + await workService.DisposeAsync(); + } + + [Fact] + public async Task CancelledWork_MustBeStopped() + { + // Arrange + var workService = new WorkQueue(); + var model = new SampleModel { }; + var work = new SampleWork { }; + using var cancellationTokenSource = new CancellationTokenSource(); + + // Act + workService.Enqueue(model, work, cancellationTokenSource.Token); + + // Cancel the token before the work is completed + await cancellationTokenSource.CancelAsync(); + + // Wait for a short time to allow the work to be processed + await Task.Delay(200); + + // Stop the work service + await workService.Stop(model); + await workService.Stop(model); + + // Assert + Assert.Equal(0, model.Count); + + await workService.DisposeAsync(); + } + + + [Fact] + public async Task MultiWork_ExecutesWorkSuccessfully() + { + // Arrange + var workService = new WorkQueue(); + + var model = new SampleModel { }; + + int excepted = 100; + + for (int i 
= 0; i < excepted; i++) + { + var work = new SampleWork { }; + + // Act + workService + .Enqueue(model, work) + ; + } + + // Wait for a short time to allow the work to be processed + await workService.DisposeAsync(); + + // Assert + Assert.Equal(excepted, model.Count); + + } + + + [Fact] + public async Task WaitingEndedWork_AfterDispose() + { + // Arrange + var workService = new WorkQueue(); + var model = new SampleModel { }; + var work = new SampleWork { }; + using var cancellationTokenSource = new CancellationTokenSource(); + + // Act + workService.Enqueue(model, work, cancellationTokenSource.Token); + await workService.DisposeAsync(); + + // Assert + Assert.Equal(1, model.Count); + } + + + [Fact] + public async Task FailWork_MustBeHandled() + { + // Arrange + var workService = new WorkQueue(); + var model = new SampleModel { }; + var work = new SampleFailWork { }; + + // Act + workService.Enqueue(model, work); + + await workService.DisposeAsync(); + + // Assert + Assert.NotNull(model.Exception); + } +} \ No newline at end of file diff --git a/src/Tests/SaTests/SaTests.csproj b/src/Tests/SaTests/SaTests.csproj new file mode 100644 index 0000000..0334a33 --- /dev/null +++ b/src/Tests/SaTests/SaTests.csproj @@ -0,0 +1,13 @@ + + + + + + true + + + + + + + \ No newline at end of file