зеркало из
1
0
Форкнуть 0
This commit is contained in:
Aaron Stannard 2015-06-25 20:34:58 -07:00
Родитель 368a92694f
Коммит 1e14d3f31a
30 изменённых файлов: 2426 добавлений и 2 удалений

11
.editorconfig Normal file
Просмотреть файл

@ -0,0 +1,11 @@
; This file is for unifying the coding style for different editors and IDEs.
; More information at http://EditorConfig.org
root = true
[*]
end_of_line = CRLF
[*.cs]
indent_style = space
indent_size = 4

26
.gitattributes поставляемый Normal file
Просмотреть файл

@ -0,0 +1,26 @@
# Auto detect text files and perform LF normalization
* text=auto
# Custom for Visual Studio
*.cs diff=csharp
*.sln merge=union
*.csproj merge=union
*.vbproj merge=union
*.fsproj merge=union
*.dbproj merge=union
# Standard to msysgit
*.doc diff=astextplain
*.DOC diff=astextplain
*.docx diff=astextplain
*.DOCX diff=astextplain
*.dot diff=astextplain
*.DOT diff=astextplain
*.pdf diff=astextplain
*.PDF diff=astextplain
*.rtf diff=astextplain
*.RTF diff=astextplain
# Needed for Mono build shell script
*.sh -text eol=lf

213
.gitignore поставляемый Normal file
Просмотреть файл

@ -0,0 +1,213 @@
# Akka
# Fake directories
src/.build/**
#GitExtensions
us.stackdump
#KDiff3 and other git merge tools
*.orig
#-------------------------------------------------------------------------------
#Based on https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
# User-specific files
*.suo
*.user
*.sln.docstates
#MonoDevelop
*.userprefs
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
build/
bld/
[Bb]in/
[Oo]bj/
# Roslyn cache directories
*.ide/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
#NUNIT
*.VisualState.xml
TestResult.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
*_i.c
*_p.c
*_i.h
*.ilk
*.meta
*.obj
*.pch
*.pdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opensdf
*.sdf
*.cachefile
# Visual Studio profiler
*.psess
*.vsp
*.vspx
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# JustCode is a .NET coding addin-in
.JustCode
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# NCrunch
_NCrunch_*
.*crunch*.local.xml
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# TODO: Comment out the next line if you want to keep your passwords hidden
*.pubxml
# NuGet Packages
*.nupkg
# The packages folder can be ignored because of Package Restore
**/packages/*
# except build/, which is used as an MSBuild target.
!**/packages/build/
# If using the old MSBuild-Integrated Package Restore, uncomment this:
!**/packages/repositories.config
# Windows Azure Build Output
csx/
*.build.csdef
# Windows Store app package directory
AppPackages/
# Others
sql/
*.Cache
ClientBin/
[Ss]tyle[Cc]op.*
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.pfx
*.publishsettings
node_modules/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
# SQL Server files
*.mdf
*.ldf
# make exception for Akka.Persistence.SqlServer database file
!AkkaPersistenceSqlServerSpecDb.mdf
!AkkaPersistenceSqlServerSpecDb_log.ldf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
# Microsoft Fakes
FakesAssemblies/
/src/.Akka.boltdata/NCover/Executions/0.jf
/src/.Akka.boltdata/NCover/Executions/ProjectId/0.jf
/src/.Akka.boltdata/NCover/Executions/ProjectOrderIndex/0.jf
/src/.Akka.boltdata/NCover/Projects/0.jf
/src/.Akka.boltdata/NCover/Projects/Name/0.jf
/src/.Akka.boltdata/Settings.json
/src/.Akka.boltdata/TestResults.json
resetdev.bat
/src/packages/repositories.config

136
CONTRIBUTING.md Normal file
Просмотреть файл

@ -0,0 +1,136 @@
# Contributing to Akka.NET
Akka.NET is a large project and contributions are more than welcome, so thank you for wanting to contribute to Akka.NET!
---
### Checklist before creating a Pull Request
Submit only relevant commits. We don't mind many commits in a pull request, but they must be relevant as explained below.
- __Use a feature branch__ The pull request should be created from a feature branch, and not from _dev_. See below for why.
- __No merge-commits__
If you have commits that look like this _"Merge branch 'my-branch' into dev"_ or _"Merge branch 'dev' of github .com/akkadotnet/akka.net into dev"_ you're probably using merge instead of [rebase](https://help.github.com/articles/about-git-rebase) locally. See below on _Handling updates from upstream_.
- __Squash commits__ Often we create temporary commits like _"Started implementing feature x"_ and then _"Did a bit more on feature x"_. Squash these commits together using [interactive rebase](https://help.github.com/articles/about-git-rebase). Also see [Squashing commits with rebase](http://gitready.com/advanced/2009/02/10/squashing-commits-with-rebase.html).
- __Descriptive commit messages__ If a commit's message isn't descriptive, change it using [interactive rebase](https://help.github.com/articles/about-git-rebase). Refer to issues using `#issue`. Example of a bad message ~~"Small cleanup"~~. Example of good message: _"Removed Security.Claims header from FSM, which broke Mono build per #62"_. Don't be afraid to write long messages, if needed. Try to explain _why_ you've done the changes. The Erlang repo has some info on [writing good commit messages](https://github.com/erlang/otp/wiki/Writing-good-commit-messages).
- __No one-commit-to-rule-them-all__ Large commits that changes too many things at the same time are very hard to review. Split large commits into smaller. See this [StackOverflow question](http://stackoverflow.com/questions/6217156/break-a-previous-commit-into-multiple-commits) for information on how to do this.
- __Tests__ Add relevant tests and make sure all existing ones still pass. Tests can be run using the command
- __No Warnings__ Make sure your code does not produce any build warnings.
After reviewing a Pull request, we might ask you to fix some commits. After you've done that you need to force push to update your branch in your local fork.
#### Title and Description for the Pull Request ####
Give the PR a descriptive title and in the description field describe what you have done in general terms and why. This will help the reviewers greatly, and provide a history for the future.
Especially if you modify something existing, be very clear! Have you changed any algorithms, or did you just intend to reorder the code? Justify why the changes are needed.
---
### Getting started
Make sure you have a [GitHub](https://github.com/) account.
- Fork, clone, add upstream to the Akka.NET repository. See [Fork a repo](https://help.github.com/articles/fork-a-repo) for more detailed instructions or follow the instructions below.
- Fork by clicking _Fork_ on https://github.com/akkadotnet/akka.net
- Clone your fork locally.
```
git clone https://github.com/YOUR-USERNAME/akka.net
```
- Add an upstream remote.
```
git remote add upstream https://github.com/akkadotnet/akka.net
```
You now have two remotes: _upstream_ points to https://github.com/akkadotnet/akka.net, and _origin_ points to your fork on GitHub.
- Make changes. See below.
Unsure where to start? Issues marked with [_up for grabs_](https://github.com/akkadotnet/akka.net/labels/up%20for%20grabs) are things we want help with.
See also: [Contributing to Open Source on GitHub](https://guides.github.com/activities/contributing-to-open-source/)
New to Git? See https://help.github.com/articles/what-are-other-good-resources-for-learning-git-and-github
### Making changes
__Never__ work directly on _dev_ or _master_ and you should never send a pull request from master - always from a feature branch created by you.
- Pick an [issue](https://github.com/akkadotnet/akka.net/issues). If no issue exists (search first) create one.
- Get any changes from _upstream_.
```
git checkout dev
git fetch upstream
git merge --ff-only upstream/dev
git push origin dev #(optional) this makes sure dev in your own fork on GitHub is up to date
```
See https://help.github.com/articles/fetching-a-remote for more info
- Create a new feature branch. It's important that you do your work on your own branch and that it's created off of _dev_. Tip: Give it a descriptive name and include the issue number, e.g. `implement-testkits-eventfilter-323` or `295-implement-tailchopping-router`, so that others can see what is being worked on.
```
git checkout -b my-new-branch-123
```
- Work on your feature. Commit.
- Rebase often, see below.
- Make sure you adhere to _Checklist before creating a Pull Request_ described above.
- Push the branch to your fork on GitHub
```
git push origin my-new-branch-123
```
- Send a Pull Request, see https://help.github.com/articles/using-pull-requests to the _dev_ branch.
See also: [Understanding the GitHub Flow](https://guides.github.com/introduction/flow/) (we're using `dev` as our master branch)
### Handling updates from upstream
While you're working away in your branch it's quite possible that your upstream _dev_ may be updated. If this happens you should:
- [Stash](http://git-scm.com/book/en/Git-Tools-Stashing) any un-committed changes you need to
```
git stash
```
- Update your local _dev_ by fetching from _upstream_
```
git checkout dev
git fetch upstream
git merge --ff-only upstream/dev
```
- Rebase your feature branch on _dev_. See [Git Branching - Rebasing](http://git-scm.com/book/en/Git-Branching-Rebasing) for more info on rebasing
```
git checkout my-new-branch-123
git rebase dev
git push origin dev #(optional) this makes sure dev in your own fork on GitHub is up to date
```
This ensures that your history is "clean" i.e. you have one branch off from _dev_ followed by your changes in a straight line. Failing to do this ends up with several "messy" merges in your history, which we don't want. This is the reason why you should always work in a branch and you should never be working in, or sending pull requests from _dev_.
If you're working on a long running feature then you may want to do this quite often, rather than run the risk of potential merge issues further down the line.
### Making changes to a Pull request
If you realize you've missed something after submitting a Pull request, just commit to your local branch and push the branch just like you did the first time. This commit will automatically be included in the Pull request.
If we ask you to change already published commits using interactive rebase (like squashing or splitting commits or rewriting commit messages) you need to force push using `-f`:
```
git push -f origin my-new-branch-123
```
### All my commits are on dev. How do I get them to a new branch? ###
If all commits are on _dev_ you need to move them to a new feature branch.
You can rebase your local _dev_ on _upstream/dev_ (to remove any merge commits), rename it, and recreate _dev_
```
git checkout dev
git rebase upstream/dev
git branch -m my-new-branch-123
git branch dev upstream/dev
```
Or you can create a new branch off of _dev_ and then cherry pick the commits
```
git checkout -b my-new-branch-123 upstream/dev
git cherry-pick rev #rev is the revisions you want to pick
git cherry-pick rev #repeat until you have picked all commits
git branch -m dev old-dev #rename dev
git branch dev upstream/dev #create a new dev
```
## Code guidelines
See [Contributor Guidelines](http://akkadotnet.github.io/wiki/Contributor%20guidelines) on the wiki.
---
Props to [NancyFX](https://github.com/NancyFx/Nancy) from which we've "borrowed" some of this text.

Просмотреть файл

@ -1,2 +1,88 @@
# Akka.Persistence.PostgreSql
Akka.Persistence.PostgreSql provider
## Akka.Persistence.PostgreSql
Akka Persistence journal and snapshot store backed by PostgreSql database.
**WARNING: Akka.Persistence.PostgreSql plugin is still in beta and its mechanics described below may still be subject to change**.
### Setup
To activate the journal plugin, add the following lines to actor system configuration file:
```
akka.persistence.journal.plugin = "akka.persistence.journal.postgresql"
akka.persistence.journal.postgresql.connection-string = "<database connection string>"
```
Similar configuration may be used to setup a PostgreSql snapshot store:
```
akka.persistence.snapshot-store.plugin = "akka.persistence.snapshot-store.postgresql"
akka.persistence.snapshot-store.postgresql.connection-string = "<database connection string>"
```
Remember that the connection string must be provided separately to the Journal and the Snapshot Store. To finish setup simply initialize the plugin using: `PostgreSqlPersistence.Init(actorSystem);`
### Configuration
Both journal and snapshot store share the same configuration keys (however they reside in separate scopes, so they are defined distinctly for either journal or snapshot store):
- `class` (string with fully qualified type name) - determines class to be used as a persistent journal. Default: *Akka.Persistence.PostgreSql.Journal.PostgreSqlJournal, Akka.Persistence.PostgreSql* (for journal) and *Akka.Persistence.PostgreSql.Snapshot.PostgreSqlSnapshotStore, Akka.Persistence.PostgreSql* (for snapshot store).
- `plugin-dispatcher` (string with configuration path) - describes a message dispatcher for persistent journal. Default: *akka.actor.default-dispatcher*
- `connection-string` - connection string used to access PostgreSql database. Default: *none*.
- `connection-timeout` - timespan determining default connection timeouts on database-related operations. Default: *30s*
- `schema-name` - name of the database schema, where journal or snapshot store tables should be placed. Default: *public*
- `table-name` - name of the table used by either journal or snapshot store. Default: *event_journal* (for journal) or *snapshot_store* (for snapshot store)
- `auto-initialize` - flag determining if journal or snapshot store related tables should be automatically created when they have not been found in the connected database. Default: *false*
### Custom SQL data queries
PostgreSql persistence plugin defines a default table schema used for both journal and snapshot store.
**EventJournal table**:
+----------------+-------------+------------+---------------+---------+
| persistence_id | sequence_nr | is_deleted | payload_type | payload |
+----------------+-------------+------------+---------------+---------+
| varchar(200) | bigint | boolean | varchar(500) | bytea |
+----------------+-------------+------------+---------------+---------+
**SnapshotStore table**:
+----------------+-------------+--------------------------+------------------+---------------+----------+
| persistence_id | sequence_nr | created_at               | created_at_ticks | snapshot_type | snapshot |
+----------------+-------------+--------------------------+------------------+---------------+----------+
| varchar(200)   | bigint      | timestamp with time zone | smallint         | varchar(500)  | bytea    |
+----------------+-------------+--------------------------+------------------+---------------+----------+
**created_at and created_at_ticks - The max precision of a PostgreSQL timestamp is 6. The max precision of a .Net DateTime object is 7. Because of this difference, the additional ticks are saved in a separate column and combined during deserialization. There is also a check constraint restricting created_at_ticks to the range [0,10) to ensure that there are no precision differences in the opposite direction.**
Underneath, Akka.Persistence.PostgreSql uses the Npgsql library to communicate with the database. You may choose not to use the dedicated built-in ones, but to create your own that better fit your use case. To do so, you have to create your own versions of `IJournalQueryBuilder` and `IJournalQueryMapper` (for custom journals) or `ISnapshotQueryBuilder` and `ISnapshotQueryMapper` (for custom snapshot stores) and then attach them inside the journal, just like in the example below:
```csharp
class MyCustomPostgreSqlJournal: Akka.Persistence.PostgreSql.Journal.PostgreSqlJournal
{
public MyCustomPostgreSqlJournal() : base()
{
QueryBuilder = new MyCustomJournalQueryBuilder();
QueryMapper = new MyCustomJournalQueryMapper();
}
}
```
The final step is to setup your custom journal using akka config:
```
akka.persistence.journal.postgresql.class = "MyModule.MyCustomPostgreSqlJournal, MyModule"
```
### Tests
The PostgreSql tests are packaged as a separate build task with a target of "RunPostgreSqlTests".
In order to run the tests, you must do the following things:
1. Download and install PostgreSql from: http://www.postgresql.org/download/
2. Install PostgreSql with the default settings. The default connection string uses the following credentials:
1. Username: postgres
2. Password: postgres
3. A custom app.config file can be used and needs to be placed in the same folder as the dll

27
RELEASE_NOTES.md Normal file
Просмотреть файл

@ -0,0 +1,27 @@
#### 1.0.4 June 12 2015 ####
#### 1.0.3 June 12 2015 ####
**Bugfix release for Akka.NET v1.0.2.**
This release addresses an issue with Akka.Persistence.SqlServer and Akka.Persistence.PostgreSql where both packages were missing a reference to Akka.Persistence.Sql.Common.
In Akka.NET v1.0.3 we've packaged Akka.Persistence.Sql.Common into its own NuGet package and referenced it in the affected packages.
#### 1.0.2 June 2 2015
Initial Release of Akka.Persistence.PostgreSql
Fixes & Changes - Akka.Persistence
* [Renamed GuaranteedDelivery classes to AtLeastOnceDelivery](https://github.com/akkadotnet/akka.net/pull/984)
* [Changes in Akka.Persistence SQL backend](https://github.com/akkadotnet/akka.net/pull/963)
* [PostgreSQL persistence plugin for both event journal and snapshot store](https://github.com/akkadotnet/akka.net/pull/971)
* [Cassandra persistence plugin](https://github.com/akkadotnet/akka.net/pull/995)
**New Features:**
**Akka.Persistence.PostgreSql** and **Akka.Persistence.Cassandra**
Akka.Persistence now has two additional concrete implementations for PostgreSQL and Cassandra! You can install either of the packages using the following commandline:
[Akka.Persistence.PostgreSql Configuration Docs](https://github.com/akkadotnet/akka.net/tree/dev/src/contrib/persistence/Akka.Persistence.PostgreSql)
```
PM> Install-Package Akka.Persistence.PostgreSql
```

20
build.cmd Normal file
Просмотреть файл

@ -0,0 +1,20 @@
@echo off
pushd %~dp0
src\.nuget\NuGet.exe update -self
src\.nuget\NuGet.exe install FAKE -ConfigFile src\.nuget\Nuget.Config -OutputDirectory src\packages -ExcludeVersion -Version 3.28.8
src\.nuget\NuGet.exe install xunit.runner.console -ConfigFile src\.nuget\Nuget.Config -OutputDirectory src\packages\FAKE -ExcludeVersion -Version 2.0.0
src\.nuget\NuGet.exe install nunit.runners -ConfigFile src\.nuget\Nuget.Config -OutputDirectory src\packages\FAKE -ExcludeVersion -Version 2.6.4
if not exist src\packages\SourceLink.Fake\tools\SourceLink.fsx (
src\.nuget\nuget.exe install SourceLink.Fake -ConfigFile src\.nuget\Nuget.Config -OutputDirectory src\packages -ExcludeVersion
)
rem cls
set encoding=utf-8
src\packages\FAKE\tools\FAKE.exe build.fsx %*
popd

575
build.fsx Normal file
Просмотреть файл

@ -0,0 +1,575 @@
#I @"src/packages/FAKE/tools"
#r "FakeLib.dll"
#r "System.Xml.Linq"
open System
open System.IO
open System.Text
open Fake
open Fake.FileUtils
open Fake.MSTest
open Fake.NUnitCommon
open Fake.TaskRunnerHelper
open Fake.ProcessHelper
cd __SOURCE_DIRECTORY__
//--------------------------------------------------------------------------------
// Information about the project for Nuget and Assembly info files
//--------------------------------------------------------------------------------
let product = "Akka.NET"
let authors = [ "Akka.NET Team" ]
let copyright = "Copyright © 2013-2015 Akka.NET Team"
let company = "Akka.NET Team"
let description = "Akka.NET is a port of the popular Java/Scala framework Akka to .NET"
let tags = ["akka";"actors";"actor";"model";"Akka";"concurrency"]
let configuration = "Release"
let toolDir = "tools"
let CloudCopyDir = toolDir @@ "CloudCopy"
let AzCopyDir = toolDir @@ "AzCopy"
// Read release notes and version
let parsedRelease =
File.ReadLines "RELEASE_NOTES.md"
|> ReleaseNotesHelper.parseReleaseNotes
let envBuildNumber = System.Environment.GetEnvironmentVariable("BUILD_NUMBER")
let buildNumber = if String.IsNullOrWhiteSpace(envBuildNumber) then "0" else envBuildNumber
let version = parsedRelease.AssemblyVersion + "." + buildNumber
let preReleaseVersion = version + "-beta"
let isUnstableDocs = hasBuildParam "unstable"
let isPreRelease = hasBuildParam "nugetprerelease"
let release = if isPreRelease then ReleaseNotesHelper.ReleaseNotes.New(version, version + "-beta", parsedRelease.Notes) else parsedRelease
printfn "Assembly version: %s\nNuget version; %s\n" release.AssemblyVersion release.NugetVersion
//--------------------------------------------------------------------------------
// Directories
let binDir = "bin"
let testOutput = "TestResults"
let nugetDir = binDir @@ "nuget"
let workingDir = binDir @@ "build"
let libDir = workingDir @@ @"lib\net45\"
let nugetExe = FullName @"src\.nuget\NuGet.exe"
let docDir = "bin" @@ "doc"
open Fake.RestorePackageHelper
Target "RestorePackages" (fun _ ->
"./src/Akka.sln"
|> RestoreMSSolutionPackages (fun p ->
{ p with
OutputPath = "./src/packages"
Retries = 4 })
)
//--------------------------------------------------------------------------------
// Clean build results
Target "Clean" <| fun _ ->
DeleteDir binDir
//--------------------------------------------------------------------------------
// Generate AssemblyInfo files with the version for release notes
open AssemblyInfoFile
Target "AssemblyInfo" <| fun _ ->
CreateCSharpAssemblyInfoWithConfig "src/SharedAssemblyInfo.cs" [
Attribute.Company company
Attribute.Copyright copyright
Attribute.Trademark ""
Attribute.Version version
Attribute.FileVersion version ] <| AssemblyInfoFileConfig(false)
for file in !! "src/**/AssemblyInfo.fs" do
let title =
file
|> Path.GetDirectoryName
|> Path.GetDirectoryName
|> Path.GetFileName
CreateFSharpAssemblyInfo file [
Attribute.Title title
Attribute.Product product
Attribute.Description description
Attribute.Copyright copyright
Attribute.Company company
Attribute.ComVisible false
Attribute.CLSCompliant true
Attribute.Version version
Attribute.FileVersion version ]
//--------------------------------------------------------------------------------
// Build the solution
Target "Build" <| fun _ ->
!!"src/Akka.sln"
|> MSBuildRelease "" "Rebuild"
|> ignore
Target "BuildMono" <| fun _ ->
!!"src/Akka.sln"
|> MSBuild "" "Rebuild" [("Configuration","Release Mono")]
|> ignore
//--------------------------------------------------------------------------------
// Build the docs
Target "Docs" <| fun _ ->
!! "documentation/akkadoc.shfbproj"
|> MSBuildRelease "" "Rebuild"
|> ignore
//--------------------------------------------------------------------------------
// Push DOCs content to Windows Azure blob storage
Target "AzureDocsDeploy" (fun _ ->
let rec pushToAzure docDir azureUrl container azureKey trialsLeft =
let tracing = enableProcessTracing
enableProcessTracing <- false
let arguments = sprintf "/Source:%s /Dest:%s /DestKey:%s /S /Y /SetContentType" (Path.GetFullPath docDir) (azureUrl @@ container) azureKey
tracefn "Pushing docs to %s. Attempts left: %d" (azureUrl) trialsLeft
try
let result = ExecProcess(fun info ->
info.FileName <- AzCopyDir @@ "AzCopy.exe"
info.Arguments <- arguments) (TimeSpan.FromMinutes 120.0) //takes a very long time to upload
if result <> 0 then failwithf "Error during AzCopy.exe upload to azure."
with exn ->
if (trialsLeft > 0) then (pushToAzure docDir azureUrl container azureKey (trialsLeft-1))
else raise exn
let canPush = hasBuildParam "azureKey" && hasBuildParam "azureUrl"
if (canPush) then
printfn "Uploading API docs to Azure..."
let azureUrl = getBuildParam "azureUrl"
let azureKey = (getBuildParam "azureKey") + "==" //hack, because it looks like FAKE arg parsing chops off the "==" that gets tacked onto the end of each Azure storage key
if(isUnstableDocs) then
pushToAzure docDir azureUrl "unstable" azureKey 3
if(not isUnstableDocs) then
pushToAzure docDir azureUrl "stable" azureKey 3
pushToAzure docDir azureUrl release.NugetVersion azureKey 3
if(not canPush) then
printfn "Missing required paraments to push docs to Azure. Run build HelpDocs to find out!"
)
Target "PublishDocs" DoNothing
//--------------------------------------------------------------------------------
// Copy the build output to bin directory
//--------------------------------------------------------------------------------
Target "CopyOutput" <| fun _ ->
let copyOutput project =
let src = "src" @@ project @@ @"bin/Release/"
let dst = binDir @@ project
CopyDir dst src allFiles
[ "core/Akka"
"core/Akka.FSharp"
"core/Akka.TestKit"
"core/Akka.Remote"
"core/Akka.Remote.TestKit"
"core/Akka.Cluster"
"core/Akka.MultiNodeTestRunner"
"core/Akka.Persistence"
"core/Akka.Persistence.FSharp"
"core/Akka.Persistence.TestKit"
"contrib/loggers/Akka.Logger.slf4net"
"contrib/loggers/Akka.Logger.NLog"
"contrib/loggers/Akka.Logger.Serilog"
"contrib/dependencyinjection/Akka.DI.Core"
"contrib/dependencyinjection/Akka.DI.AutoFac"
"contrib/dependencyinjection/Akka.DI.CastleWindsor"
"contrib/dependencyinjection/Akka.DI.Ninject"
"contrib/testkits/Akka.TestKit.Xunit"
"contrib/testkits/Akka.TestKit.NUnit"
"contrib/testkits/Akka.TestKit.Xunit2"
]
|> List.iter copyOutput
Target "BuildRelease" DoNothing
//--------------------------------------------------------------------------------
// Tests targets
//--------------------------------------------------------------------------------
//--------------------------------------------------------------------------------
// Clean test output
Target "CleanTests" <| fun _ ->
DeleteDir testOutput
//--------------------------------------------------------------------------------
// Run tests
open XUnit2Helper
Target "RunTests" <| fun _ ->
let msTestAssemblies = !! "src/**/bin/Release/Akka.TestKit.VsTest.Tests.dll"
let nunitTestAssemblies = !! "src/**/bin/Release/Akka.TestKit.NUnit.Tests.dll"
let xunitTestAssemblies = !! "src/**/bin/Release/*.Tests.dll" --
"src/**/bin/Release/Akka.TestKit.VsTest.Tests.dll" --
"src/**/bin/Release/Akka.TestKit.NUnit.Tests.dll" --
"src/**/bin/Release/Akka.Persistence.SqlServer.Tests.dll" --
"src/**/bin/Release/Akka.Persistence.PostgreSql.Tests.dll" --
"src/**/bin/Release/Akka.Persistence.Cassandra.Tests.dll"
mkdir testOutput
MSTest (fun p -> p) msTestAssemblies
nunitTestAssemblies
|> NUnit (fun p ->
{p with
DisableShadowCopy = true;
OutputFile = testOutput + @"\NUnitTestResults.xml"})
let xunitToolPath = findToolInSubPath "xunit.console.exe" "src/packages/xunit.runner.console*/tools"
printfn "Using XUnit runner: %s" xunitToolPath
xUnit2
(fun p -> { p with OutputDir = testOutput; ToolPath = xunitToolPath })
xunitTestAssemblies
Target "RunTestsMono" <| fun _ ->
let xunitTestAssemblies = !! "src/**/bin/Release Mono/*.Tests.dll"
mkdir testOutput
let xunitToolPath = findToolInSubPath "xunit.console.exe" "src/packages/xunit.runner.console*/tools"
printfn "Using XUnit runner: %s" xunitToolPath
xUnit2
(fun p -> { p with OutputDir = testOutput; ToolPath = xunitToolPath })
xunitTestAssemblies
Target "MultiNodeTests" <| fun _ ->
let multiNodeTestPath = findToolInSubPath "Akka.MultiNodeTestRunner.exe" "bin/core/Akka.MultiNodeTestRunner*"
printfn "Using MultiNodeTestRunner: %s" multiNodeTestPath
let spec = getBuildParam "spec"
let args = new StringBuilder()
|> append "Akka.MultiNodeTests.dll"
|> append "-Dmultinode.enable-filesink=on"
|> appendIfNotNullOrEmpty spec "-Dmultinode.test-spec="
|> toText
let result = ExecProcess(fun info ->
info.FileName <- multiNodeTestPath
info.WorkingDirectory <- (Path.GetDirectoryName (FullName multiNodeTestPath))
info.Arguments <- args) (System.TimeSpan.FromMinutes 60.0) (* This is a VERY long running task. *)
if result <> 0 then failwithf "MultiNodeTestRunner failed. %s %s" multiNodeTestPath args
Target "RunSqlServerTests" <| fun _ ->
let sqlServerTests = !! "src/**/bin/Release/Akka.Persistence.SqlServer.Tests.dll"
let xunitToolPath = findToolInSubPath "xunit.console.exe" "src/packages/xunit.runner.console*/tools"
printfn "Using XUnit runner: %s" xunitToolPath
xUnit
(fun p -> { p with OutputDir = testOutput; ToolPath = xunitToolPath })
sqlServerTests
Target "RunPostgreSqlTests" <| fun _ ->
let postgreSqlTests = !! "src/**/bin/Release/Akka.Persistence.PostgreSql.Tests.dll"
let xunitToolPath = findToolInSubPath "xunit.console.exe" "src/packages/xunit.runner.console*/tools"
printfn "Using XUnit runner: %s" xunitToolPath
xUnit2
(fun p -> { p with OutputDir = testOutput; ToolPath = xunitToolPath })
postgreSqlTests
Target "RunCassandraTests" <| fun _ ->
let cassandraTests = !! "src/**/bin/Release/Akka.Persistence.Cassandra.Tests.dll"
let xunitToolPath = findToolInSubPath "xunit.console.exe" "src/packages/xunit.runner.console*/tools"
printfn "Using XUnit runner: %s" xunitToolPath
xUnit2
(fun p -> { p with OutputDir = testOutput; ToolPath = xunitToolPath })
cassandraTests
//--------------------------------------------------------------------------------
// Nuget targets
//--------------------------------------------------------------------------------
module Nuget =
// add Akka dependency for other projects
let getAkkaDependency project =
match project with
| "Akka" -> []
| "Akka.Cluster" -> ["Akka.Remote", release.NugetVersion]
| persistence when (persistence.Contains("Sql") && not (persistence.Equals("Akka.Persistence.Sql.Common"))) -> ["Akka.Persistence.Sql.Common", preReleaseVersion]
| persistence when (persistence.StartsWith("Akka.Persistence.")) -> ["Akka.Persistence", preReleaseVersion]
| di when (di.StartsWith("Akka.DI.") && not (di.EndsWith("Core"))) -> ["Akka.DI.Core", release.NugetVersion]
| testkit when testkit.StartsWith("Akka.TestKit.") -> ["Akka.TestKit", release.NugetVersion]
| _ -> ["Akka", release.NugetVersion]
// used to add -pre suffix to pre-release packages
let getProjectVersion project =
match project with
| "Akka.Cluster" -> preReleaseVersion
| persistence when persistence.StartsWith("Akka.Persistence") -> preReleaseVersion
| _ -> release.NugetVersion
open Nuget
//--------------------------------------------------------------------------------
// Clean nuget directory
Target "CleanNuget" <| fun _ ->
CleanDir nugetDir
//--------------------------------------------------------------------------------
// Pack nuget for all projects
// Publish to nuget.org if nugetkey is specified
// Creates a regular NuGet package and a symbols package for every *.nuspec file
// found under src/. Each package is assembled in `workingDir` and written to
// `nugetDir`.
let createNugetPackages _ =
    // Deletes a directory, retrying up to 3 times (files may be transiently locked).
    let removeDir dir =
        let del _ =
            DeleteDir dir
            not (directoryExists dir)
        runWithRetries del 3 |> ignore

    ensureDirectory nugetDir
    for nuspec in !! "src/**/*.nuspec" do
        printfn "Creating nuget packages for %s" nuspec
        CleanDir workingDir
        // Project name comes from the nuspec filename; the matching *.csproj /
        // *.fsproj is expected to live in the same directory.
        let project = Path.GetFileNameWithoutExtension nuspec
        let projectDir = Path.GetDirectoryName nuspec
        let projectFile = (!! (projectDir @@ project + ".*sproj")) |> Seq.head
        let releaseDir = projectDir @@ @"bin\Release"
        // Package dependencies = packages.config entries + intra-repo Akka deps.
        let packages = projectDir @@ "packages.config"
        let packageDependencies = if (fileExists packages) then (getDependencies packages) else []
        let dependencies = packageDependencies @ getAkkaDependency project
        let releaseVersion = getProjectVersion project

        // Runs `nuget pack` against the nuspec, for the given output directory
        // and symbol-package mode.
        let pack outputDir symbolPackage =
            NuGetHelper.NuGet
                (fun p ->
                    { p with
                        Description = description
                        Authors = authors
                        Copyright = copyright
                        Project = project
                        Properties = ["Configuration", "Release"]
                        ReleaseNotes = release.Notes |> String.concat "\n"
                        Version = releaseVersion
                        Tags = tags |> String.concat " "
                        OutputPath = outputDir
                        WorkingDir = workingDir
                        SymbolPackage = symbolPackage
                        Dependencies = dependencies })
                nuspec

        // Copy dll, pdb and xml to libdir = workingDir/lib/net45/
        ensureDirectory libDir
        !! (releaseDir @@ project + ".dll")
        ++ (releaseDir @@ project + ".pdb")
        ++ (releaseDir @@ project + ".xml")
        ++ (releaseDir @@ project + ".ExternalAnnotations.xml")
        |> CopyFiles libDir

        // Copy all src-files (.cs and .fs files) to workingDir/src
        let nugetSrcDir = workingDir @@ @"src/"
        // CreateDir nugetSrcDir

        // AssemblyInfo files are excluded from the source package.
        let isCs = hasExt ".cs"
        let isFs = hasExt ".fs"
        let isAssemblyInfo f = (filename f).Contains("AssemblyInfo")
        let isSrc f = (isCs f || isFs f) && not (isAssemblyInfo f)
        CopyDir nugetSrcDir projectDir isSrc

        //Remove workingDir/src/obj and workingDir/src/bin
        removeDir (nugetSrcDir @@ "obj")
        removeDir (nugetSrcDir @@ "bin")

        // Create both normal nuget package and symbols nuget package.
        // Uses the files we copied to workingDir and outputs to nugetdir
        pack nugetDir NugetSymbolPackage.Nuspec

        removeDir workingDir
// Pushes the previously created NuGet packages (and, optionally, symbols
// packages) to the configured feeds. Publishing only happens when the relevant
// build parameters are supplied: `nugetkey` for ordinary packages,
// `symbolskey` + `symbolspublishurl` for symbols packages.
let publishNugetPackages _ =
    // Pushes a single package file, retrying up to `trialsLeft` more times when
    // the push fails (network flakiness is common against nuget.org).
    let rec publishPackage url accessKey trialsLeft packageFile =
        // Disable process tracing so the access key is not echoed to the log.
        let tracing = enableProcessTracing
        enableProcessTracing <- false
        let args p =
            match p with
            // Empty url -> push to the default feed (nuget.org).
            | (pack, key, "") -> sprintf "push \"%s\" %s" pack key
            | (pack, key, url) -> sprintf "push \"%s\" %s -source %s" pack key url

        tracefn "Pushing %s Attempts left: %d" (FullName packageFile) trialsLeft
        try
            let result = ExecProcess (fun info ->
                    info.FileName <- nugetExe
                    info.WorkingDirectory <- (Path.GetDirectoryName (FullName packageFile))
                    info.Arguments <- args (packageFile, accessKey, url)) (System.TimeSpan.FromMinutes 1.0)
            enableProcessTracing <- tracing
            // FIX: the original message said "NuGet symbol push", but this path
            // handles ordinary package pushes as well.
            if result <> 0 then failwithf "Error during NuGet push. %s %s" nugetExe (args (packageFile, accessKey, url))
        with exn ->
            // FIX: restore tracing on the failure path too — previously an
            // exhausted retry left process tracing disabled for the rest of
            // the build.
            enableProcessTracing <- tracing
            if (trialsLeft > 0) then (publishPackage url accessKey (trialsLeft-1) packageFile)
            else raise exn

    let shouldPushNugetPackages = hasBuildParam "nugetkey"
    let shouldPushSymbolsPackages = (hasBuildParam "symbolspublishurl") && (hasBuildParam "symbolskey")

    if (shouldPushNugetPackages || shouldPushSymbolsPackages) then
        printfn "Pushing nuget packages"
        if shouldPushNugetPackages then
            // Ordinary packages: everything in nugetDir except *.symbols.nupkg.
            let normalPackages =
                !! (nugetDir @@ "*.nupkg")
                -- (nugetDir @@ "*.symbols.nupkg") |> Seq.sortBy(fun x -> x.ToLower())
            for package in normalPackages do
                publishPackage (getBuildParamOrDefault "nugetpublishurl" "") (getBuildParam "nugetkey") 3 package

        if shouldPushSymbolsPackages then
            let symbolPackages = !! (nugetDir @@ "*.symbols.nupkg") |> Seq.sortBy(fun x -> x.ToLower())
            for package in symbolPackages do
                publishPackage (getBuildParam "symbolspublishurl") (getBuildParam "symbolskey") 3 package
// Create packages and (if keys are configured) publish them in one go.
Target "Nuget" <| fun _ ->
    createNugetPackages()
    publishNugetPackages()

// Create packages only; nothing is pushed.
Target "CreateNuget" <| fun _ ->
    createNugetPackages()

// Publish previously created packages only.
Target "PublishNuget" <| fun _ ->
    publishNugetPackages()
//--------------------------------------------------------------------------------
// Help
//--------------------------------------------------------------------------------

// Top-level usage screen; this is also the default target (see RunTargetOrDefault).
Target "Help" <| fun _ ->
    List.iter printfn [
      "usage:"
      "build [target]"
      ""
      " Targets for building:"
      " * Build Builds"
      " * Nuget Create and optionally publish nugets packages"
      " * RunTests Runs tests"
      " * All Builds, run tests, creates and optionally publish nuget packages"
      ""
      " Other Targets"
      " * Help Display this help"
      " * HelpNuget Display help about creating and pushing nuget packages"
      " * HelpDocs Display help about creating and pushing API docs"
      ""]

// Detailed help for the Nuget target and its build parameters.
Target "HelpNuget" <| fun _ ->
    List.iter printfn [
      "usage: "
      "build Nuget [nugetkey=<key> [nugetpublishurl=<url>]] "
      " [symbolskey=<key> symbolspublishurl=<url>] "
      " [nugetprerelease=<prefix>]"
      ""
      "Arguments for Nuget target:"
      " nugetprerelease=<prefix> Creates a pre-release package."
      " The version will be version-prefix<date>"
      " Example: nugetprerelease=dev =>"
      " 0.6.3-dev1408191917"
      ""
      "In order to publish a nuget package, keys must be specified."
      "If a key is not specified the nuget packages will only be created on disk"
      "After a build you can find them in bin/nuget"
      ""
      "For pushing nuget packages to nuget.org and symbols to symbolsource.org"
      "you need to specify nugetkey=<key>"
      " build Nuget nugetKey=<key for nuget.org>"
      ""
      "For pushing the ordinary nuget packages to another place than nuget.org specify the url"
      " nugetkey=<key> nugetpublishurl=<url> "
      ""
      "For pushing symbols packages specify:"
      " symbolskey=<key> symbolspublishurl=<url> "
      ""
      "Examples:"
      " build Nuget Build nuget packages to the bin/nuget folder"
      ""
      " build Nuget nugetprerelease=dev Build pre-release nuget packages"
      ""
      " build Nuget nugetkey=123 Build and publish to nuget.org and symbolsource.org"
      ""
      " build Nuget nugetprerelease=dev nugetkey=123 nugetpublishurl=http://abc"
      " symbolskey=456 symbolspublishurl=http://xyz"
      " Build and publish pre-release nuget packages to http://abc"
      " and symbols packages to http://xyz"
      ""]

// Detailed help for building and publishing the API documentation.
Target "HelpDocs" <| fun _ ->
    List.iter printfn [
      "usage: "
      "build Docs"
      "Just builds the API docs for Akka.NET locally. Does not attempt to publish."
      ""
      "build PublishDocs azureKey=<key> "
      " azureUrl=<url> "
      " [unstable=true]"
      ""
      "Arguments for PublishDocs target:"
      " azureKey=<key> Azure blob storage key."
      " Used to authenticate to the storage account."
      ""
      " azureUrl=<url> Base URL for Azure storage container."
      " FAKE will automatically set container"
      " names based on build parameters."
      ""
      " [unstable=true] Indicates that we'll publish to an Azure"
      " container named 'unstable'. If this param"
      " is not present we'll publish to containers"
      " 'stable' and the 'release.version'"
      ""
      "In order to publish documentation all of these values must be provided."
      "Examples:"
      " build PublishDocs azureKey=1s9HSAHA+..."
      " azureUrl=http://fooaccount.blob.core.windows.net/docs"
      " Build and publish docs to http://fooaccount.blob.core.windows.net/docs/stable"
      " and http://fooaccount.blob.core.windows.net/docs/{release.version}"
      ""
      " build PublishDocs azureKey=1s9HSAHA+..."
      " azureUrl=http://fooaccount.blob.core.windows.net/docs"
      " unstable=true"
      " Build and publish docs to http://fooaccount.blob.core.windows.net/docs/unstable"
      ""]
//--------------------------------------------------------------------------------
// Target dependencies
//--------------------------------------------------------------------------------

// build dependencies
"Clean" ==> "AssemblyInfo" ==> "RestorePackages" ==> "Build" ==> "CopyOutput" ==> "BuildRelease"

// tests dependencies
"CleanTests" ==> "RunTests"

// nuget dependencies
"CleanNuget" ==> "CreateNuget"
"CleanNuget" ==> "BuildRelease" ==> "Nuget"

//docs dependencies
"BuildRelease" ==> "Docs" ==> "AzureDocsDeploy" ==> "PublishDocs"

// "All" = full pipeline: release build, tests (incl. multi-node), packaging.
Target "All" DoNothing
"BuildRelease" ==> "All"
"RunTests" ==> "All"
"MultiNodeTests" ==> "All"
"Nuget" ==> "All"

Target "AllTests" DoNothing //used for Mono builds, due to Mono 4.0 bug with FAKE / NuGet https://github.com/fsharp/fsharp/issues/427
"BuildRelease" ==> "AllTests"
"RunTests" ==> "AllTests"
"MultiNodeTests" ==> "AllTests"

// Database integration-test targets all require a release build first.
"BuildRelease" ==> "RunSqlServerTests"
"BuildRelease" ==> "RunPostgreSqlTests"
"BuildRelease" ==> "RunCassandraTests"

// When no target is given on the command line, print the help screen.
RunTargetOrDefault "Help"

26
build.sh Normal file
Просмотреть файл

@ -0,0 +1,26 @@
#!/bin/bash
# Bootstrap script for building Akka.NET on Mono: resolves the script's own
# location (following symlinks), restores the build-time tooling via NuGet,
# then hands control to FAKE, forwarding all command-line arguments.
#
# FIX: all $SCRIPT_PATH expansions are now quoted so the build works from a
# checkout path containing spaces; backticks replaced with $(...).

# Resolve symlinks so SCRIPT_PATH points at the real script file.
SCRIPT_PATH="${BASH_SOURCE[0]}"
if [ -h "${SCRIPT_PATH}" ]; then
  while [ -h "${SCRIPT_PATH}" ]; do SCRIPT_PATH="$(readlink "${SCRIPT_PATH}")"; done
fi

# Convert to an absolute directory path without disturbing the caller's CWD.
pushd . > /dev/null
cd "$(dirname "${SCRIPT_PATH}")" > /dev/null
SCRIPT_PATH="$(pwd)"
popd > /dev/null

# Update NuGet itself, then restore the pinned build-time tool packages.
mono "$SCRIPT_PATH/src/.nuget/NuGet.exe" update -self
mono "$SCRIPT_PATH/src/.nuget/NuGet.exe" install FAKE -OutputDirectory "$SCRIPT_PATH/src/packages" -ExcludeVersion -Version 3.28.8
mono "$SCRIPT_PATH/src/.nuget/NuGet.exe" install xunit.runners -OutputDirectory "$SCRIPT_PATH/src/packages/FAKE" -ExcludeVersion -Version 1.9.2
mono "$SCRIPT_PATH/src/.nuget/NuGet.exe" install nunit.runners -OutputDirectory "$SCRIPT_PATH/src/packages/FAKE" -ExcludeVersion -Version 2.6.4

# SourceLink.Fake is only restored when missing (no pinned version).
if ! [ -e "$SCRIPT_PATH/src/packages/SourceLink.Fake/tools/SourceLink.fsx" ]; then
  mono "$SCRIPT_PATH/src/.nuget/NuGet.exe" install SourceLink.Fake -OutputDirectory "$SCRIPT_PATH/src/packages" -ExcludeVersion
fi

export encoding=utf-8
mono "$SCRIPT_PATH/src/packages/FAKE/tools/FAKE.exe" build.fsx "$@"

Просмотреть файл

@ -0,0 +1,106 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="12.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProjectGuid>{2D1812FD-70C0-43EE-9C25-3980E41F30E1}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>Akka.Persistence.PostgreSql.Tests</RootNamespace>
<AssemblyName>Akka.Persistence.PostgreSql.Tests</AssemblyName>
<TargetFrameworkVersion>v4.5</TargetFrameworkVersion>
<FileAlignment>512</FileAlignment>
<SolutionDir Condition="$(SolutionDir) == '' Or $(SolutionDir) == '*Undefined*'">..\</SolutionDir>
<RestorePackages>true</RestorePackages>
<TargetFrameworkProfile />
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<ItemGroup>
<Reference Include="Mono.Security">
<HintPath>..\..\..\packages\Npgsql.2.2.5\lib\net45\Mono.Security.dll</HintPath>
</Reference>
<Reference Include="Npgsql">
<HintPath>..\..\..\packages\Npgsql.2.2.5\lib\net45\Npgsql.dll</HintPath>
</Reference>
<Reference Include="System" />
<Reference Include="System.Configuration" />
<Reference Include="System.Core" />
<Reference Include="System.Xml.Linq" />
<Reference Include="System.Data.DataSetExtensions" />
<Reference Include="Microsoft.CSharp" />
<Reference Include="System.Data" />
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="DbUtils.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="PostgreSqlJournalSpec.cs" />
<Compile Include="PostgreSqlSnapshotStoreSpec.cs" />
</ItemGroup>
<ItemGroup>
<None Include="app.config" />
<None Include="packages.config" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\testkits\Akka.TestKit.Xunit2\Akka.TestKit.Xunit2.csproj">
<Project>{7dbd5c17-5e9d-40c4-9201-d092751532a7}</Project>
<Name>Akka.TestKit.Xunit2</Name>
</ProjectReference>
<ProjectReference Include="..\Akka.Persistence.PostgreSql\Akka.Persistence.PostgreSql.csproj">
<Project>{4b89227b-5ad1-4061-816f-570067c3727f}</Project>
<Name>Akka.Persistence.PostgreSql</Name>
</ProjectReference>
<ProjectReference Include="..\..\..\core\Akka.Persistence.TestKit\Akka.Persistence.TestKit.csproj">
<Project>{ad9418b6-c452-4169-94fb-d43de0bfa966}</Project>
<Name>Akka.Persistence.TestKit</Name>
</ProjectReference>
<ProjectReference Include="..\..\..\core\Akka.Persistence\Akka.Persistence.csproj">
<Project>{fca84dea-c118-424b-9eb8-34375dfef18a}</Project>
<Name>Akka.Persistence</Name>
</ProjectReference>
<ProjectReference Include="..\..\..\core\Akka.TestKit\Akka.TestKit.csproj">
<Project>{0d3cbad0-bbdb-43e5-afc4-ed1d3ecdc224}</Project>
<Name>Akka.TestKit</Name>
</ProjectReference>
<ProjectReference Include="..\..\..\core\Akka\Akka.csproj">
<Project>{5deddf90-37f0-48d3-a0b0-a5cbd8a7e377}</Project>
<Name>Akka</Name>
</ProjectReference>
<ProjectReference Include="..\Akka.Persistence.Sql.Common\Akka.Persistence.Sql.Common.csproj">
<Project>{3b9e6211-9488-4db5-b714-24248693b38f}</Project>
<Name>Akka.Persistence.Sql.Common</Name>
</ProjectReference>
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<Import Project="$(SolutionDir)\.nuget\NuGet.targets" Condition="Exists('$(SolutionDir)\.nuget\NuGet.targets')" />
<Target Name="EnsureNuGetPackageBuildImports" BeforeTargets="PrepareForBuild">
<PropertyGroup>
<ErrorText>This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}.</ErrorText>
</PropertyGroup>
<Error Condition="!Exists('$(SolutionDir)\.nuget\NuGet.targets')" Text="$([System.String]::Format('$(ErrorText)', '$(SolutionDir)\.nuget\NuGet.targets'))" />
</Target>
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>

Просмотреть файл

@ -0,0 +1,80 @@
using System;
using System.Configuration;
using System.Data.SqlClient;
using Akka.Dispatch.SysMsg;
using Npgsql;
namespace Akka.Persistence.PostgreSql.Tests
{
    /// <summary>
    /// Helpers for provisioning and resetting the PostgreSQL test database
    /// referenced by the "TestDb" connection string in app.config.
    /// </summary>
    public static class DbUtils
    {
        /// <summary>
        /// Ensures the test database exists: connects to the maintenance
        /// "postgres" database, creates the test database when absent, or
        /// drops leftover journal/snapshot tables when it already exists.
        /// </summary>
        public static void Initialize()
        {
            var builder = new NpgsqlConnectionStringBuilder(
                ConfigurationManager.ConnectionStrings["TestDb"].ConnectionString);

            // Remember the target database, then switch the connection over to
            // the maintenance database so we can create/inspect it.
            var testDatabase = builder.Database;
            builder.Database = "postgres";

            using (var conn = new NpgsqlConnection(builder.ToString()))
            {
                conn.Open();

                bool exists;
                using (var cmd = new NpgsqlCommand(
                    string.Format(@"SELECT TRUE FROM pg_database WHERE datname='{0}'", testDatabase), conn))
                {
                    var scalar = cmd.ExecuteScalar();
                    exists = scalar != null && Convert.ToBoolean(scalar);
                }

                if (exists)
                    DropTestTables(conn);
                else
                    CreateDatabase(conn, testDatabase);
            }
        }

        /// <summary>
        /// Drops the persistence tables from the test database itself so the
        /// next test run starts from an empty state.
        /// </summary>
        public static void Clean()
        {
            var connectionString = ConfigurationManager.ConnectionStrings["TestDb"].ConnectionString;
            using (var conn = new NpgsqlConnection(connectionString))
            {
                conn.Open();
                DropTestTables(conn);
            }
        }

        // Creates the test database via the maintenance connection.
        private static void CreateDatabase(NpgsqlConnection conn, string databaseName)
        {
            using (var cmd = new NpgsqlCommand(string.Format(@"CREATE DATABASE {0}", databaseName), conn))
            {
                cmd.ExecuteNonQuery();
            }
        }

        // Removes the journal and snapshot tables (if present).
        private static void DropTestTables(NpgsqlConnection conn)
        {
            using (var cmd = new NpgsqlCommand(@"
                DROP TABLE IF EXISTS public.event_journal;
                DROP TABLE IF EXISTS public.snapshot_store", conn))
            {
                cmd.ExecuteNonQuery();
            }
        }
    }
}

Просмотреть файл

@ -0,0 +1,49 @@
using System.Configuration;
using Akka.Configuration;
using Akka.Persistence.TestKit.Journal;
namespace Akka.Persistence.PostgreSql.Tests
{
    /// <summary>
    /// Runs the shared Akka.NET persistence journal spec (TCK) against the
    /// PostgreSql journal plugin, using the "TestDb" connection string.
    /// </summary>
    public class PostgreSqlJournalSpec : JournalSpec
    {
        // Built once per test class by the static constructor below.
        private static readonly Config SpecConfig;

        static PostgreSqlJournalSpec()
        {
            var connectionString = ConfigurationManager.ConnectionStrings["TestDb"].ConnectionString;
            // HOCON configuration pointing the journal plugin at the test
            // database; auto-initialize = on creates the event_journal table.
            var config = @"
                akka.persistence {
                    publish-plugin-commands = on
                    journal {
                        plugin = ""akka.persistence.journal.postgresql""
                        postgresql {
                            class = ""Akka.Persistence.PostgreSql.Journal.PostgreSqlJournal, Akka.Persistence.PostgreSql""
                            plugin-dispatcher = ""akka.actor.default-dispatcher""
                            table-name = event_journal
                            schema-name = public
                            auto-initialize = on
                            connection-string = """ + connectionString + @"""
                        }
                    }
                }";

            SpecConfig = ConfigurationFactory.ParseString(config);

            //need to make sure db is created before the tests start
            DbUtils.Initialize();
        }

        public PostgreSqlJournalSpec()
            : base(SpecConfig, "PostgreSqlJournalSpec")
        {
            // Seeds the journal with the TCK's standard test messages.
            Initialize();
        }

        protected override void Dispose(bool disposing)
        {
            base.Dispose(disposing);
            // Drop the persistence tables so the next spec starts clean.
            DbUtils.Clean();
        }
    }
}

Просмотреть файл

@ -0,0 +1,49 @@
using System.Configuration;
using Akka.Configuration;
using Akka.Persistence.TestKit.Snapshot;
namespace Akka.Persistence.PostgreSql.Tests
{
    /// <summary>
    /// Runs the shared Akka.NET persistence snapshot-store spec (TCK) against
    /// the PostgreSql snapshot-store plugin, using the "TestDb" connection string.
    /// </summary>
    public class PostgreSqlSnapshotStoreSpec : SnapshotStoreSpec
    {
        // Built once per test class by the static constructor below.
        private static readonly Config SpecConfig;

        static PostgreSqlSnapshotStoreSpec()
        {
            var connectionString = ConfigurationManager.ConnectionStrings["TestDb"].ConnectionString;
            // HOCON configuration pointing the snapshot-store plugin at the test
            // database; auto-initialize = on creates the snapshot_store table.
            var config = @"
                akka.persistence {
                    publish-plugin-commands = on
                    snapshot-store {
                        plugin = ""akka.persistence.snapshot-store.postgresql""
                        postgresql {
                            class = ""Akka.Persistence.PostgreSql.Snapshot.PostgreSqlSnapshotStore, Akka.Persistence.PostgreSql""
                            plugin-dispatcher = ""akka.actor.default-dispatcher""
                            table-name = snapshot_store
                            schema-name = public
                            auto-initialize = on
                            connection-string = """ + connectionString + @"""
                        }
                    }
                }";

            SpecConfig = ConfigurationFactory.ParseString(config);

            //need to make sure db is created before the tests start
            DbUtils.Initialize();
        }

        public PostgreSqlSnapshotStoreSpec()
            : base(SpecConfig, "PostgreSqlSnapshotStoreSpec")
        {
            // Seeds the snapshot store with the TCK's standard metadata.
            Initialize();
        }

        protected override void Dispose(bool disposing)
        {
            base.Dispose(disposing);
            // Drop the persistence tables so the next spec starts clean.
            DbUtils.Clean();
        }
    }
}

Просмотреть файл

@ -0,0 +1,36 @@
// Assembly metadata for Akka.Persistence.PostgreSql.Tests (standard VS template).
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Akka.Persistence.PostgreSql.Tests")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("Akka.Persistence.PostgreSql.Tests")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("8494fd8c-15ae-489e-83aa-1ac37b458964")]

// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]

Просмотреть файл

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<connectionStrings>
<add name="TestDb" connectionString="Server=localhost;Port=5432;Database=akka_persistence_tests;User Id=postgres;Password=postgres" providerName="Npgsql"/>
</connectionStrings>
</configuration>

Просмотреть файл

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="Npgsql" version="2.2.5" targetFramework="net45" />
</packages>

Просмотреть файл

@ -0,0 +1,101 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="12.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProjectGuid>{4B89227B-5AD1-4061-816F-570067C3727F}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>Akka.Persistence.PostgreSql</RootNamespace>
<AssemblyName>Akka.Persistence.PostgreSql</AssemblyName>
<TargetFrameworkVersion>v4.5</TargetFrameworkVersion>
<FileAlignment>512</FileAlignment>
<SolutionDir Condition="$(SolutionDir) == '' Or $(SolutionDir) == '*Undefined*'">..\</SolutionDir>
<RestorePackages>true</RestorePackages>
<TargetFrameworkProfile />
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<ItemGroup>
<Reference Include="Mono.Security">
<HintPath>..\..\..\packages\Npgsql.2.2.5\lib\net45\Mono.Security.dll</HintPath>
</Reference>
<Reference Include="Npgsql">
<HintPath>..\..\..\packages\Npgsql.2.2.5\lib\net45\Npgsql.dll</HintPath>
</Reference>
<Reference Include="System" />
<Reference Include="System.Core" />
<Reference Include="System.Xml.Linq" />
<Reference Include="System.Data.DataSetExtensions" />
<Reference Include="Microsoft.CSharp" />
<Reference Include="System.Data" />
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="Extension.cs" />
<Compile Include="InternalExtensions.cs" />
<Compile Include="Journal\QueryBuilder.cs" />
<Compile Include="Journal\QueryMapper.cs" />
<Compile Include="Journal\PostgreSqlJournal.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="Snapshot\QueryBuilder.cs" />
<Compile Include="Snapshot\QueryMapper.cs" />
<Compile Include="Snapshot\PostgreSqlSnapshotStore.cs" />
<Compile Include="PostgreSqlInitializer.cs" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="postgresql.conf">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</EmbeddedResource>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\core\Akka.Persistence\Akka.Persistence.csproj">
<Project>{fca84dea-c118-424b-9eb8-34375dfef18a}</Project>
<Name>Akka.Persistence</Name>
</ProjectReference>
<ProjectReference Include="..\..\..\core\Akka\Akka.csproj">
<Project>{5deddf90-37f0-48d3-a0b0-a5cbd8a7e377}</Project>
<Name>Akka</Name>
</ProjectReference>
<ProjectReference Include="..\Akka.Persistence.Sql.Common\Akka.Persistence.Sql.Common.csproj">
<Project>{3b9e6211-9488-4db5-b714-24248693b38f}</Project>
<Name>Akka.Persistence.Sql.Common</Name>
</ProjectReference>
</ItemGroup>
<ItemGroup>
<None Include="Akka.Persistence.PostgreSql.nuspec" />
<None Include="packages.config" />
<None Include="README.md" />
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<Import Project="$(SolutionDir)\.nuget\NuGet.targets" Condition="Exists('$(SolutionDir)\.nuget\NuGet.targets')" />
<Target Name="EnsureNuGetPackageBuildImports" BeforeTargets="PrepareForBuild">
<PropertyGroup>
<ErrorText>This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}.</ErrorText>
</PropertyGroup>
<Error Condition="!Exists('$(SolutionDir)\.nuget\NuGet.targets')" Text="$([System.String]::Format('$(ErrorText)', '$(SolutionDir)\.nuget\NuGet.targets'))" />
</Target>
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>

Просмотреть файл

@ -0,0 +1,20 @@
<?xml version="1.0" encoding="utf-8"?>
<package xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<metadata xmlns="http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd">
<id>@project@</id>
<title>@project@@title@</title>
<version>@build.number@</version>
<authors>@authors@</authors>
<owners>@authors@</owners>
<description>Akka.NET Persistence journal and snapshot store backed by PostgreSql.</description>
<licenseUrl>https://github.com/akkadotnet/akka.net/blob/master/LICENSE</licenseUrl>
<projectUrl>https://github.com/akkadotnet/akka.net</projectUrl>
<iconUrl>http://getakka.net/images/AkkaNetLogo.Normal.png</iconUrl>
<requireLicenseAcceptance>false</requireLicenseAcceptance>
<releaseNotes>@releaseNotes@</releaseNotes>
<copyright>@copyright@</copyright>
<tags>@tags@ persistence eventsource postgresql</tags>
@dependencies@
@references@
</metadata>
</package>

Просмотреть файл

@ -0,0 +1,116 @@
using System;
using Akka.Actor;
using Akka.Configuration;
using Akka.Persistence.Sql.Common;
namespace Akka.Persistence.PostgreSql
{
    /// <summary>
    /// Configuration settings representation targeting PostgreSql journal actor.
    /// </summary>
    public class PostgreSqlJournalSettings : JournalSettings
    {
        // HOCON path this settings object is read from.
        public const string JournalConfigPath = "akka.persistence.journal.postgresql";

        /// <summary>
        /// Flag determining in case of event journal table missing, it should be automatically initialized.
        /// </summary>
        public bool AutoInitialize { get; private set; }

        /// <summary>
        /// Creates journal settings from the plugin's HOCON <paramref name="config"/> section.
        /// </summary>
        public PostgreSqlJournalSettings(Config config)
            : base(config)
        {
            AutoInitialize = config.GetBoolean("auto-initialize");
        }
    }
    /// <summary>
    /// Configuration settings representation targeting PostgreSql snapshot store actor.
    /// </summary>
    public class PostgreSqlSnapshotStoreSettings : SnapshotStoreSettings
    {
        // HOCON path this settings object is read from.
        public const string SnapshotStoreConfigPath = "akka.persistence.snapshot-store.postgresql";

        /// <summary>
        /// Flag determining in case of snapshot store table missing, it should be automatically initialized.
        /// </summary>
        public bool AutoInitialize { get; private set; }

        /// <summary>
        /// Creates snapshot-store settings from the plugin's HOCON <paramref name="config"/> section.
        /// </summary>
        public PostgreSqlSnapshotStoreSettings(Config config)
            : base(config)
        {
            AutoInitialize = config.GetBoolean("auto-initialize");
        }
    }
    /// <summary>
    /// An actor system extension initializing support for PostgreSql persistence layer.
    /// </summary>
    public class PostgreSqlPersistenceExtension : IExtension
    {
        /// <summary>
        /// Journal-related settings loaded from HOCON configuration.
        /// </summary>
        public readonly PostgreSqlJournalSettings JournalSettings;

        /// <summary>
        /// Snapshot store related settings loaded from HOCON configuration.
        /// </summary>
        public readonly PostgreSqlSnapshotStoreSettings SnapshotStoreSettings;

        /// <summary>
        /// Loads plugin settings from the actor system's configuration (with the
        /// plugin's defaults as fallback) and, when auto-initialize is enabled,
        /// eagerly creates the journal / snapshot-store tables.
        /// </summary>
        public PostgreSqlPersistenceExtension(ExtendedActorSystem system)
        {
            // Make sure the plugin's reference.conf values are available as fallback.
            system.Settings.InjectTopLevelFallback(PostgreSqlPersistence.DefaultConfiguration());

            JournalSettings = new PostgreSqlJournalSettings(system.Settings.Config.GetConfig(PostgreSqlJournalSettings.JournalConfigPath));
            SnapshotStoreSettings = new PostgreSqlSnapshotStoreSettings(system.Settings.Config.GetConfig(PostgreSqlSnapshotStoreSettings.SnapshotStoreConfigPath));

            if (JournalSettings.AutoInitialize)
            {
                PostgreSqlInitializer.CreatePostgreSqlJournalTables(JournalSettings.ConnectionString, JournalSettings.SchemaName, JournalSettings.TableName);
            }

            if (SnapshotStoreSettings.AutoInitialize)
            {
                PostgreSqlInitializer.CreatePostgreSqlSnapshotStoreTables(SnapshotStoreSettings.ConnectionString, SnapshotStoreSettings.SchemaName, SnapshotStoreSettings.TableName);
            }
        }
    }
    /// <summary>
    /// Singleton class used to setup PostgreSQL backend for akka persistence plugin.
    /// </summary>
    public class PostgreSqlPersistence : ExtensionIdProvider<PostgreSqlPersistenceExtension>
    {
        /// <summary>
        /// Singleton extension id instance.
        /// </summary>
        public static readonly PostgreSqlPersistence Instance = new PostgreSqlPersistence();

        /// <summary>
        /// Initializes a PostgreSQL persistence plugin inside provided <paramref name="actorSystem"/>.
        /// </summary>
        public static void Init(ActorSystem actorSystem)
        {
            Instance.Apply(actorSystem);
        }

        // Private: the extension must be used through the Instance singleton.
        private PostgreSqlPersistence() { }

        /// <summary>
        /// Creates an actor system extension for akka persistence PostgreSQL support.
        /// </summary>
        /// <param name="system">The actor system to extend.</param>
        /// <returns>A new <see cref="PostgreSqlPersistenceExtension"/> bound to <paramref name="system"/>.</returns>
        public override PostgreSqlPersistenceExtension CreateExtension(ExtendedActorSystem system)
        {
            return new PostgreSqlPersistenceExtension(system);
        }

        /// <summary>
        /// Returns a default configuration for akka persistence PostgreSQL-based journals and snapshot stores.
        /// </summary>
        /// <returns>Config parsed from the embedded postgresql.conf resource.</returns>
        public static Config DefaultConfiguration()
        {
            return ConfigurationFactory.FromResource<PostgreSqlPersistence>("Akka.Persistence.PostgreSql.postgresql.conf");
        }
    }
}

Просмотреть файл

@ -0,0 +1,15 @@
using System;
using System.Data.SqlClient;
using Npgsql;
namespace Akka.Persistence.PostgreSql
{
    /// <summary>
    /// Internal string helpers shared by the PostgreSql persistence plugin.
    /// </summary>
    internal static class InternalExtensions
    {
        /// <summary>
        /// Substitutes placeholders {0} and {1} in <paramref name="sqlQuery"/>
        /// with the schema and table names, quoted via Npgsql's identifier
        /// quoting rules.
        /// </summary>
        public static string QuoteSchemaAndTable(this string sqlQuery, string schemaName, string tableName)
        {
            var commandBuilder = new NpgsqlCommandBuilder();
            var quotedSchema = commandBuilder.QuoteIdentifier(schemaName);
            var quotedTable = commandBuilder.QuoteIdentifier(tableName);
            return string.Format(sqlQuery, quotedSchema, quotedTable);
        }
    }
}

Просмотреть файл

@ -0,0 +1,96 @@
using System;
using System.Collections.Generic;
using System.Data.Common;
using System.Data.SqlClient;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Akka.Persistence.Journal;
using Npgsql;
using Akka.Persistence.Sql.Common.Journal;
using Akka.Persistence.Sql.Common;
namespace Akka.Persistence.PostgreSql.Journal
{
    /// <summary>
    /// PostgreSql-specific <see cref="JournalDbEngine"/>: plugs in the
    /// PostgreSql query builder/mapper and creates Npgsql connections.
    /// </summary>
    public class PostgreSqlJournalEngine : JournalDbEngine
    {
        public PostgreSqlJournalEngine(JournalSettings journalSettings, Akka.Serialization.Serialization serialization)
            : base(journalSettings, serialization)
        {
            QueryBuilder = new PostgreSqlJournalQueryBuilder(journalSettings.TableName, journalSettings.SchemaName);
            QueryMapper = new PostgreSqlJournalQueryMapper(serialization);
        }

        // Returns a new (unopened) Npgsql connection to the configured database.
        protected override DbConnection CreateDbConnection()
        {
            return new NpgsqlConnection(Settings.ConnectionString);
        }

        // Binds a journal entry's fields to the named parameters declared by
        // the query builder's INSERT statement.
        protected override void CopyParamsToCommand(DbCommand sqlCommand, JournalEntry entry)
        {
            sqlCommand.Parameters[":persistence_id"].Value = entry.PersistenceId;
            sqlCommand.Parameters[":sequence_nr"].Value = entry.SequenceNr;
            sqlCommand.Parameters[":is_deleted"].Value = entry.IsDeleted;
            sqlCommand.Parameters[":payload_type"].Value = entry.PayloadType;
            sqlCommand.Parameters[":payload"].Value = entry.Payload;
        }
    }
    /// <summary>
    /// Persistent journal actor using PostgreSQL as persistence layer. It processes write requests
    /// one by one in synchronous manner, while reading results asynchronously.
    /// </summary>
    public class PostgreSqlJournal : SyncWriteJournal
    {
        private readonly PostgreSqlPersistenceExtension _extension;
        private PostgreSqlJournalEngine _engine;

        public PostgreSqlJournal()
        {
            // Registers (or fetches) the PostgreSql persistence extension on
            // this actor's system; also triggers table auto-initialization.
            _extension = PostgreSqlPersistence.Instance.Apply(Context.System);
        }

        /// <summary>
        /// Gets an engine instance responsible for handling all database-related journal requests.
        /// </summary>
        protected virtual JournalDbEngine Engine
        {
            get
            {
                // Lazily created so construction happens in actor context.
                return _engine ?? (_engine = new PostgreSqlJournalEngine(_extension.JournalSettings, Context.System.Serialization));
            }
        }

        // Open the database engine together with the actor lifecycle...
        protected override void PreStart()
        {
            base.PreStart();
            Engine.Open();
        }

        // ...and close it when the actor stops.
        protected override void PostStop()
        {
            base.PostStop();
            Engine.Close();
        }

        // The remaining members delegate directly to the engine.
        public override Task ReplayMessagesAsync(string persistenceId, long fromSequenceNr, long toSequenceNr, long max, Action<IPersistentRepresentation> replayCallback)
        {
            return Engine.ReplayMessagesAsync(persistenceId, fromSequenceNr, toSequenceNr, max, Context.Sender, replayCallback);
        }

        public override Task<long> ReadHighestSequenceNrAsync(string persistenceId, long fromSequenceNr)
        {
            return Engine.ReadHighestSequenceNrAsync(persistenceId, fromSequenceNr);
        }

        public override void WriteMessages(IEnumerable<IPersistentRepresentation> messages)
        {
            Engine.WriteMessages(messages);
        }

        public override void DeleteMessagesTo(string persistenceId, long toSequenceNr, bool isPermanent)
        {
            Engine.DeleteMessagesTo(persistenceId, toSequenceNr, isPermanent);
        }
    }
}

Просмотреть файл

@ -0,0 +1,140 @@
using System.Data;
using System.Data.SqlClient;
using System.Text;
using Npgsql;
using NpgsqlTypes;
using Akka.Persistence.Sql.Common.Journal;
using System.Data.Common;
namespace Akka.Persistence.PostgreSql.Journal
{
internal class PostgreSqlJournalQueryBuilder : IJournalQueryBuilder
{
private readonly string _schemaName;
private readonly string _tableName;
private readonly string _selectHighestSequenceNrSql;
private readonly string _insertMessagesSql;
public PostgreSqlJournalQueryBuilder(string tableName, string schemaName)
{
_tableName = tableName;
_schemaName = schemaName;
_insertMessagesSql = "INSERT INTO {0}.{1} (persistence_id, sequence_nr, is_deleted, payload_type, payload) VALUES (:persistence_id, :sequence_nr, :is_deleted, :payload_type, :payload)"
.QuoteSchemaAndTable(_schemaName, _tableName);
_selectHighestSequenceNrSql = @"SELECT MAX(sequence_nr) FROM {0}.{1} WHERE persistence_id = :persistence_id".QuoteSchemaAndTable(_schemaName, _tableName);
}
public DbCommand SelectMessages(string persistenceId, long fromSequenceNr, long toSequenceNr, long max)
{
var sql = BuildSelectMessagesSql(fromSequenceNr, toSequenceNr, max);
var command = new NpgsqlCommand(sql)
{
Parameters = { PersistenceIdToSqlParam(persistenceId) }
};
return command;
}
public DbCommand SelectHighestSequenceNr(string persistenceId)
{
var command = new NpgsqlCommand(_selectHighestSequenceNrSql)
{
Parameters = { PersistenceIdToSqlParam(persistenceId) }
};
return command;
}
public DbCommand InsertBatchMessages(IPersistentRepresentation[] messages)
{
var command = new NpgsqlCommand(_insertMessagesSql);
command.Parameters.Add(":persistence_id", NpgsqlDbType.Varchar);
command.Parameters.Add(":sequence_nr", NpgsqlDbType.Bigint);
command.Parameters.Add(":is_deleted", NpgsqlDbType.Boolean);
command.Parameters.Add(":payload_type", NpgsqlDbType.Varchar);
command.Parameters.Add(":payload", NpgsqlDbType.Bytea);
return command;
}
public DbCommand DeleteBatchMessages(string persistenceId, long toSequenceNr, bool permanent)
{
var sql = BuildDeleteSql(toSequenceNr, permanent);
var command = new NpgsqlCommand(sql)
{
Parameters = { PersistenceIdToSqlParam(persistenceId) }
};
return command;
}
private string BuildDeleteSql(long toSequenceNr, bool permanent)
{
var sqlBuilder = new StringBuilder();
if (permanent)
{
sqlBuilder.Append("DELETE FROM {0}.{1} ".QuoteSchemaAndTable(_schemaName, _tableName));
}
else
{
sqlBuilder.Append("UPDATE {0}.{1} SET is_deleted = true ".QuoteSchemaAndTable(_schemaName, _tableName));
}
sqlBuilder.Append("WHERE persistence_id = :persistence_id");
if (toSequenceNr != long.MaxValue)
{
sqlBuilder.Append(" AND sequence_nr <= ").Append(toSequenceNr);
}
var sql = sqlBuilder.ToString();
return sql;
}
private string BuildSelectMessagesSql(long fromSequenceNr, long toSequenceNr, long max)
{
var sqlBuilder = new StringBuilder();
sqlBuilder.AppendFormat(
@"SELECT
persistence_id,
sequence_nr,
is_deleted,
payload_type,
payload ")
.Append(" FROM {0}.{1} WHERE persistence_id = :persistence_id".QuoteSchemaAndTable(_schemaName, _tableName));
// since we guarantee type of fromSequenceNr, toSequenceNr and max
// we can inline them without risk of SQL injection
if (fromSequenceNr > 0)
{
if (toSequenceNr != long.MaxValue)
sqlBuilder.Append(" AND sequence_nr BETWEEN ")
.Append(fromSequenceNr)
.Append(" AND ")
.Append(toSequenceNr);
else
sqlBuilder.Append(" AND sequence_nr >= ").Append(fromSequenceNr);
}
if (toSequenceNr != long.MaxValue)
sqlBuilder.Append(" AND sequence_nr <= ").Append(toSequenceNr);
if (max != long.MaxValue)
{
sqlBuilder.AppendFormat(" LIMIT {0}", max);
}
var sql = sqlBuilder.ToString();
return sql;
}
private static NpgsqlParameter PersistenceIdToSqlParam(string persistenceId, string paramName = null)
{
return new NpgsqlParameter(paramName ?? ":persistence_id", NpgsqlDbType.Varchar, persistenceId.Length) { Value = persistenceId };
}
}
}

Просмотреть файл

@ -0,0 +1,39 @@
using System;
using System.Data.Common;
using System.Data.SqlClient;
using Npgsql;
using Akka.Persistence.Sql.Common.Journal;
using Akka.Actor;
namespace Akka.Persistence.PostgreSql.Journal
{
internal class PostgreSqlJournalQueryMapper : IJournalQueryMapper
{
private readonly Akka.Serialization.Serialization _serialization;
public PostgreSqlJournalQueryMapper(Akka.Serialization.Serialization serialization)
{
_serialization = serialization;
}
public IPersistentRepresentation Map(DbDataReader reader, IActorRef sender = null)
{
var persistenceId = reader.GetString(0);
var sequenceNr = reader.GetInt64(1);
var isDeleted = reader.GetBoolean(2);
var payload = GetPayload(reader);
return new Persistent(payload, sequenceNr, persistenceId, isDeleted, sender);
}
private object GetPayload(DbDataReader reader)
{
var payloadType = reader.GetString(3);
var type = Type.GetType(payloadType, true);
var binary = (byte[]) reader[4];
var serializer = _serialization.FindSerializerForType(type);
return serializer.FromBinary(binary, type);
}
}
}

Просмотреть файл

@ -0,0 +1,99 @@
using System;
using System.Data.SqlClient;
using Npgsql;
namespace Akka.Persistence.PostgreSql
{
    // Creates journal/snapshot-store tables on demand ("auto-initialize" support).
    // NOTE: the DDL is guarded by an INFORMATION_SCHEMA existence check, so running
    // it repeatedly is safe.
    internal static class PostgreSqlInitializer
    {
        // DDL template for the journal table. Format placeholders:
        //   {0}/{1} - quoted schema/table identifiers (used inside the DDL statements)
        //   {2}/{3} - unquoted schema/table names (used in the INFORMATION_SCHEMA
        //             existence check and embedded into constraint/index names)
        // NOTE(review): index/constraint names embed the raw table name — presumably
        // table names never need quoting; verify for unusual table names.
        private const string SqlJournalFormat = @"
DO
$do$
BEGIN
IF NOT EXISTS (SELECT 1 FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '{2}' AND TABLE_NAME = '{3}') THEN
CREATE TABLE {0}.{1} (
persistence_id VARCHAR(200) NOT NULL,
sequence_nr BIGINT NOT NULL,
is_deleted BOOLEAN NOT NULL,
payload_type VARCHAR(500) NOT NULL,
payload BYTEA NOT NULL,
CONSTRAINT {3}_pk PRIMARY KEY (persistence_id, sequence_nr)
);
CREATE INDEX {3}_sequence_nr_idx ON {0}.{1}(sequence_nr);
END IF;
END
$do$
";
        // DDL template for the snapshot store table; placeholders as above.
        // created_at_ticks keeps the sub-10-tick remainder of a DateTime that does not
        // fit into the created_at column (see the CHECK constraint limiting it to 0..9).
        private const string SqlSnapshotStoreFormat = @"
DO
$do$
BEGIN
IF NOT EXISTS (SELECT 1 FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '{2}' AND TABLE_NAME = '{3}') THEN
CREATE TABLE {0}.{1} (
persistence_id VARCHAR(200) NOT NULL,
sequence_nr BIGINT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE NOT NULL,
created_at_ticks SMALLINT NOT NULL CHECK(created_at_ticks >= 0 AND created_at_ticks < 10),
snapshot_type VARCHAR(500) NOT NULL,
snapshot BYTEA NOT NULL,
CONSTRAINT {3}_pk PRIMARY KEY (persistence_id, sequence_nr)
);
CREATE INDEX {3}_sequence_nr_idx ON {0}.{1}(sequence_nr);
CREATE INDEX {3}_created_at_idx ON {0}.{1}(created_at);
END IF;
END
$do$
";
        /// <summary>
        /// Initializes a PostgreSQL journal-related tables according to 'schema-name', 'table-name'
        /// and 'connection-string' values provided in 'akka.persistence.journal.postgresql' config.
        /// </summary>
        internal static void CreatePostgreSqlJournalTables(string connectionString, string schemaName, string tableName)
        {
            var sql = InitJournalSql(tableName, schemaName);
            ExecuteSql(connectionString, sql);
        }
        /// <summary>
        /// Initializes a PostgreSQL snapshot store related tables according to 'schema-name', 'table-name'
        /// and 'connection-string' values provided in 'akka.persistence.snapshot-store.postgresql' config.
        /// </summary>
        internal static void CreatePostgreSqlSnapshotStoreTables(string connectionString, string schemaName, string tableName)
        {
            var sql = InitSnapshotStoreSql(tableName, schemaName);
            ExecuteSql(connectionString, sql);
        }
        // Formats the journal DDL, quoting identifiers via NpgsqlCommandBuilder.
        // A null/empty schema defaults to "public"; a missing table name is an error.
        private static string InitJournalSql(string tableName, string schemaName = null)
        {
            if (string.IsNullOrEmpty(tableName)) throw new ArgumentNullException("tableName", "Akka.Persistence.PostgreSql journal table name is required");
            schemaName = schemaName ?? "public";
            var cb = new NpgsqlCommandBuilder();
            return string.Format(SqlJournalFormat, cb.QuoteIdentifier(schemaName), cb.QuoteIdentifier(tableName), cb.UnquoteIdentifier(schemaName), cb.UnquoteIdentifier(tableName));
        }
        // Formats the snapshot store DDL; same defaulting/validation rules as InitJournalSql.
        private static string InitSnapshotStoreSql(string tableName, string schemaName = null)
        {
            if (string.IsNullOrEmpty(tableName)) throw new ArgumentNullException("tableName", "Akka.Persistence.PostgreSql snapshot store table name is required");
            schemaName = schemaName ?? "public";
            var cb = new NpgsqlCommandBuilder();
            return string.Format(SqlSnapshotStoreFormat, cb.QuoteIdentifier(schemaName), cb.QuoteIdentifier(tableName), cb.UnquoteIdentifier(schemaName), cb.UnquoteIdentifier(tableName));
        }
        // Opens a short-lived connection and executes the given statement once.
        private static void ExecuteSql(string connectionString, string sql)
        {
            using (var conn = new NpgsqlConnection(connectionString))
            using (var command = conn.CreateCommand())
            {
                conn.Open();
                command.CommandText = sql;
                command.ExecuteNonQuery();
            }
        }
    }
}

Просмотреть файл

@ -0,0 +1,36 @@
// Assembly-level metadata for the Akka.Persistence.PostgreSql plugin package.
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Akka.Persistence.PostgreSql")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("Akka.Persistence.PostgreSql")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("3b21dbd6-ebb9-44cb-8dee-edbfb5bf0a00")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]

Просмотреть файл

@ -0,0 +1,44 @@
using System.Collections.Generic;
using System.Data.SqlClient;
using System.Threading;
using System.Threading.Tasks;
using Akka.Persistence.Snapshot;
using Npgsql;
using Akka.Persistence.Sql.Common.Snapshot;
using Akka.Persistence.Sql.Common;
using System;
using System.Data.Common;
namespace Akka.Persistence.PostgreSql.Snapshot
{
/// <summary>
/// Actor used for storing incoming snapshots into persistent snapshot store backed by PostgreSQL database.
/// </summary>
public class PostgreSqlSnapshotStore : DbSnapshotStore
{
private readonly PostgreSqlPersistenceExtension _extension;
private readonly PostgreSqlSnapshotStoreSettings _settings;
public PostgreSqlSnapshotStore()
{
_extension = PostgreSqlPersistence.Instance.Apply(Context.System);
_settings = _extension.SnapshotStoreSettings;
QueryBuilder = new PostgreSqlSnapshotQueryBuilder(_settings.SchemaName, _settings.TableName);
QueryMapper = new PostgreSqlSnapshotQueryMapper(Context.System.Serialization);
}
protected override SnapshotStoreSettings Settings
{
get
{
return _settings;
}
}
protected override DbConnection CreateDbConnection()
{
return new NpgsqlConnection(Settings.ConnectionString);
}
}
}

Просмотреть файл

@ -0,0 +1,166 @@
using System;
using System.Data;
using System.Data.SqlClient;
using System.Text;
using Npgsql;
using NpgsqlTypes;
using Akka.Persistence.Sql.Common.Snapshot;
using System.Data.Common;
namespace Akka.Persistence.PostgreSql.Snapshot
{
internal class PostgreSqlSnapshotQueryBuilder : ISnapshotQueryBuilder
{
private readonly string _deleteSql;
private readonly string _insertSql;
private readonly string _selectSql;
public PostgreSqlSnapshotQueryBuilder(string schemaName, string tableName)
{
_deleteSql = @"DELETE FROM {0}.{1} WHERE persistence_id = :persistence_id ".QuoteSchemaAndTable(schemaName, tableName);
_insertSql = @"INSERT INTO {0}.{1} (persistence_id, sequence_nr, created_at, created_at_ticks, snapshot_type, snapshot) VALUES (:persistence_id, :sequence_nr, :created_at, :created_at_ticks, :snapshot_type, :snapshot)".QuoteSchemaAndTable(schemaName, tableName);
_selectSql = @"SELECT persistence_id, sequence_nr, created_at, created_at_ticks, snapshot_type, snapshot FROM {0}.{1} WHERE persistence_id = :persistence_id".QuoteSchemaAndTable(schemaName, tableName);
}
public DbCommand DeleteOne(string persistenceId, long sequenceNr, DateTime timestamp)
{
var sqlCommand = new NpgsqlCommand();
sqlCommand.Parameters.Add(new NpgsqlParameter(":persistence_id", NpgsqlDbType.Varchar, persistenceId.Length)
{
Value = persistenceId
});
var sb = new StringBuilder(_deleteSql);
if (sequenceNr < long.MaxValue && sequenceNr > 0)
{
sb.Append(@"AND sequence_nr = :sequence_nr ");
sqlCommand.Parameters.Add(new NpgsqlParameter(":sequence_nr", NpgsqlDbType.Bigint) {Value = sequenceNr});
}
if (timestamp > DateTime.MinValue && timestamp < DateTime.MaxValue)
{
sb.Append(@"AND created_at = :created_at AND created_at_ticks = :created_at_ticks");
sqlCommand.Parameters.Add(new NpgsqlParameter(":created_at", NpgsqlDbType.Timestamp)
{
Value = GetMaxPrecisionTicks(timestamp)
});
sqlCommand.Parameters.Add(new NpgsqlParameter(":created_at_ticks", NpgsqlDbType.Smallint)
{
Value = GetExtraTicks(timestamp)
});
}
sqlCommand.CommandText = sb.ToString();
return sqlCommand;
}
public DbCommand DeleteMany(string persistenceId, long maxSequenceNr, DateTime maxTimestamp)
{
var sqlCommand = new NpgsqlCommand();
sqlCommand.Parameters.Add(new NpgsqlParameter(":persistence_id", NpgsqlDbType.Varchar, persistenceId.Length)
{
Value = persistenceId
});
var sb = new StringBuilder(_deleteSql);
if (maxSequenceNr < long.MaxValue && maxSequenceNr > 0)
{
sb.Append(@" AND sequence_nr <= :sequence_nr ");
sqlCommand.Parameters.Add(new NpgsqlParameter(":sequence_nr", NpgsqlDbType.Bigint)
{
Value = maxSequenceNr
});
}
if (maxTimestamp > DateTime.MinValue && maxTimestamp < DateTime.MaxValue)
{
sb.Append(
@" AND (created_at < :created_at OR (created_at = :created_at AND created_at_ticks <= :created_at_ticks)) ");
sqlCommand.Parameters.Add(new NpgsqlParameter(":created_at", NpgsqlDbType.Timestamp)
{
Value = GetMaxPrecisionTicks(maxTimestamp)
});
sqlCommand.Parameters.Add(new NpgsqlParameter(":created_at_ticks", NpgsqlDbType.Smallint)
{
Value = GetExtraTicks(maxTimestamp)
});
}
sqlCommand.CommandText = sb.ToString();
return sqlCommand;
}
public DbCommand InsertSnapshot(SnapshotEntry entry)
{
var sqlCommand = new NpgsqlCommand(_insertSql)
{
Parameters =
{
new NpgsqlParameter(":persistence_id", NpgsqlDbType.Varchar, entry.PersistenceId.Length) { Value = entry.PersistenceId },
new NpgsqlParameter(":sequence_nr", NpgsqlDbType.Bigint) { Value = entry.SequenceNr },
new NpgsqlParameter(":created_at", NpgsqlDbType.Timestamp) { Value = GetMaxPrecisionTicks(entry.Timestamp) },
new NpgsqlParameter(":created_at_ticks", NpgsqlDbType.Smallint) { Value = GetExtraTicks(entry.Timestamp) },
new NpgsqlParameter(":snapshot_type", NpgsqlDbType.Varchar, entry.SnapshotType.Length) { Value = entry.SnapshotType },
new NpgsqlParameter(":snapshot", NpgsqlDbType.Bytea, entry.Snapshot.Length) { Value = entry.Snapshot }
}
};
return sqlCommand;
}
public DbCommand SelectSnapshot(string persistenceId, long maxSequenceNr, DateTime maxTimestamp)
{
var sqlCommand = new NpgsqlCommand();
sqlCommand.Parameters.Add(new NpgsqlParameter(":persistence_id", NpgsqlDbType.Varchar, persistenceId.Length)
{
Value = persistenceId
});
var sb = new StringBuilder(_selectSql);
if (maxSequenceNr > 0 && maxSequenceNr < long.MaxValue)
{
sb.Append(" AND sequence_nr <= :sequence_nr ");
sqlCommand.Parameters.Add(new NpgsqlParameter(":sequence_nr", NpgsqlDbType.Bigint)
{
Value = maxSequenceNr
});
}
if (maxTimestamp > DateTime.MinValue && maxTimestamp < DateTime.MaxValue)
{
sb.Append(
@" AND (created_at < :created_at OR (created_at = :created_at AND created_at_ticks <= :created_at_ticks)) ");
sqlCommand.Parameters.Add(new NpgsqlParameter(":created_at", NpgsqlDbType.Timestamp)
{
Value = GetMaxPrecisionTicks(maxTimestamp)
});
sqlCommand.Parameters.Add(new NpgsqlParameter(":created_at_ticks", NpgsqlDbType.Smallint)
{
Value = GetExtraTicks(maxTimestamp)
});
}
sb.Append(" ORDER BY sequence_nr DESC");
sqlCommand.CommandText = sb.ToString();
return sqlCommand;
}
private static DateTime GetMaxPrecisionTicks(DateTime date)
{
var ticks = (date.Ticks / 10) * 10;
ticks = date.Ticks - ticks;
return date.AddTicks(-1 * ticks);
}
private static short GetExtraTicks(DateTime date)
{
var ticks = date.Ticks;
return (short)(ticks % 10);
}
}
}

Просмотреть файл

@ -0,0 +1,44 @@
using System;
using System.Data.Common;
using System.Data.SqlClient;
using Npgsql;
using Akka.Persistence.Sql.Common.Snapshot;
namespace Akka.Persistence.PostgreSql.Snapshot
{
internal class PostgreSqlSnapshotQueryMapper : ISnapshotQueryMapper
{
private readonly Akka.Serialization.Serialization _serialization;
public PostgreSqlSnapshotQueryMapper(Akka.Serialization.Serialization serialization)
{
_serialization = serialization;
}
public SelectedSnapshot Map(DbDataReader reader)
{
var persistenceId = reader.GetString(0);
var sequenceNr = reader.GetInt64(1);
var timestamp = reader.GetDateTime(2);
var timestampTicks = reader.GetInt16(3);
timestamp = timestamp.AddTicks(timestampTicks);
var metadata = new SnapshotMetadata(persistenceId, sequenceNr, timestamp);
var snapshot = GetSnapshot(reader);
return new SelectedSnapshot(metadata, snapshot);
}
private object GetSnapshot(DbDataReader reader)
{
var type = Type.GetType(reader.GetString(4), true);
var serializer = _serialization.FindSerializerForType(type);
var binary = (byte[])reader[5];
var obj = serializer.FromBinary(binary, type);
return obj;
}
}
}

Просмотреть файл

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="Npgsql" version="2.2.5" targetFramework="net45" />
</packages>

Просмотреть файл

@ -0,0 +1,54 @@
akka.persistence{
journal {
postgresql {
# qualified type name of the PostgreSql persistence journal actor
class = "Akka.Persistence.PostgreSql.Journal.PostgreSqlJournal, Akka.Persistence.PostgreSql"
# dispatcher used to drive journal actor
plugin-dispatcher = "akka.actor.default-dispatcher"
# connection string used for database access
connection-string = ""
# default SQL commands timeout
connection-timeout = 30s
# PostgreSql schema name to table corresponding with persistent journal
schema-name = public
# PostgreSql table corresponding with persistent journal
table-name = event_journal
# should corresponding journal table be initialized automatically
auto-initialize = off
}
}
snapshot-store {
postgresql {
# qualified type name of the PostgreSql persistence snapshot store actor
class = "Akka.Persistence.PostgreSql.Snapshot.PostgreSqlSnapshotStore, Akka.Persistence.PostgreSql"
# dispatcher used to drive snapshot store actor
plugin-dispatcher = "akka.actor.default-dispatcher"
# connection string used for database access
connection-string = ""
# default SQL commands timeout
connection-timeout = 30s
# PostgreSql schema name to table corresponding with persistent snapshot store
schema-name = public
# PostgreSql table corresponding with persistent snapshot store
table-name = snapshot_store
# should corresponding snapshot store table be initialized automatically
auto-initialize = off
}
}
}