diff --git a/IntegrationTests/IntegrationTests.csproj b/IntegrationTests/IntegrationTests.csproj
index 5578e21..9776643 100644
--- a/IntegrationTests/IntegrationTests.csproj
+++ b/IntegrationTests/IntegrationTests.csproj
@@ -73,8 +73,8 @@
Code
-    <Compile Include="File.cs" />
+    <Compile Include="OptimisticFile.cs" />
diff --git a/IntegrationTests/File.cs b/IntegrationTests/OptimisticFile.cs
similarity index 80%
rename from IntegrationTests/File.cs
rename to IntegrationTests/OptimisticFile.cs
index a43d836..c9b7c4e 100644
--- a/IntegrationTests/File.cs
+++ b/IntegrationTests/OptimisticFile.cs
@@ -1,48 +1,49 @@
-using System;
-using System.IO;
-using NUnit.Framework;
-using SnowMaker;
-
-namespace IntegrationTests.cs
-{
- [TestFixture]
- public class File : Scenarios
- {
- protected override TestScope BuildTestScope()
- {
- return new TestScope();
- }
-
- protected override IOptimisticDataStore BuildStore(TestScope scope)
- {
- return new DebugOnlyFileDataStore(scope.DirectoryPath);
- }
-
- public class TestScope : ITestScope
- {
- public TestScope()
- {
- var ticks = DateTime.UtcNow.Ticks;
- IdScopeName = string.Format("snowmakertest{0}", ticks);
-
- DirectoryPath = Path.Combine(Path.GetTempPath(), IdScopeName);
- Directory.CreateDirectory(DirectoryPath);
- }
-
- public string IdScopeName { get; private set; }
- public string DirectoryPath { get; private set; }
-
- public string ReadCurrentPersistedValue()
- {
- var filePath = Path.Combine(DirectoryPath, string.Format("{0}.txt", IdScopeName));
- return System.IO.File.ReadAllText(filePath);
- }
-
- public void Dispose()
- {
- if (Directory.Exists(DirectoryPath))
- Directory.Delete(DirectoryPath, true);
- }
- }
- }
-}
+using System;
+using System.IO;
+using NUnit.Framework;
+using SnowMaker;
+
+namespace IntegrationTests.cs
+{
+ [TestFixture]
+ public class OptimisticFile : Scenarios
+ {
+ protected override TestScope BuildTestScope()
+ {
+ return new TestScope();
+ }
+
+ protected override IOptimisticDataStore BuildStore(TestScope scope)
+ {
+ return new FileOptimisticDataStore(scope.DirectoryPath);
+ }
+
+ public class TestScope : ITestScope
+ {
+ public TestScope()
+ {
+ var ticks = DateTime.UtcNow.Ticks;
+ IdScopeName = string.Format("snowmakertest{0}", ticks);
+
+ DirectoryPath = Path.Combine(Path.GetTempPath(), IdScopeName);
+ Directory.CreateDirectory(DirectoryPath);
+ }
+
+ public string IdScopeName { get; private set; }
+ public string DirectoryPath { get; private set; }
+
+ public string ReadCurrentPersistedValue()
+ {
+ var filePath = Path.Combine(DirectoryPath, string.Format("{0}.txt", IdScopeName));
+ using (TextReader reader = new StreamReader(filePath, System.Text.Encoding.Default))
+ return reader.ReadToEnd();
+ }
+
+ public void Dispose()
+ {
+ if (Directory.Exists(DirectoryPath))
+ Directory.Delete(DirectoryPath, true);
+ }
+ }
+ }
+}
diff --git a/SnowMaker/BlobOptimisticDataStore.cs b/SnowMaker/BlobOptimisticDataStore.cs
index d2d6048..fe6a4a0 100644
--- a/SnowMaker/BlobOptimisticDataStore.cs
+++ b/SnowMaker/BlobOptimisticDataStore.cs
@@ -4,6 +4,7 @@
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.Blob;
using System.IO;
+using System;
namespace SnowMaker
{
@@ -24,35 +25,37 @@ public BlobOptimisticDataStore(CloudStorageAccount account, string containerName
blobReferences = new Dictionary<string, ICloudBlob>();
}
-
- public string GetData(string blockName)
+
+ public long GetNextBatch(string blockName, int batchSize)
{
+ if (batchSize <= 0)
+ throw new ArgumentOutOfRangeException("batchSize");
+
+ long id;
var blobReference = GetBlobReference(blockName);
using (var stream = new MemoryStream())
{
blobReference.DownloadToStream(stream);
- return Encoding.UTF8.GetString(stream.ToArray());
+ if (!Int64.TryParse(Encoding.UTF8.GetString(stream.ToArray()), out id))
+ throw new Exception(String.Format("The id seed returned from the blob for blockName '{0}' was corrupt, and could not be parsed as a long. The data returned was: {1}", blockName, Encoding.UTF8.GetString(stream.ToArray())));
+ if (id <= 0)
+ throw new Exception(String.Format("The id seed returned from the blob for blockName '{0}' was {1}", blockName, id));
}
- }
- public bool TryOptimisticWrite(string scopeName, string data)
- {
- var blobReference = GetBlobReference(scopeName);
try
{
UploadText(
blobReference,
- data,
+ (id + batchSize).ToString(),
AccessCondition.GenerateIfMatchCondition(blobReference.Properties.ETag));
}
catch (StorageException exc)
{
if (exc.RequestInformation.HttpStatusCode == (int)HttpStatusCode.PreconditionFailed)
- return false;
-
- throw;
+ return -1;
}
- return true;
+
+ return id;
}
ICloudBlob GetBlobReference(string blockName)
diff --git a/SnowMaker/DebugOnlyFileDataStore.cs b/SnowMaker/DebugOnlyFileDataStore.cs
deleted file mode 100644
index fab24d4..0000000
--- a/SnowMaker/DebugOnlyFileDataStore.cs
+++ /dev/null
@@ -1,41 +0,0 @@
-using System.IO;
-
-namespace SnowMaker
-{
- public class DebugOnlyFileDataStore : IOptimisticDataStore
- {
- const string SeedValue = "1";
-
- readonly string directoryPath;
-
- public DebugOnlyFileDataStore(string directoryPath)
- {
- this.directoryPath = directoryPath;
- }
-
- public string GetData(string blockName)
- {
- var blockPath = Path.Combine(directoryPath, string.Format("{0}.txt", blockName));
- try
- {
- return File.ReadAllText(blockPath);
- }
- catch (FileNotFoundException)
- {
- using (var file = File.Create(blockPath))
- using (var streamWriter = new StreamWriter(file))
- {
- streamWriter.Write(SeedValue);
- }
- return SeedValue;
- }
- }
-
- public bool TryOptimisticWrite(string blockName, string data)
- {
- var blockPath = Path.Combine(directoryPath, string.Format("{0}.txt", blockName));
- File.WriteAllText(blockPath, data);
- return true;
- }
- }
-}
diff --git a/SnowMaker/FileOptimisticDataStore.cs b/SnowMaker/FileOptimisticDataStore.cs
new file mode 100644
index 0000000..afb14c3
--- /dev/null
+++ b/SnowMaker/FileOptimisticDataStore.cs
@@ -0,0 +1,72 @@
+using System;
+using System.IO;
+using System.Text;
+
+namespace SnowMaker
+{
+ public class FileOptimisticDataStore : IOptimisticDataStore
+ {
+ public static readonly Encoding Encoding = Encoding.Default;
+ public const long SeedValue = 1;
+
+ readonly string directoryPath;
+
+ public FileOptimisticDataStore(string directoryPath)
+ {
+ this.directoryPath = directoryPath;
+ }
+
+ public long GetNextBatch(string blockName, int batchSize)
+ {
+ if (batchSize <= 0)
+ throw new ArgumentOutOfRangeException("batchSize");
+
+ var blockPath = Path.Combine(directoryPath, string.Format("{0}.txt", blockName));
+
+ try
+ {
+ using (FileStream stream = File.Open(blockPath, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.None))
+ {
+ if (stream.Length == 0)
+ {
+ // a new file was created
+ using (StreamWriter writer = new StreamWriter(stream, Encoding))
+ writer.Write((SeedValue + batchSize).ToString());
+ return SeedValue;
+ }
+
+ // read the next available id
+ // can't use StreamReader to read here bc it would call Dispose on the provided stream object when StreamReader is disposed
+ StringBuilder str = new StringBuilder();
+ byte[] buffer = new byte[128];
+ int offset = 0, length;
+ do
+ {
+ length = stream.Read(buffer, offset, buffer.Length);
+ str.Append(Encoding.GetString(buffer, 0, length));
+ offset += length;
+ }
+ while (stream.Position < stream.Length);
+
+ long id;
+ if (!Int64.TryParse(str.ToString(), out id))
+ throw new Exception(String.Format("The id seed returned from the file for blockName '{0}' was corrupt, and could not be parsed as a long. The data returned was: {1}", blockName, str.ToString()));
+ if (id <= 0)
+ throw new Exception(String.Format("The id seed returned from the file for blockName '{0}' was {1}", blockName, id));
+
+ // mark the next batch as taken
+ stream.Position = 0;
+ stream.SetLength(Encoding.GetByteCount((id + batchSize).ToString()));
+ using (StreamWriter writer = new StreamWriter(stream, Encoding))
+ writer.Write((id + batchSize).ToString());
+
+ return id;
+ }
+ }
+ catch (IOException)
+ {
+ return -1;
+ }
+ }
+ }
+}
diff --git a/SnowMaker/IOptimisticDataStore.cs b/SnowMaker/IOptimisticDataStore.cs
index e608f09..3df35da 100644
--- a/SnowMaker/IOptimisticDataStore.cs
+++ b/SnowMaker/IOptimisticDataStore.cs
@@ -2,7 +2,12 @@
{
public interface IOptimisticDataStore
{
- string GetData(string blockName);
- bool TryOptimisticWrite(string blockName, string data);
+ ///
+ /// Marks the next ids starting at the next available id
+ ///
+ ///
+ ///
+ /// The first available id in the given batch size, or -1 if the call was unable to lock the store for exclusive access
+ long GetNextBatch(string blockName, int batchSize);
}
}
diff --git a/SnowMaker/SnowMaker.csproj b/SnowMaker/SnowMaker.csproj
index f9082bf..66ee00e 100644
--- a/SnowMaker/SnowMaker.csproj
+++ b/SnowMaker/SnowMaker.csproj
@@ -65,7 +65,7 @@
Code
-    <Compile Include="DebugOnlyFileDataStore.cs" />
+    <Compile Include="FileOptimisticDataStore.cs" />
Code
diff --git a/SnowMaker/UniqueIdGenerator.cs b/SnowMaker/UniqueIdGenerator.cs
index 7261695..9ef69f0 100644
--- a/SnowMaker/UniqueIdGenerator.cs
+++ b/SnowMaker/UniqueIdGenerator.cs
@@ -65,22 +65,13 @@ void UpdateFromSyncStore(string scopeName, ScopeState state)
while (writesAttempted < maxWriteAttempts)
{
- var data = optimisticDataStore.GetData(scopeName);
-
- long nextId;
- if (!long.TryParse(data, out nextId))
- throw new UniqueIdGenerationException(string.Format(
- "The id seed returned from storage for scope '{0}' was corrupt, and could not be parsed as a long. The data returned was: {1}",
- scopeName,
- data));
-
- state.LastId = nextId - 1;
- state.HighestIdAvailableInBatch = nextId - 1 + batchSize;
- var firstIdInNextBatch = state.HighestIdAvailableInBatch + 1;
-
- if (optimisticDataStore.TryOptimisticWrite(scopeName, firstIdInNextBatch.ToString(CultureInfo.InvariantCulture)))
+ long id = optimisticDataStore.GetNextBatch(scopeName, batchSize);
+ if (id != -1)
+ {
+ state.LastId = id - 1;
+ state.HighestIdAvailableInBatch = id - 1 + batchSize;
return;
-
+ }
writesAttempted++;
}
diff --git a/UnitTests/FileOptimisticDataStoreTests.cs b/UnitTests/FileOptimisticDataStoreTests.cs
new file mode 100644
index 0000000..cc8799c
--- /dev/null
+++ b/UnitTests/FileOptimisticDataStoreTests.cs
@@ -0,0 +1,146 @@
+using System;
+//using NSubstitute;
+using NUnit.Framework;
+using System.IO;
+using System.Threading.Tasks;
+using System.Threading;
+
+namespace SnowMaker.UnitTests
+{
+ [TestFixture]
+ public class FileOptimisticDataStoreTests
+ {
+ private class TestScope: IDisposable
+ {
+ public readonly string FilePath;
+
+ public TestScope(string scope)
+ {
+ FilePath = Path.Combine(Path.GetTempPath(), string.Format("{0}.txt", scope));
+ }
+
+ // does not lock the file
+ public long ReadCurrentPersistedValue()
+ {
+ using (TextReader reader = new StreamReader(FilePath, FileOptimisticDataStore.Encoding))
+ return Convert.ToInt64(reader.ReadToEnd());
+ }
+
+ public void Dispose()
+ {
+ int count = 0;
+ do
+ {
+ try
+ {
+ File.Delete(FilePath);
+ Thread.Sleep(10);
+ return;
+ }
+ catch
+ {
+ // retry, it could still be blocked by another thread from the tests
+ count++;
+ }
+ }
+ while(count < 5);
+ }
+ }
+
+ private const string scope = "test";
+ private const int batch = 1;
+
+ [Test]
+ public void ConstructorShouldNotCreateFile()
+ {
+ using (var testScope = new TestScope(scope))
+ {
+ var store = new FileOptimisticDataStore(Path.GetTempPath());
+ Assert.IsFalse(File.Exists(testScope.FilePath));
+ }
+ }
+
+ [Test]
+ public void ShouldCreateFileOnFirstAccess()
+ {
+ using (var testScope = new TestScope(scope))
+ {
+ var store = new FileOptimisticDataStore(Path.GetTempPath());
+ store.GetNextBatch(scope, batch);
+ Assert.IsTrue(File.Exists(testScope.FilePath));
+ Assert.AreEqual(testScope.ReadCurrentPersistedValue(), FileOptimisticDataStore.SeedValue + batch);
+ }
+ }
+
+ [Test]
+ public void GetNextBatchShouldBlockFileAccess()
+ {
+ using (var testScope = new TestScope(scope))
+ {
+ CancellationTokenSource cancelTokenSource1 = new CancellationTokenSource();
+ CancellationTokenSource cancelTokenSource2 = new CancellationTokenSource();
+
+ try
+ {
+ var store = new FileOptimisticDataStore(Path.GetTempPath());
+ store.GetNextBatch(scope, batch); // create the file
+
+ CancellationToken cancelToken1 = cancelTokenSource1.Token;
+ Task task1 = Task.Factory.StartNew(() =>
+ {
+ do
+ store.GetNextBatch(scope, batch);
+ while (!cancelToken1.IsCancellationRequested);
+ }, cancelToken1);
+
+ CancellationToken cancelToken2 = cancelTokenSource2.Token;
+ Task task2 = Task.Factory.StartNew(() =>
+ {
+ do
+ {
+ try
+ {
+ testScope.ReadCurrentPersistedValue();
+ }
+ catch (IOException e)
+ {
+ if (e.Message.Equals("The process cannot access the file '" + testScope.FilePath + "' because it is being used by another process."))
+ return;
+ throw;
+ }
+ }
+ while (!cancelToken2.IsCancellationRequested);
+ }, cancelToken2);
+
+ if (task2.Wait(3000) && !task2.IsFaulted)
+ Assert.Pass();
+ else
+ {
+ if (task2.IsFaulted)
+ Assert.Inconclusive("The second thread failed with error '" + task2.Exception.ToString() + "'.");
+ else
+ Assert.Inconclusive("The second thread was not blocked in an interval of 3000 ms.");
+ }
+ }
+ catch
+ {
+ cancelTokenSource1.Cancel();
+ cancelTokenSource2.Cancel();
+ throw;
+ }
+ }
+ }
+
+ [Test]
+ public void GetNextBatchShouldReturnMinusOneWhenBlocked()
+ {
+ using (var testScope = new TestScope(scope))
+ {
+ var store = new FileOptimisticDataStore(Path.GetTempPath());
+ store.GetNextBatch(scope, batch);
+ using (FileStream stream = File.Open(testScope.FilePath, FileMode.Open, FileAccess.Read, FileShare.None))
+ Assert.AreEqual(-1, store.GetNextBatch(scope, batch));
+ }
+ }
+ }
+}
diff --git a/UnitTests/UniqueIdGeneratorTest.cs b/UnitTests/UniqueIdGeneratorTest.cs
index 207b1a6..3c23e92 100644
--- a/UnitTests/UniqueIdGeneratorTest.cs
+++ b/UnitTests/UniqueIdGeneratorTest.cs
@@ -13,7 +13,7 @@ public void ConstructorShouldNotRetrieveDataFromStore()
var store = Substitute.For<IOptimisticDataStore>();
// ReSharper disable once ObjectCreationAsStatement
new UniqueIdGenerator(store);
- store.DidNotReceiveWithAnyArgs().GetData(null);
+ store.DidNotReceiveWithAnyArgs().GetNextBatch(null, 1);
}
[Test]
@@ -40,79 +40,42 @@ public void MaxWriteAttemptsShouldThrowArgumentOutOfRangeExceptionWhenValueIsNeg
};
}
- [Test]
- [ExpectedException(typeof(UniqueIdGenerationException))]
- public void NextIdShouldThrowExceptionOnCorruptData()
- {
- var store = Substitute.For<IOptimisticDataStore>();
- store.GetData("test").Returns("abc");
-
- var generator = new UniqueIdGenerator(store);
-
- generator.NextId("test");
- }
-
- [Test]
- [ExpectedException(typeof(UniqueIdGenerationException))]
- public void NextIdShouldThrowExceptionOnNullData()
- {
- var store = Substitute.For<IOptimisticDataStore>();
- store.GetData("test").Returns((string)null);
-
- var generator = new UniqueIdGenerator(store);
-
- generator.NextId("test");
- }
-
[Test]
public void NextIdShouldReturnNumbersSequentially()
{
var store = Substitute.For<IOptimisticDataStore>();
- store.GetData("test").Returns("0", "250");
- store.TryOptimisticWrite("test", "3").Returns(true);
+ store.GetNextBatch("test", 3).Returns(1, 251);
- var subject = new UniqueIdGenerator(store)
- {
- BatchSize = 3
- };
+ var subject = new UniqueIdGenerator(store) { BatchSize = 3 };
- Assert.AreEqual(0, subject.NextId("test"));
Assert.AreEqual(1, subject.NextId("test"));
Assert.AreEqual(2, subject.NextId("test"));
+ Assert.AreEqual(3, subject.NextId("test"));
}
[Test]
public void NextIdShouldRollOverToNewBlockWhenCurrentBlockIsExhausted()
{
var store = Substitute.For<IOptimisticDataStore>();
- store.GetData("test").Returns("0", "250");
- store.TryOptimisticWrite("test", "3").Returns(true);
- store.TryOptimisticWrite("test", "253").Returns(true);
+ store.GetNextBatch("test", 3).Returns(1, 251);
- var subject = new UniqueIdGenerator(store)
- {
- BatchSize = 3
- };
+ var subject = new UniqueIdGenerator(store) { BatchSize = 3 };
- Assert.AreEqual(0, subject.NextId("test"));
Assert.AreEqual(1, subject.NextId("test"));
Assert.AreEqual(2, subject.NextId("test"));
- Assert.AreEqual(250, subject.NextId("test"));
+ Assert.AreEqual(3, subject.NextId("test"));
Assert.AreEqual(251, subject.NextId("test"));
Assert.AreEqual(252, subject.NextId("test"));
+ Assert.AreEqual(253, subject.NextId("test"));
}
[Test]
public void NextIdShouldThrowExceptionWhenRetriesAreExhausted()
{
var store = Substitute.For<IOptimisticDataStore>();
- store.GetData("test").Returns("0");
- store.TryOptimisticWrite("test", "3").Returns(false, false, false, true);
+ store.GetNextBatch("test", 100).Returns(-1, -1, -1, 1);
- var generator = new UniqueIdGenerator(store)
- {
- MaxWriteAttempts = 3
- };
+ var generator = new UniqueIdGenerator(store) { MaxWriteAttempts = 3 };
try
{
diff --git a/UnitTests/UnitTests.csproj b/UnitTests/UnitTests.csproj
index 4ea996b..60198d5 100644
--- a/UnitTests/UnitTests.csproj
+++ b/UnitTests/UnitTests.csproj
@@ -74,6 +74,7 @@
+    <Compile Include="FileOptimisticDataStoreTests.cs" />