This repository was archived by the owner on Oct 28, 2023. It is now read-only.

#2 Refactored DebugOnlyFileDataStore to FileOptimisticDataStore #3

Open · wants to merge 2 commits into master
2 changes: 1 addition & 1 deletion IntegrationTests/IntegrationTests.csproj
@@ -73,8 +73,8 @@
<Compile Include="Azure.cs">
<SubType>Code</SubType>
</Compile>
<Compile Include="File.cs" />
<Compile Include="ITestScope.cs" />
<Compile Include="OptimisticFile.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="Scenarios.cs" />
</ItemGroup>
97 changes: 49 additions & 48 deletions IntegrationTests/File.cs → IntegrationTests/OptimisticFile.cs
@@ -1,48 +1,49 @@
using System;
using System.IO;
using NUnit.Framework;
using SnowMaker;

namespace IntegrationTests.cs
{
[TestFixture]
public class File : Scenarios<File.TestScope>
{
protected override TestScope BuildTestScope()
{
return new TestScope();
}

protected override IOptimisticDataStore BuildStore(TestScope scope)
{
return new DebugOnlyFileDataStore(scope.DirectoryPath);
}

public class TestScope : ITestScope
{
public TestScope()
{
var ticks = DateTime.UtcNow.Ticks;
IdScopeName = string.Format("snowmakertest{0}", ticks);

DirectoryPath = Path.Combine(Path.GetTempPath(), IdScopeName);
Directory.CreateDirectory(DirectoryPath);
}

public string IdScopeName { get; private set; }
public string DirectoryPath { get; private set; }

public string ReadCurrentPersistedValue()
{
var filePath = Path.Combine(DirectoryPath, string.Format("{0}.txt", IdScopeName));
return System.IO.File.ReadAllText(filePath);
}

public void Dispose()
{
if (Directory.Exists(DirectoryPath))
Directory.Delete(DirectoryPath, true);
}
}
}
}
using System;
using System.IO;
using NUnit.Framework;
using SnowMaker;

namespace IntegrationTests.cs
{
[TestFixture]
public class OptimisticFile : Scenarios<OptimisticFile.TestScope>
{
protected override TestScope BuildTestScope()
{
return new TestScope();
}

protected override IOptimisticDataStore BuildStore(TestScope scope)
{
return new FileOptimisticDataStore(scope.DirectoryPath);
}

public class TestScope : ITestScope
{
public TestScope()
{
var ticks = DateTime.UtcNow.Ticks;
IdScopeName = string.Format("snowmakertest{0}", ticks);

DirectoryPath = Path.Combine(Path.GetTempPath(), IdScopeName);
Directory.CreateDirectory(DirectoryPath);
}

public string IdScopeName { get; private set; }
public string DirectoryPath { get; private set; }

public string ReadCurrentPersistedValue()
{
var filePath = Path.Combine(DirectoryPath, string.Format("{0}.txt", IdScopeName));
using (TextReader reader = new StreamReader(filePath, System.Text.Encoding.Default))
return reader.ReadToEnd();
}

public void Dispose()
{
if (Directory.Exists(DirectoryPath))
Directory.Delete(DirectoryPath, true);
}
}
}
}
27 changes: 15 additions & 12 deletions SnowMaker/BlobOptimisticDataStore.cs
@@ -4,6 +4,7 @@
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.Blob;
using System.IO;
using System;

namespace SnowMaker
{
@@ -24,35 +25,37 @@ public BlobOptimisticDataStore(CloudStorageAccount account, string containerName

blobReferences = new Dictionary<string, ICloudBlob>();
}

public string GetData(string blockName)
public long GetNextBatch(string blockName, int batchSize)
{
if (batchSize <= 0)
throw new ArgumentOutOfRangeException("batchSize");

long id;
var blobReference = GetBlobReference(blockName);
using (var stream = new MemoryStream())
{
blobReference.DownloadToStream(stream);
return Encoding.UTF8.GetString(stream.ToArray());
if (!Int64.TryParse(Encoding.UTF8.GetString(stream.ToArray()), out id))
throw new Exception(String.Format("The id seed returned from the blob for blockName '{0}' was corrupt, and could not be parsed as a long. The data returned was: {1}", blockName, Encoding.UTF8.GetString(stream.ToArray())));
if (id <= 0)
throw new Exception(String.Format("The id seed returned from the blob for blockName '{0}' was {1}", blockName, id));
}
}

public bool TryOptimisticWrite(string scopeName, string data)
{
var blobReference = GetBlobReference(scopeName);
try
{
UploadText(
blobReference,
data,
(id + batchSize).ToString(),
AccessCondition.GenerateIfMatchCondition(blobReference.Properties.ETag));
}
catch (StorageException exc)
{
if (exc.RequestInformation.HttpStatusCode == (int)HttpStatusCode.PreconditionFailed)
return false;

throw;
return -1;
}
return true;

return id;
}

ICloudBlob GetBlobReference(string blockName)
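
Because the hunk above interleaves the removed and added lines, here is a sketch of how the refactored method appears to read once the change is applied. Exception messages are abbreviated, UploadText and GetBlobReference are the existing private helpers of this class, and the method sits inside BlobOptimisticDataStore rather than compiling on its own:

public long GetNextBatch(string blockName, int batchSize)
{
    if (batchSize <= 0)
        throw new ArgumentOutOfRangeException("batchSize");

    long id;
    var blobReference = GetBlobReference(blockName);
    using (var stream = new MemoryStream())
    {
        // read and validate the current seed
        blobReference.DownloadToStream(stream);
        if (!Int64.TryParse(Encoding.UTF8.GetString(stream.ToArray()), out id))
            throw new Exception(string.Format(
                "The id seed returned from the blob for blockName '{0}' was corrupt and could not be parsed as a long.", blockName));
        if (id <= 0)
            throw new Exception(string.Format(
                "The id seed returned from the blob for blockName '{0}' was {1}.", blockName, id));
    }

    try
    {
        // reserve the batch with an ETag-conditioned (optimistic) write
        UploadText(
            blobReference,
            (id + batchSize).ToString(),
            AccessCondition.GenerateIfMatchCondition(blobReference.Properties.ETag));
    }
    catch (StorageException exc)
    {
        // another writer updated the blob first; return -1 so the caller retries
        if (exc.RequestInformation.HttpStatusCode == (int)HttpStatusCode.PreconditionFailed)
            return -1;
    }

    return id;
}
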
41 changes: 0 additions & 41 deletions SnowMaker/DebugOnlyFileDataStore.cs

This file was deleted.

72 changes: 72 additions & 0 deletions SnowMaker/FileOptimisticDataStore.cs
@@ -0,0 +1,72 @@
using System;
using System.IO;
using System.Text;

namespace SnowMaker
{
public class FileOptimisticDataStore : IOptimisticDataStore
{
public static readonly Encoding Encoding = Encoding.Default;
public const long SeedValue = 1;

readonly string directoryPath;

public FileOptimisticDataStore(string directoryPath)
{
this.directoryPath = directoryPath;
}

public long GetNextBatch(string blockName, int batchSize)
{
if (batchSize <= 0)
throw new ArgumentOutOfRangeException("batchSize");

var blockPath = Path.Combine(directoryPath, string.Format("{0}.txt", blockName));

try
{
using (FileStream stream = File.Open(blockPath, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.None))
{
if (stream.Length == 0)
{
// a new file was created
using (StreamWriter writer = new StreamWriter(stream, Encoding))
writer.Write((SeedValue + batchSize).ToString());
return SeedValue;
}

// read the next available id
// can't use StreamReader here because disposing it would also dispose the file stream we still need
StringBuilder str = new StringBuilder();
byte[] buffer = new byte[128];
int length;
while ((length = stream.Read(buffer, 0, buffer.Length)) > 0)
str.Append(Encoding.GetString(buffer, 0, length));

long id;
if (!Int64.TryParse(str.ToString(), out id))
throw new Exception(String.Format("The id seed returned from the file for blockName '{0}' was corrupt, and could not be parsed as a long. The data returned was: {1}", blockName, str.ToString()));
if (id <= 0)
throw new Exception(String.Format("The id seed returned from the file for blockName '{0}' was {1}", blockName, id));

// mark the next batch as taken
stream.Position = 0;
stream.SetLength(Encoding.GetByteCount((id + batchSize).ToString()));
using (StreamWriter writer = new StreamWriter(stream, Encoding))
writer.Write((id + batchSize).ToString());

return id;
}
}
catch (IOException)
{
return -1;
}
}
}
}
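
For orientation, a minimal usage sketch of the new file-backed store follows; the directory path, scope name, and batch size are illustrative placeholders, and UniqueIdGenerator with its BatchSize property comes from the generator changes further down in this diff:

using System;
using SnowMaker;

static class FileStoreUsageExample
{
    static void Main()
    {
        // one "<blockName>.txt" seed file is kept per scope under this directory (placeholder path)
        var store = new FileOptimisticDataStore(@"C:\Temp\snowmaker-ids");

        // the generator retries GetNextBatch while the store returns -1 (file locked by a concurrent writer)
        var generator = new UniqueIdGenerator(store) { BatchSize = 100 };

        for (var i = 0; i < 5; i++)
            Console.WriteLine(generator.NextId("invoices")); // "invoices" is an illustrative scope name
    }
}
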
9 changes: 7 additions & 2 deletions SnowMaker/IOptimisticDataStore.cs
@@ -2,7 +2,12 @@
{
public interface IOptimisticDataStore
{
string GetData(string blockName);
bool TryOptimisticWrite(string blockName, string data);
/// <summary>
/// Reserves the next <paramref name="batchSize"/> ids, starting at the next available id for the block.
/// </summary>
/// <param name="blockName">The block (scope) whose id seed should be advanced.</param>
/// <param name="batchSize">The number of ids to reserve.</param>
/// <returns>The first id in the reserved batch, or -1 if the call was unable to lock the store for exclusive access.</returns>
long GetNextBatch(string blockName, int batchSize);
}
}
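
To make the new contract concrete, here is a minimal in-memory implementation; it is hypothetical and not part of this pull request, but it shows the semantics an implementer is expected to provide: reserve the batch atomically, return its first id, or return -1 when exclusive access cannot be obtained.

using System;
using System.Collections.Generic;

namespace SnowMaker
{
    // Hypothetical, for illustration only: an in-memory IOptimisticDataStore.
    public class InMemoryOptimisticDataStore : IOptimisticDataStore
    {
        readonly Dictionary<string, long> seeds = new Dictionary<string, long>();
        readonly object sync = new object();

        public long GetNextBatch(string blockName, int batchSize)
        {
            if (batchSize <= 0)
                throw new ArgumentOutOfRangeException("batchSize");

            lock (sync)
            {
                long id;
                if (!seeds.TryGetValue(blockName, out id))
                    id = 1; // same seed value the file store uses

                seeds[blockName] = id + batchSize; // mark the batch as taken
                return id; // an in-process lock never fails, so -1 is never returned here
            }
        }
    }
}
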
2 changes: 1 addition & 1 deletion SnowMaker/SnowMaker.csproj
@@ -65,7 +65,7 @@
<SubType>Code</SubType>
</Compile>
<Compile Include="DictionaryExtensions.cs" />
<Compile Include="DebugOnlyFileDataStore.cs" />
<Compile Include="FileOptimisticDataStore.cs" />
<Compile Include="IOptimisticDataStore.cs">
<SubType>Code</SubType>
</Compile>
21 changes: 6 additions & 15 deletions SnowMaker/UniqueIdGenerator.cs
@@ -65,22 +65,13 @@ void UpdateFromSyncStore(string scopeName, ScopeState state)

while (writesAttempted < maxWriteAttempts)
{
var data = optimisticDataStore.GetData(scopeName);

long nextId;
if (!long.TryParse(data, out nextId))
throw new UniqueIdGenerationException(string.Format(
"The id seed returned from storage for scope '{0}' was corrupt, and could not be parsed as a long. The data returned was: {1}",
scopeName,
data));

state.LastId = nextId - 1;
state.HighestIdAvailableInBatch = nextId - 1 + batchSize;
var firstIdInNextBatch = state.HighestIdAvailableInBatch + 1;

if (optimisticDataStore.TryOptimisticWrite(scopeName, firstIdInNextBatch.ToString(CultureInfo.InvariantCulture)))
long id = optimisticDataStore.GetNextBatch(scopeName, batchSize);
if (id != -1)
{
state.LastId = id - 1;
state.HighestIdAvailableInBatch = id - 1 + batchSize;
return;

}
writesAttempted++;
}
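
Assembled from the hunk above, the retry loop in UpdateFromSyncStore now reads roughly as follows. This is a sketch: the writesAttempted initialisation and the exception thrown once retries are exhausted sit outside the diffed lines, so they are assumptions here.

void UpdateFromSyncStore(string scopeName, ScopeState state)
{
    var writesAttempted = 0; // assumed initialisation; not part of the hunk shown

    while (writesAttempted < maxWriteAttempts)
    {
        // the store now reserves the batch itself; -1 signals a lost optimistic race
        long id = optimisticDataStore.GetNextBatch(scopeName, batchSize);
        if (id != -1)
        {
            state.LastId = id - 1;
            state.HighestIdAvailableInBatch = id - 1 + batchSize;
            return;
        }
        writesAttempted++;
    }

    // assumed: the existing code throws once maxWriteAttempts is exceeded, as exercised by
    // NextIdShouldThrowExceptionWhenRetriesAreExhausted in the unit tests below
    throw new UniqueIdGenerationException(
        string.Format("Failed to update the data store after {0} attempts.", writesAttempted));
}
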

146 changes: 146 additions & 0 deletions UnitTests/FileOptimisticDataStoreTests.cs
@@ -0,0 +1,146 @@
using System;
//using NSubstitute;
using NUnit.Framework;
using System.IO;
using System.Threading.Tasks;
using System.Threading;

namespace SnowMaker.UnitTests
{
[TestFixture]
public class FileOptimisticDataStoreTests
{
private class TestScope: IDisposable
{
public readonly string FilePath;

public TestScope(string scope)
{
FilePath = Path.Combine(Path.GetTempPath(), string.Format("{0}.txt", scope));
}

// does not lock the file
public long ReadCurrentPersistedValue()
{
using (TextReader reader = new StreamReader(FilePath, FileOptimisticDataStore.Encoding))
return Convert.ToInt64(reader.ReadToEnd());
}

public void Dispose()
{
int count = 0;
do
{
try
{
File.Delete(FilePath);
return;
}
catch
{
// retry; the file could still be locked by another thread from the tests
count++;
Thread.Sleep(10);
}
}
while (count < 5);
}
}

private const string scope = "test";
private const int batch = 1;

[Test]
public void ConstructorShouldNotCreateFile()
{
using (var testScope = new TestScope(scope))
{
var store = new FileOptimisticDataStore(Path.GetTempPath());
Assert.IsFalse(File.Exists(testScope.FilePath));
}
}

[Test]
public void ShouldCreateFileOnFirstAccess()
{
using (var testScope = new TestScope(scope))
{
var store = new FileOptimisticDataStore(Path.GetTempPath());
store.GetNextBatch(scope, batch);
Assert.IsTrue(File.Exists(testScope.FilePath));
Assert.AreEqual(FileOptimisticDataStore.SeedValue + batch, testScope.ReadCurrentPersistedValue());
}
}

[Test]
public void GetNextBatchShouldBlockFileAccess()
{
using (var testScope = new TestScope(scope))
{
CancellationTokenSource cancelTokenSource1 = new CancellationTokenSource();
CancellationTokenSource cancelTokenSource2 = new CancellationTokenSource();

try
{
var store = new FileOptimisticDataStore(Path.GetTempPath());
store.GetNextBatch(scope, batch); // create the file

CancellationToken cancelToken1 = cancelTokenSource1.Token;
Task task1 = Task.Factory.StartNew(() =>
{
do
store.GetNextBatch(scope, batch);
while (!cancelToken1.IsCancellationRequested);
}, cancelToken1);

CancellationToken cancelToken2 = cancelTokenSource2.Token;
Task task2 = Task.Factory.StartNew(() =>
{
do
{
try
{
testScope.ReadCurrentPersistedValue();
}
catch (IOException e)
{
if (e.Message.Equals("The process cannot access the file '" + testScope.FilePath + "' because it is being used by another process."))
return;
throw;
}
}
while (!cancelToken2.IsCancellationRequested);
}, cancelToken2);

if (task2.Wait(3000) && !task2.IsFaulted)
Assert.Pass();
else
{
if (task2.IsFaulted)
Assert.Inconclusive("The second thread failed with error '" + task2.Exception.ToString() + "'.");
else
Assert.Inconclusive("The second thread was not blocked in an interval of 3000 ms.");
}
}
catch
{
cancelTokenSource1.Cancel();
cancelTokenSource2.Cancel();
throw;
}
}
}

[Test]
public void GetNextBatchShouldReturnMinusOneWhenBlocked()
{
using (var testScope = new TestScope(scope))
{
var store = new FileOptimisticDataStore(Path.GetTempPath());
store.GetNextBatch(scope, batch);
using (FileStream stream = File.Open(testScope.FilePath, FileMode.Open, FileAccess.Read, FileShare.None))
Assert.AreEqual(-1, store.GetNextBatch(scope, batch));
}
}
}
}
57 changes: 10 additions & 47 deletions UnitTests/UniqueIdGeneratorTest.cs
@@ -13,7 +13,7 @@ public void ConstructorShouldNotRetrieveDataFromStore()
var store = Substitute.For<IOptimisticDataStore>();
// ReSharper disable once ObjectCreationAsStatement
new UniqueIdGenerator(store);
store.DidNotReceiveWithAnyArgs().GetData(null);
store.DidNotReceiveWithAnyArgs().GetNextBatch(null, 1);
}

[Test]
@@ -40,79 +40,42 @@ public void MaxWriteAttemptsShouldThrowArgumentOutOfRangeExceptionWhenValueIsNeg
};
}

[Test]
[ExpectedException(typeof(UniqueIdGenerationException))]
public void NextIdShouldThrowExceptionOnCorruptData()
{
var store = Substitute.For<IOptimisticDataStore>();
store.GetData("test").Returns("abc");

var generator = new UniqueIdGenerator(store);

generator.NextId("test");
}

[Test]
[ExpectedException(typeof(UniqueIdGenerationException))]
public void NextIdShouldThrowExceptionOnNullData()
{
var store = Substitute.For<IOptimisticDataStore>();
store.GetData("test").Returns((string)null);

var generator = new UniqueIdGenerator(store);

generator.NextId("test");
}

[Test]
public void NextIdShouldReturnNumbersSequentially()
{
var store = Substitute.For<IOptimisticDataStore>();
store.GetData("test").Returns("0", "250");
store.TryOptimisticWrite("test", "3").Returns(true);
store.GetNextBatch("test", 3).Returns(1, 251);

var subject = new UniqueIdGenerator(store)
{
BatchSize = 3
};
var subject = new UniqueIdGenerator(store) { BatchSize = 3 };

Assert.AreEqual(0, subject.NextId("test"));
Assert.AreEqual(1, subject.NextId("test"));
Assert.AreEqual(2, subject.NextId("test"));
Assert.AreEqual(3, subject.NextId("test"));
}

[Test]
public void NextIdShouldRollOverToNewBlockWhenCurrentBlockIsExhausted()
{
var store = Substitute.For<IOptimisticDataStore>();
store.GetData("test").Returns("0", "250");
store.TryOptimisticWrite("test", "3").Returns(true);
store.TryOptimisticWrite("test", "253").Returns(true);
store.GetNextBatch("test", 3).Returns(1, 251);

var subject = new UniqueIdGenerator(store)
{
BatchSize = 3
};
var subject = new UniqueIdGenerator(store) { BatchSize = 3 };

Assert.AreEqual(0, subject.NextId("test"));
Assert.AreEqual(1, subject.NextId("test"));
Assert.AreEqual(2, subject.NextId("test"));
Assert.AreEqual(250, subject.NextId("test"));
Assert.AreEqual(3, subject.NextId("test"));
Assert.AreEqual(251, subject.NextId("test"));
Assert.AreEqual(252, subject.NextId("test"));
Assert.AreEqual(253, subject.NextId("test"));
}

[Test]
public void NextIdShouldThrowExceptionWhenRetriesAreExhausted()
{
var store = Substitute.For<IOptimisticDataStore>();
store.GetData("test").Returns("0");
store.TryOptimisticWrite("test", "3").Returns(false, false, false, true);
store.GetNextBatch("test", 100).Returns(-1, -1, -1, 1);

var generator = new UniqueIdGenerator(store)
{
MaxWriteAttempts = 3
};
var generator = new UniqueIdGenerator(store) { MaxWriteAttempts = 3 };

try
{
1 change: 1 addition & 0 deletions UnitTests/UnitTests.csproj
@@ -74,6 +74,7 @@
</ItemGroup>
<ItemGroup>
<Compile Include="DictionaryExtentionsTests.cs" />
<Compile Include="FileOptimisticDataStoreTests.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="UniqueIdGeneratorTest.cs" />
</ItemGroup>