CI/CD: Adding automatic pipeline tests (#153)
* Support for pipelines endpoint

* pipelines

* Adding assert

* Adding lock

* Refactor initialization

* Cleanup

* yaml update

* Using async initialize

* Cleanup

* throwing if retries consumed

* Removing TestClassData

* Renames
ealsur authored Jul 30, 2020
1 parent 81c53de commit 9b4efa2
Showing 9 changed files with 185 additions and 204 deletions.
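The bullets in the commit message above (async initialize, adding a lock, throwing if retries are consumed, removing TestClassData) describe a refactor of the IntegrationTest base class; that file is among the nine changed files but its diff is not loaded below, where only the resulting rename from this.ClassData.monitoredCollectionInfo to this.MonitoredCollectionInfo is visible. The following is a minimal, hypothetical sketch of the pattern those bullets name; apart from MonitoredCollectionInfo, none of the member names are taken from the repository.

using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Documents.ChangeFeedProcessor;

// Hypothetical sketch, not code from this commit: it only illustrates async initialization
// guarded by a lock, retried a bounded number of times, and throwing once retries are consumed.
public abstract class IntegrationTestSketch
{
    private static readonly SemaphoreSlim InitLock = new SemaphoreSlim(1, 1);
    private static DocumentCollectionInfo monitoredCollectionInfo;

    // The property the test diffs below switch to (replacing this.ClassData.monitoredCollectionInfo).
    protected DocumentCollectionInfo MonitoredCollectionInfo => monitoredCollectionInfo;

    protected static async Task InitializeAsync(
        Func<Task<DocumentCollectionInfo>> createCollectionAsync, int maxRetries = 3)
    {
        await InitLock.WaitAsync();
        try
        {
            if (monitoredCollectionInfo != null)
            {
                return; // already initialized by an earlier test class
            }

            Exception lastError = null;
            for (int attempt = 1; attempt <= maxRetries; attempt++)
            {
                try
                {
                    monitoredCollectionInfo = await createCollectionAsync();
                    return;
                }
                catch (Exception ex)
                {
                    lastError = ex;
                    await Task.Delay(TimeSpan.FromSeconds(5)); // back off before the next attempt
                }
            }

            // Retries consumed: fail loudly instead of letting the tests run half-initialized.
            throw new InvalidOperationException("Test collection initialization failed after retries.", lastError);
        }
        finally
        {
            InitLock.Release();
        }
    }
}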
45 changes: 45 additions & 0 deletions pipeline/azure-pipelines-emulator.yml
@@ -0,0 +1,45 @@
trigger: none
pr:
  branches:
    include:
      - master
      - releases/*

pool:
  vmImage: 'windows-latest'

steps:
- pwsh: |
    Write-Host "Downloading Cosmos Emulator - $env:EMULATORMSIURL" -ForegroundColor green
    Invoke-WebRequest "$env:EMULATORMSIURL" -OutFile "$env:temp\azure-cosmosdb-emulator.msi"
    Write-Host "Finished Downloading Cosmos Emulator - $env:temp\azure-cosmosdb-emulator.msi" -ForegroundColor green
    dir "$env:temp"
    choco install lessmsi
    choco upgrade lessmsi
    mkdir "$env:temp\Azure Cosmos DB Emulator"
    lessmsi x "$env:temp\azure-cosmosdb-emulator.msi" "$env:temp\Azure Cosmos DB Emulator\"
    Write-Host "Starting Cosmos DB Emulator" -ForegroundColor green
    Start-Process "$env:temp\Azure Cosmos DB Emulator\SourceDir\Azure Cosmos DB Emulator\CosmosDB.Emulator.exe" "/NoExplorer /NoUI /DisableRateLimiting /PartitionCount=100 /Consistency=Strong /enableRio /overrides=sqlAllowGroupByClause:true" -Verb RunAs
    Import-Module "$env:temp\Azure Cosmos DB Emulator\SourceDir\Azure Cosmos DB Emulator\PSModules\Microsoft.Azure.CosmosDB.Emulator"
    Get-Item env:* | Sort-Object -Property Name
    for ($i=0; $i -lt 10; $i++) {
      $status=Get-CosmosDbEmulatorStatus
      if ($status -ne "Running") {
        sleep 30;
        Write-Host "Cosmos DB Emulator Status: $status" -ForegroundColor yellow
      } else {
        break;
      }
    }
- task: DotNetCoreCLI@2
  displayName: Build
  inputs:
    command: build
    projects: '**/*.csproj'
- script: dotnet test ./src/DocumentDB.ChangeFeedProcessor.IntegrationTests/DocumentDB.ChangeFeedProcessor.IntegrationTests.csproj --logger trx
  displayName: 'Running tests'
- task: PublishTestResults@2
  condition: succeededOrFailed()
  inputs:
    testRunner: VSTest
    testResultsFiles: '**/*.trx'
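The integration tests exercised by this pipeline talk to the emulator instance started above through DocumentCollectionInfo objects (visible in the test diffs further down). As a rough, hypothetical illustration of what that wiring could look like: the endpoint and key below are the emulator's documented defaults, while the database and collection names are placeholders rather than values from this commit.

using System;
using Microsoft.Azure.Documents.Client;
using Microsoft.Azure.Documents.ChangeFeedProcessor;

// Illustrative only: how a test's DocumentCollectionInfo could point at the emulator instance
// started by the pipeline step above.
public static class EmulatorSettingsSketch
{
    public static DocumentCollectionInfo CreateMonitoredCollectionInfo()
    {
        return new DocumentCollectionInfo
        {
            Uri = new Uri("https://localhost:8081"), // emulator default endpoint
            MasterKey = "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==", // emulator well-known key
            DatabaseName = "IntegrationTests",       // placeholder
            CollectionName = "monitored",            // placeholder
            ConnectionPolicy = new ConnectionPolicy { ConnectionMode = ConnectionMode.Direct }
        };
    }
}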
29 changes: 29 additions & 0 deletions pipeline/azure-pipelines.yml
@@ -0,0 +1,29 @@
trigger: none
pr:
  branches:
    include:
      - master
      - releases/*

strategy:
  matrix:
    windows:
      imageName: 'windows-latest'

pool:
  vmImage: $(imageName)

steps:
- task: DotNetCoreCLI@2
  displayName: Build
  inputs:
    command: build
    projects: '**/*.csproj'
    arguments: '--configuration Release'
- script: dotnet test ./src/DocumentDB.ChangeFeedProcessor.UnitTests/DocumentDB.ChangeFeedProcessor.UnitTests.csproj --logger trx
  displayName: 'Running tests'
- task: PublishTestResults@2
  condition: succeededOrFailed()
  inputs:
    testRunner: VSTest
    testResultsFiles: '**/*.trx'
@@ -25,14 +25,10 @@ public class DynamicCollectionTests : IntegrationTest
{
const int documentCount = 513;

public DynamicCollectionTests(IntegrationTestFixture fixture) : base(fixture, typeof(DynamicCollectionTests))
{
}

[Fact]
public async Task CountAddedDocuments()
{
int partitionCount = await IntegrationTestsHelper.GetPartitionCount(this.ClassData.monitoredCollectionInfo);
int partitionCount = await IntegrationTestsHelper.GetPartitionCount(this.MonitoredCollectionInfo);
int openedCount = 0, processedCount = 0;
var allObserversStarted = new ManualResetEvent(false);
var allDocsProcessed = new ManualResetEvent(false);
@@ -54,7 +50,7 @@ public async Task CountAddedDocuments()

var host = new ChangeFeedEventHost(
Guid.NewGuid().ToString(),
this.ClassData.monitoredCollectionInfo,
this.MonitoredCollectionInfo,
this.LeaseCollectionInfo,
new ChangeFeedOptions { StartFromBeginning = false },
new ChangeFeedHostOptions());
@@ -63,11 +59,11 @@ public async Task CountAddedDocuments()
var isStartOk = allObserversStarted.WaitOne(IntegrationTest.changeWaitTimeout + IntegrationTest.changeWaitTimeout);
Assert.True(isStartOk, "Timed out waiting for observers to start");

using (var client = new DocumentClient(this.ClassData.monitoredCollectionInfo.Uri, this.ClassData.monitoredCollectionInfo.MasterKey, this.ClassData.monitoredCollectionInfo.ConnectionPolicy))
using (var client = new DocumentClient(this.MonitoredCollectionInfo.Uri, this.MonitoredCollectionInfo.MasterKey, this.MonitoredCollectionInfo.ConnectionPolicy))
{
await IntegrationTestsHelper.CreateDocumentsAsync(
client,
UriFactory.CreateDocumentCollectionUri(this.ClassData.monitoredCollectionInfo.DatabaseName, this.ClassData.monitoredCollectionInfo.CollectionName),
UriFactory.CreateDocumentCollectionUri(this.MonitoredCollectionInfo.DatabaseName, this.MonitoredCollectionInfo.CollectionName),
documentCount);
}

@@ -86,8 +82,8 @@ await IntegrationTestsHelper.CreateDocumentsAsync(
[Fact]
public async Task TestStartTime()
{
var collectionUri = UriFactory.CreateDocumentCollectionUri(this.ClassData.monitoredCollectionInfo.DatabaseName, this.ClassData.monitoredCollectionInfo.CollectionName);
using (var client = new DocumentClient(this.ClassData.monitoredCollectionInfo.Uri, this.ClassData.monitoredCollectionInfo.MasterKey, this.ClassData.monitoredCollectionInfo.ConnectionPolicy))
var collectionUri = UriFactory.CreateDocumentCollectionUri(this.MonitoredCollectionInfo.DatabaseName, this.MonitoredCollectionInfo.CollectionName);
using (var client = new DocumentClient(this.MonitoredCollectionInfo.Uri, this.MonitoredCollectionInfo.MasterKey, this.MonitoredCollectionInfo.ConnectionPolicy))
{
await client.CreateDocumentAsync(collectionUri, JsonConvert.DeserializeObject("{\"id\": \"doc1\"}"));

@@ -99,7 +95,7 @@ public async Task TestStartTime()

await client.CreateDocumentAsync(collectionUri, JsonConvert.DeserializeObject("{\"id\": \"doc2\"}"));

int partitionCount = await IntegrationTestsHelper.GetPartitionCount(this.ClassData.monitoredCollectionInfo);
int partitionCount = await IntegrationTestsHelper.GetPartitionCount(this.MonitoredCollectionInfo);
var allDocsProcessed = new ManualResetEvent(false);

var processedDocs = new List<Document>();
@@ -118,7 +114,7 @@ public async Task TestStartTime()

var host = new ChangeFeedEventHost(
Guid.NewGuid().ToString(),
this.ClassData.monitoredCollectionInfo,
this.MonitoredCollectionInfo,
this.LeaseCollectionInfo,
new ChangeFeedOptions { StartTime = timeInBeweeen },
new ChangeFeedHostOptions());
@@ -143,15 +139,15 @@ public async Task TestStartTime()
public async Task TestReducePageSizeScenario()
{
// Use a different collection: we need a 1-partition collection to make sure all docs get to the same partition.
var databaseUri = UriFactory.CreateDatabaseUri(this.ClassData.monitoredCollectionInfo.DatabaseName);
var databaseUri = UriFactory.CreateDatabaseUri(this.MonitoredCollectionInfo.DatabaseName);

DocumentCollectionInfo monitoredCollectionInfo = new DocumentCollectionInfo(this.ClassData.monitoredCollectionInfo);
monitoredCollectionInfo.CollectionName = this.ClassData.monitoredCollectionInfo.CollectionName + "_" + Guid.NewGuid().ToString();
DocumentCollectionInfo monitoredCollectionInfo = new DocumentCollectionInfo(this.MonitoredCollectionInfo);
monitoredCollectionInfo.CollectionName = this.MonitoredCollectionInfo.CollectionName + "_" + Guid.NewGuid().ToString();

var collectionUri = UriFactory.CreateDocumentCollectionUri(this.ClassData.monitoredCollectionInfo.DatabaseName, monitoredCollectionInfo.CollectionName);
var collectionUri = UriFactory.CreateDocumentCollectionUri(this.MonitoredCollectionInfo.DatabaseName, monitoredCollectionInfo.CollectionName);
var monitoredCollection = new DocumentCollection { Id = monitoredCollectionInfo.CollectionName };

using (var client = new DocumentClient(this.ClassData.monitoredCollectionInfo.Uri, this.ClassData.monitoredCollectionInfo.MasterKey, this.ClassData.monitoredCollectionInfo.ConnectionPolicy))
using (var client = new DocumentClient(this.MonitoredCollectionInfo.Uri, this.MonitoredCollectionInfo.MasterKey, this.MonitoredCollectionInfo.ConnectionPolicy))
{
await client.CreateDocumentCollectionAsync(databaseUri, monitoredCollection, new RequestOptions { OfferThroughput = 10000 });

@@ -169,7 +165,7 @@ public async Task TestReducePageSizeScenario()
);}"
};

var sprocUri = UriFactory.CreateStoredProcedureUri(this.ClassData.monitoredCollectionInfo.DatabaseName, monitoredCollection.Id, sproc.Id);
var sprocUri = UriFactory.CreateStoredProcedureUri(this.MonitoredCollectionInfo.DatabaseName, monitoredCollection.Id, sproc.Id);
await client.CreateStoredProcedureAsync(collectionUri, sproc);
await client.ExecuteStoredProcedureAsync<object>(sprocUri, 0);

@@ -18,17 +18,15 @@ namespace Microsoft.Azure.Documents.ChangeFeedProcessor.IntegrationTests
[Collection("Integration tests")]
public class EstimatorTests : IntegrationTest
{
public EstimatorTests(IntegrationTestFixture fixture) : base(fixture, typeof(EstimatorTests), false)
public EstimatorTests() : base(false)
{
}

[Fact]
public async Task CountPendingDocuments()
{
// Cleanup the test collection to avoid other tests' documents causing issues with StartFromBeginning
await this.ResetTestCollection();
int documentCount = 1;
int partitionCount = await IntegrationTestsHelper.GetPartitionCount(this.ClassData.monitoredCollectionInfo);
int partitionCount = await IntegrationTestsHelper.GetPartitionCount(this.MonitoredCollectionInfo);
int openedCount = 0, processedCount = 0;
var allObserversStarted = new ManualResetEvent(false);
var allDocsProcessed = new ManualResetEvent(false);
@@ -52,7 +50,7 @@ public async Task CountPendingDocuments()

var host = new ChangeFeedEventHost(
hostName,
this.ClassData.monitoredCollectionInfo,
this.MonitoredCollectionInfo,
this.LeaseCollectionInfo,
new ChangeFeedOptions { StartFromBeginning = false },
new ChangeFeedHostOptions());
@@ -64,11 +62,11 @@ public async Task CountPendingDocuments()
long estimation = await host.GetEstimatedRemainingWork();
Assert.Equal(0, estimation);

using (var client = new DocumentClient(this.ClassData.monitoredCollectionInfo.Uri, this.ClassData.monitoredCollectionInfo.MasterKey, this.ClassData.monitoredCollectionInfo.ConnectionPolicy))
using (var client = new DocumentClient(this.MonitoredCollectionInfo.Uri, this.MonitoredCollectionInfo.MasterKey, this.MonitoredCollectionInfo.ConnectionPolicy))
{
await IntegrationTestsHelper.CreateDocumentsAsync(
client,
UriFactory.CreateDocumentCollectionUri(this.ClassData.monitoredCollectionInfo.DatabaseName, this.ClassData.monitoredCollectionInfo.CollectionName),
UriFactory.CreateDocumentCollectionUri(this.MonitoredCollectionInfo.DatabaseName, this.MonitoredCollectionInfo.CollectionName),
1);

var isStartOk = allObserversStarted.WaitOne(IntegrationTest.changeWaitTimeout + IntegrationTest.changeWaitTimeout);
@@ -84,15 +82,15 @@ await IntegrationTestsHelper.CreateDocumentsAsync(

await IntegrationTestsHelper.CreateDocumentsAsync(
client,
UriFactory.CreateDocumentCollectionUri(this.ClassData.monitoredCollectionInfo.DatabaseName, this.ClassData.monitoredCollectionInfo.CollectionName),
UriFactory.CreateDocumentCollectionUri(this.MonitoredCollectionInfo.DatabaseName, this.MonitoredCollectionInfo.CollectionName),
1);

estimation = await host.GetEstimatedRemainingWork();
Assert.Equal(1, estimation);

await IntegrationTestsHelper.CreateDocumentsAsync(
client,
UriFactory.CreateDocumentCollectionUri(this.ClassData.monitoredCollectionInfo.DatabaseName, this.ClassData.monitoredCollectionInfo.CollectionName),
UriFactory.CreateDocumentCollectionUri(this.MonitoredCollectionInfo.DatabaseName, this.MonitoredCollectionInfo.CollectionName),
10);

estimation = await host.GetEstimatedRemainingWork();
@@ -101,7 +99,7 @@ await IntegrationTestsHelper.CreateDocumentsAsync(
// Create a new host to process pending changes
var newHost = new ChangeFeedEventHost(
hostName,
this.ClassData.monitoredCollectionInfo,
this.MonitoredCollectionInfo,
this.LeaseCollectionInfo,
new ChangeFeedOptions { StartFromBeginning = false },
new ChangeFeedHostOptions());
@@ -132,13 +130,11 @@ await IntegrationTestsHelper.CreateDocumentsAsync(
[Fact]
public async Task WhenNoLeasesExistReturn1()
{
// Cleanup the test collection to avoid other tests' documents causing issues with StartFromBeginning
await this.ResetTestCollection();
var hostName = Guid.NewGuid().ToString();

var host = new ChangeFeedEventHost(
hostName,
this.ClassData.monitoredCollectionInfo,
this.MonitoredCollectionInfo,
this.LeaseCollectionInfo,
new ChangeFeedOptions { StartFromBeginning = false },
new ChangeFeedHostOptions());
@@ -155,10 +151,8 @@ public async Task WhenNoLeasesExistReturn1()
[Fact]
public async Task WhenLeasesHaveContinuationTokenNullReturn0()
{
// Cleanup the test collection to avoid other tests' documents causing issues with StartFromBeginning
await this.ResetTestCollection();
int documentCount = 1;
int partitionCount = await IntegrationTestsHelper.GetPartitionCount(this.ClassData.monitoredCollectionInfo);
int partitionCount = await IntegrationTestsHelper.GetPartitionCount(this.MonitoredCollectionInfo);
int openedCount = 0, processedCount = 0;
var allObserversStarted = new ManualResetEvent(false);
var allDocsProcessed = new ManualResetEvent(false);
@@ -183,7 +177,7 @@ public async Task WhenLeasesHaveContinuationTokenNullReturn0()
// We create a host to initialize the leases with ContinuationToken null
var host = new ChangeFeedEventHost(
hostName,
this.ClassData.monitoredCollectionInfo,
this.MonitoredCollectionInfo,
this.LeaseCollectionInfo,
new ChangeFeedOptions { StartFromBeginning = false },
new ChangeFeedHostOptions());
@@ -205,10 +199,8 @@ public async Task WhenLeasesHaveContinuationTokenNullReturn0()
[Fact]
public async Task WhenLeasesHaveContinuationTokenNullStartFromBeginning()
{
// Cleanup the test collection to avoid other tests' documents causing issues with StartFromBeginning
await this.ResetTestCollection();
int documentCount = 1;
int partitionCount = await IntegrationTestsHelper.GetPartitionCount(this.ClassData.monitoredCollectionInfo);
int partitionCount = await IntegrationTestsHelper.GetPartitionCount(this.MonitoredCollectionInfo);
int openedCount = 0, processedCount = 0;
var allObserversStarted = new ManualResetEvent(false);
var allDocsProcessed = new ManualResetEvent(false);
@@ -233,7 +225,7 @@ public async Task WhenLeasesHaveContinuationTokenNullStartFromBeginning()
// We create a host to initialize the leases with ContinuationToken null
var host = new ChangeFeedEventHost(
hostName,
this.ClassData.monitoredCollectionInfo,
this.MonitoredCollectionInfo,
this.LeaseCollectionInfo,
new ChangeFeedOptions { StartFromBeginning = false },
new ChangeFeedHostOptions());
@@ -244,14 +236,14 @@ public async Task WhenLeasesHaveContinuationTokenNullStartFromBeginning()
await host.UnregisterObserversAsync();

using (var client = new DocumentClient(
this.ClassData.monitoredCollectionInfo.Uri,
this.ClassData.monitoredCollectionInfo.MasterKey,
this.ClassData.monitoredCollectionInfo.ConnectionPolicy))
this.MonitoredCollectionInfo.Uri,
this.MonitoredCollectionInfo.MasterKey,
this.MonitoredCollectionInfo.ConnectionPolicy))
{
// Insert documents
await IntegrationTestsHelper.CreateDocumentsAsync(
client,
UriFactory.CreateDocumentCollectionUri(this.ClassData.monitoredCollectionInfo.DatabaseName, this.ClassData.monitoredCollectionInfo.CollectionName),
UriFactory.CreateDocumentCollectionUri(this.MonitoredCollectionInfo.DatabaseName, this.MonitoredCollectionInfo.CollectionName),
10);

// Since the leases have a null ContinuationToken state, the estimator will use StartFromBeginning and pick up the changes that happened from the start
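Taken together, the estimator checks in these tests reduce to a short pattern: build a ChangeFeedEventHost over the monitored and lease collections and call GetEstimatedRemainingWork, which works even before any observer is registered (returning 1 while no leases exist, per WhenNoLeasesExistReturn1). A condensed sketch, assuming both DocumentCollectionInfo instances are already configured:

using System;
using System.Threading.Tasks;
using Microsoft.Azure.Documents.Client;
using Microsoft.Azure.Documents.ChangeFeedProcessor;

// Condensed from the EstimatorTests above; the two collection infos are assumed to be
// configured elsewhere (for example, against the emulator as sketched earlier).
public static class EstimatorUsageSketch
{
    public static async Task<long> GetPendingWorkAsync(
        DocumentCollectionInfo monitoredCollectionInfo,
        DocumentCollectionInfo leaseCollectionInfo)
    {
        var host = new ChangeFeedEventHost(
            Guid.NewGuid().ToString(),
            monitoredCollectionInfo,
            leaseCollectionInfo,
            new ChangeFeedOptions { StartFromBeginning = false },
            new ChangeFeedHostOptions());

        // Returns 1 while no leases exist yet; once leases are initialized it reflects the
        // number of pending changes on the monitored collection.
        return await host.GetEstimatedRemainingWork();
    }
}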