I want to test whether MongoDB can handle a collection of up to 50,000,000,000 documents, so I insert 10K elements every second using this method:
public async Task InsertManyAsync(List<DBRoutLine> list)
{
await _collRouts.InsertManyAsync(list);
}
The data looks like this:
namespace DbLayer.Models
{
public class DBRoutLineMeta
{
[BsonId]
[BsonRepresentation(BsonType.ObjectId)]
public string id { get; set; }
public int counter { get; set; }
}
[BsonIgnoreExtraElements]
public class DBRoutLine
{
[BsonId]
[BsonRepresentation(BsonType.ObjectId)]
public string id { get; set; }
public DBRoutLineMeta meta { get; set; } = new DBRoutLineMeta();
public DateTime timestamp { get; set; } = DateTime.UtcNow;
public string some_data { get; set; } = DateTime.Now.ToString();
}
}
The id members aren't actually required, but I have them just for testing.
After some time I get an exception like this:
"A bulk write operation resulted in one or more errors. WriteErrors: [ { Category : "DuplicateKey", Code : 11000, Message : "E11000 duplicate key error collection: TSTest.system.buckets.TSTable dup key: { _id: ObjectId('634e87301297fa65b7df9923') }" } ]."
It can also look like this (truncated):
"time-series insert failed: TSTest.TSTable :: caused by :: Expected
It never recovers from the error, even if I recreate the connection to the MongoDB server; only an application restart lets records be inserted again.
Test code:
using DbLayer.Models;
using DbLayer.Services;
using MongoDB.Bson;
Console.WriteLine("Hello, World!");
var service = new RoutService();
try
{
CancellationTokenSource tokenSource = new CancellationTokenSource();
CancellationToken token = tokenSource.Token;
var list = new List<DBRoutLine>();
for (int i = 0; i < 10000; i++)
{
DBRoutLine line = new DBRoutLine();
list.Add(line);
}
Task task = Task.Run(async () => {
int max_counter = await service.GetMaxCount();
bool recover = false;
while (!token.IsCancellationRequested)
{
try
{
if (!recover)
{
foreach (DBRoutLine line in list)
{
line.meta.counter = ++max_counter;
line.id = ObjectId.GenerateNewId().ToString();
line.meta.id = line.id;
}
}
var t1 = DateTime.Now;
await service.InsertManyAsync(list);
var t2 = DateTime.Now;
max_counter = await service.GetMaxCount();
var t3 = DateTime.Now;
Console
.WriteLine(
$"{max_counter}->Insert:{(int)(t2 - t1).TotalMilliseconds}, GetMax:{(int)(t3 - t2).TotalMilliseconds}");
recover = false;
}
catch(Exception ex)
{
recover = true;
await Task.Delay(3000);
Console.WriteLine(ex.Message);
service = new RoutService();
max_counter = await service.GetMaxCount();
}
}
}, token);
Console.WriteLine("Press any key to stop emulation\n");
Console.ReadKey();
tokenSource.Cancel();
Task.WaitAll(task);
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
}
Service code:
using DbLayer.Models;
using MongoDB.Bson;
using MongoDB.Driver;
namespace DbLayer.Services
{
public class RoutService:IDisposable
{
private readonly IMongoCollection<DBRoutLine> _collRouts;
private readonly MongoClient _mongoClient;
private readonly string CollName = "TSTable";
public RoutService()
{
var ConnectionString = "mongodb://mongoservice:27017";
_mongoClient = new MongoClient(
ConnectionString);
var mongoDatabase = _mongoClient.GetDatabase(
"TSTest");
var filter = new BsonDocument("name", CollName);
var options = new ListCollectionNamesOptions { Filter = filter };
if (!mongoDatabase.ListCollectionNames(options).Any())
{
var createOptions = new CreateCollectionOptions();
var timeField = nameof(DBRoutLine.timestamp);
var metaField = nameof(DBRoutLine.meta);
createOptions.TimeSeriesOptions =
new TimeSeriesOptions(timeField, metaField, TimeSeriesGranularity.Minutes);
mongoDatabase.CreateCollection(
CollName,
createOptions);
}
_collRouts =
mongoDatabase.GetCollection<DBRoutLine>(
CollName
);
CreateIndexes();
}
private void CreateIndexes()
{
{
IndexKeysDefinition<DBRoutLine> keys =
new IndexKeysDefinitionBuilder<DBRoutLine>()
.Descending(d => d.meta.counter);
var indexModel = new CreateIndexModel<DBRoutLine>(
keys, new CreateIndexOptions()
{ Name = "counter" }
);
// create synchronously so index-creation errors aren't silently dropped
_collRouts.Indexes.CreateOne(indexModel);
}
////////////////////////////////////////////////
{
IndexKeysDefinition<DBRoutLine> keys =
new IndexKeysDefinitionBuilder<DBRoutLine>()
.Ascending(d => d.meta.id);
var indexModel = new CreateIndexModel<DBRoutLine>(
keys, new CreateIndexOptions()
{ Name = "id" }
);
// create synchronously so index-creation errors aren't silently dropped
_collRouts.Indexes.CreateOne(indexModel);
}
}
public async Task InsertManyAsync(List<DBRoutLine> list)
{
await _collRouts.InsertManyAsync(list);
}
public async Task<int> GetMaxCount()
{
var last = await _collRouts
.Find(i=> i.meta.counter > 0)
.SortByDescending( i => i.meta.counter).FirstOrDefaultAsync();
if (last == null)
{
return 0;
}
return last.meta.counter;
}
public void Dispose()
{
}
}
}
project repository:
github.com/pruginkad/TestMongo
OK, I found the bug: I set the timestamp only once, when I created the list of documents. The fix is in this loop:
foreach (DBRoutLine line in list)
{
line.meta.counter = ++max_counter;
line.id = ObjectId.GenerateNewId().ToString();
line.meta.id = line.id;
line.timestamp = DateTime.UtcNow; // the missing line
}
I had to update the timestamp on every batch; my mistake. In hindsight the failure mode makes some sense: the duplicate-key error was raised on the internal bucket collection (system.buckets.TSTable), and a time-series collection groups documents into buckets by time, so inserting millions of documents that all carry the identical timestamp apparently ends up producing colliding bucket _ids. Still, it's odd that the exception appeared after roughly every 17M documents and went away after a restart of MongoDB.
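For anyone hitting the same thing, here is a small diagnostic sketch that compares the bucket count with the document count so you can watch how inserts are being bucketed. An assumption on my part: it relies on the internal bucket collection system.buckets.TSTable being readable from the driver, which is an implementation detail of time-series collections:
var buckets = mongoDatabase.GetCollection<BsonDocument>("system.buckets.TSTable");
var bucketCount = await buckets.CountDocumentsAsync(Builders<BsonDocument>.Filter.Empty);
var docCount = await mongoDatabase.GetCollection<DBRoutLine>("TSTable").EstimatedDocumentCountAsync();
Console.WriteLine($"buckets: {bucketCount}, documents: {docCount}");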
Related
I'm trying to insert some test values into an Azure table using a storage connection string. When I perform the insert operation it fails with an error: cannot convert Entities.SyncJob to Microsoft.Azure.CosmosDB.Table.ITableEntity.
public async Task BackupJobsAsync(List<SyncJob> syncJobs)
{
var tableName = "Table";
var table = await GetCloudTableAsync("connectionString", tableName);
if (!await table.ExistsAsync())
{
await table.CreateIfNotExistsAsync();
}
var backupCount = 0;
var batchSize = 100;
var currentSize = 0;
var groups = syncJobs.GroupBy(x => x.PartitionKey).ToList();
foreach (var group in groups)
{
var batchOperation = new TableBatchOperation();
currentSize = 0; // reset per group so a partial final batch doesn't skew the next group
foreach (var job in group)
{
batchOperation.Insert(job);
if (++currentSize == batchSize)
{
var result = await table.ExecuteBatchAsync(batchOperation);
backupCount += result.Count(x => IsSuccessStatusCode(x.HttpStatusCode));
batchOperation = new TableBatchOperation();
currentSize = 0;
}
}
if (batchOperation.Any())
{
var result = await table.ExecuteBatchAsync(batchOperation);
backupCount += result.Count(x => IsSuccessStatusCode(x.HttpStatusCode));
}
}
}
SyncJob class:
namespace Entities
{
public class SyncJob : ITableEntity
{
public SyncJob()
{
}
public SyncJob(string partitionKey, string rowKey)
{
PartitionKey = partitionKey;
RowKey = rowKey;
}
public string PartitionKey { get; set; }
public string RowKey { get; set; }
public string ABC { get; set; }
public string DEF { get; set; }
public string GHI { get; set; }
public string JKL { get; set; }
}
}
I attached an error screenshot for reference. Can anyone let me know how we can overcome this? I have tried casting the value, but the result is the same.
UPDATE:
I just noticed that this code uses the deprecated NuGet package Microsoft.Azure.Cosmos.Table. How do I update it to use the new package Azure.Data.Tables?
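For reference, here is a hedged sketch of what the migration to Azure.Data.Tables could look like. Assumptions: the connection string and table name are the placeholders from the original code; Enumerable.Chunk needs .NET 6+ (otherwise batch manually); and SubmitTransactionAsync replaces TableBatchOperation while keeping the 100-actions-per-partition limit. Incidentally, the original cast error is consistent with SyncJob implementing a different ITableEntity than the one Microsoft.Azure.Cosmos.Table expects (that interface also demands Timestamp, ETag, ReadEntity and WriteEntity); the new SDK's interface is shown below:
using Azure;
using Azure.Data.Tables;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

// Azure.Data.Tables.ITableEntity also requires Timestamp and ETag
public class SyncJob : ITableEntity
{
    public string PartitionKey { get; set; }
    public string RowKey { get; set; }
    public DateTimeOffset? Timestamp { get; set; }
    public ETag ETag { get; set; }
    public string ABC { get; set; }
    public string DEF { get; set; }
    public string GHI { get; set; }
    public string JKL { get; set; }
}

public async Task BackupJobsAsync(List<SyncJob> syncJobs)
{
    var table = new TableClient("connectionString", "Table");
    await table.CreateIfNotExistsAsync();
    foreach (var group in syncJobs.GroupBy(x => x.PartitionKey))
    {
        // a transaction may contain at most 100 actions, all for one partition key
        foreach (var chunk in group.Chunk(100))
        {
            var actions = chunk.Select(job =>
                new TableTransactionAction(TableTransactionActionType.Add, job));
            await table.SubmitTransactionAsync(actions);
        }
    }
}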
I need to subscribe to a Uniswap pair contract's Sync event and get the pair reserves. Here is what I tried:
[Event("Sync")]
class PairSyncEventDTO : IEventDTO
{
[Parameter("uint112", "reserve0")]
public virtual BigInteger Reserve0 { get; set; }
[Parameter("uint112", "reserve1", 2)]
public virtual BigInteger Reserve1 { get; set; }
}
public async Task Start()
{
string uniSwapFactoryAddress = "0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f";
string uniSwapFactoryAbi = GetAbi(Resources.IUniswapV2Factory);
string uniSwapPairAbi = GetAbi(Resources.IUniswapV2Pair);
var web3 = new Web3("https://mainnet.infura.io/v3/fff");
Contract uniSwapFactoryContract = web3.Eth.GetContract(uniSwapFactoryAbi, uniSwapFactoryAddress);
Function uniSwapGetPairFunction = uniSwapFactoryContract.GetFunction("getPair");
string daiAddress = "0x6b175474e89094c44da98b954eedeac495271d0f";
string wethAddress = "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2";
string pairContractAddress = await uniSwapGetPairFunction.CallAsync<string>(wethAddress, daiAddress);
Contract pairContract = web3.Eth.GetContract(uniSwapPairAbi, pairContractAddress);
Event pairSyncEvent = pairContract.GetEvent("Sync");
NewFilterInput pairSyncFilter = pairSyncEvent.EventABI.CreateFilterInput();
using (var client = new StreamingWebSocketClient("wss://mainnet.infura.io/ws/v3/fff"))
{
var subscription = new EthLogsObservableSubscription(client);
subscription.GetSubscriptionDataResponsesAsObservable().
Subscribe(log =>
{
try
{
EventLog<PairSyncEventDTO> decoded = Event<PairSyncEventDTO>.DecodeEvent(log);
if (decoded != null)
{
decimal reserve0 = Web3.Convert.FromWei(decoded.Event.Reserve0);
decimal reserve1 = Web3.Convert.FromWei(decoded.Event.Reserve1);
Console.WriteLine($#"Price={reserve0 / reserve1}");
}
else Console.WriteLine(#"Found not standard transfer log");
}
catch (Exception ex)
{
Console.WriteLine(#"Log Address: " + log.Address + #" is not a standard transfer log:", ex.Message);
}
});
await client.StartAsync();
await subscription.SubscribeAsync(pairSyncFilter);
}
}
string GetAbi(byte[] storedContractJson)
{
string json = Encoding.UTF8.GetString(storedContractJson);
JObject contractObject = JObject.Parse(json);
if (!contractObject.TryGetValue("abi", out JToken abiJson)) throw new KeyNotFoundException("abi object was not found in stored contract json");
return abiJson.ToString();
}
It seems to subscribe, but the Subscribe lambda is never entered.
If I await subscription.SubscribeAsync() without any filter, the lambda still isn't entered.
Yet after executing SubscribeAsync, the process puts a significant load on the CPU.
What am I doing wrong? Why isn't the Subscribe lambda called, and why the CPU load?
I don't see a major issue with your code, but as I don't have the ABIs, this is an example without them. The Sync event does not fire all the time, so that might have been the issue.
using Nethereum.ABI.FunctionEncoding.Attributes;
using Nethereum.Contracts;
using Nethereum.JsonRpc.WebSocketStreamingClient;
using Nethereum.RPC.Reactive.Eth.Subscriptions;
using System;
using System.Collections.Generic;
using System.Numerics;
using System.Text;
using System.Threading.Tasks;
using Nethereum.RPC.Eth.DTOs;
using Nethereum.RPC.Web3;
using Newtonsoft.Json.Linq;
namespace Nethereum.WSLogStreamingUniswapSample
{
class Program
{
[Event("Sync")]
class PairSyncEventDTO : IEventDTO
{
[Parameter("uint112", "reserve0")]
public virtual BigInteger Reserve0 { get; set; }
[Parameter("uint112", "reserve1", 2)]
public virtual BigInteger Reserve1 { get; set; }
}
public partial class GetPairFunction : GetPairFunctionBase { }
[Function("getPair", "address")]
public class GetPairFunctionBase : FunctionMessage
{
[Parameter("address", "tokenA", 1)]
public virtual string TokenA { get; set; }
[Parameter("address", "tokenB", 2)]
public virtual string TokenB { get; set; }
}
public static async Task Main()
{
string uniSwapFactoryAddress = "0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f";
var web3 = new Web3.Web3("https://mainnet.infura.io/v3/7238211010344719ad14a89db874158c");
string daiAddress = "0x6b175474e89094c44da98b954eedeac495271d0f";
string wethAddress = "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2";
var pairContractAddress = await web3.Eth.GetContractQueryHandler<GetPairFunction>()
.QueryAsync<string>(uniSwapFactoryAddress,
new GetPairFunction() {TokenA = daiAddress, TokenB = wethAddress});
var filter = web3.Eth.GetEvent<PairSyncEventDTO>(pairContractAddress).CreateFilterInput();
using (var client = new StreamingWebSocketClient("wss://mainnet.infura.io/ws/v3/7238211010344719ad14a89db874158c"))
{
var subscription = new EthLogsObservableSubscription(client);
subscription.GetSubscriptionDataResponsesAsObservable().
Subscribe(log =>
{
try
{
EventLog<PairSyncEventDTO> decoded = Event<PairSyncEventDTO>.DecodeEvent(log);
if (decoded != null)
{
decimal reserve0 = Web3.Web3.Convert.FromWei(decoded.Event.Reserve0);
decimal reserve1 = Web3.Web3.Convert.FromWei(decoded.Event.Reserve1);
Console.WriteLine($#"Price={reserve0 / reserve1}");
}
else Console.WriteLine(#"Found not standard transfer log");
}
catch (Exception ex)
{
Console.WriteLine(#"Log Address: " + log.Address + #" is not a standard transfer log:", ex.Message);
}
});
await client.StartAsync();
subscription.GetSubscribeResponseAsObservable().Subscribe(id => Console.WriteLine($"Subscribed with id: {id}"));
await subscription.SubscribeAsync(filter);
Console.ReadLine();
await subscription.UnsubscribeAsync();
}
}
}
To keep the connection alive with Infura, you might want to ping it every so often.
Example:
while (true)
{
var handler = new EthBlockNumberObservableHandler(client);
handler.GetResponseAsObservable().Subscribe(x => Console.WriteLine(x.Value));
await handler.SendRequestAsync();
await Task.Delay(30000); // avoid blocking the thread in async code
}
The code below works when I use Task.Delay(500000), but with Task.Delay(5000) it gives no results, because the duration is too short for the expected output to be produced. I am looking for a way to redesign the code so it works without Task.Delay(), since the response time may vary between executions. How should I do it?
Note: I modified the code with the method suggested in Roald's answer. The earlier issue was that Task.Delay() can't be handled asynchronously for the change feed; the alternative is to use the pull model instead of the push model.
using Microsoft.Azure.Cosmos;
using Microsoft.Azure.Documents.ChangeFeedProcessor;
using System;
using System.Collections.Generic;
using System.Net;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
namespace ConsoleApp1
{
public class ChangeFeedProcessorOptions
{
public int BufferCapacity { get; set; }
public string ProcessorName { get; set; }
public Container LeaseContainer { get; set; }
public string InstanceName { get; set; }
public DateTime StartTime { get; set; }
}
static class Program // must be static (and non-nested) to host the GetChangeFeed extension method
{
static async Task Main()
{
var client = new CosmosClient("AccountEndpoint = https://test.documents.azure.com:443/;AccountKey=oaEOA==;");
var database = client.GetDatabase("testDatabase");
var container = database.GetContainer("testContainer");
var options = new ChangeFeedProcessorOptions
{
BufferCapacity = 10,
InstanceName = "ChangeFeedInstanceName",
LeaseContainer = database.GetContainer("leases"),
ProcessorName = "ChangeFeedProcessorName",
StartTime = DateTime.Now.AddDays(-7).ToUniversalTime()
};
var count = 0;
await foreach (var doc in container.GetChangeFeed<dynamic>(options)) // dynamic as a placeholder document type
{
Console.WriteLine(doc);
count++;
if (count == 6)
{
break;
}
}
}
public static async IAsyncEnumerable<T> GetChangeFeed<T>(this Container self, ChangeFeedProcessorOptions options, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
var channel = Channel.CreateBounded<T>(new BoundedChannelOptions(options.BufferCapacity)
{
FullMode = BoundedChannelFullMode.Wait,
SingleReader = true,
SingleWriter = true
});
var processor = self
.GetChangeFeedProcessorBuilder<T>(options.ProcessorName, async (items, cancellation) =>
{
foreach (var item in items)
{
await channel.Writer.WriteAsync(item, cancellation);
}
})
.WithLeaseContainer(options.LeaseContainer)
.WithInstanceName(options.InstanceName)
.WithStartTime(options.StartTime)
.Build();
await processor.StartAsync();
try
{
await foreach (var item in channel.Reader.ReadAllAsync(cancellationToken))
{
yield return item;
}
}
finally
{
await processor.StopAsync();
}
}
}
}
I think the most flexible way is to first turn the change feed into an IAsyncEnumerable, so that you can then use LINQ or some straightforward imperative code to process it.
You can get the IAsyncEnumerable using this extension method.
SDK version <= 3.15.0
public record ChangeFeedProcessorOptions
{
public int BufferCapacity { get; init; }
public string ProcessorName { get; init; }
public Container LeaseContainer { get; init; }
public string InstanceName { get; init; }
public DateTime StartTime { get; init; }
}
public static async IAsyncEnumerable<T> GetChangeFeed<T>(this Container self, ChangeFeedProcessorOptions options, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
var channel = Channel.CreateBounded<T>(new BoundedChannelOptions(options.BufferCapacity)
{
FullMode = BoundedChannelFullMode.Wait,
SingleReader = true,
SingleWriter = true
});
var processor = self
.GetChangeFeedProcessorBuilder<T>(options.ProcessorName, async (items, cancellation) =>
{
foreach (var item in items)
{
await channel.Writer.WriteAsync(item, cancellation);
}
})
.WithLeaseContainer(options.LeaseContainer)
.WithInstanceName(options.InstanceName)
.WithStartTime(options.StartTime)
.Build();
await processor.StartAsync();
try
{
await foreach (var item in channel.Reader.ReadAllAsync(cancellationToken))
{
yield return item;
}
}
finally
{
await processor.StopAsync();
}
}
SDK version > 3.15.0
public record ChangeFeedProcessorOptions
{
public DateTime StartTime { get; init; }
public TimeSpan PollInterval { get; init; }
}
public static async IAsyncEnumerable<T> GetChangeFeed<T>(this Container self, ChangeFeedProcessorOptions options, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
var iterator = self.GetChangeFeedIterator<T>(ChangeFeedStartFrom.Time(options.StartTime));
while (iterator.HasMoreResults)
{
FeedResponse<T> items;
try
{
items = await iterator.ReadNextAsync(cancellationToken);
}
catch (CosmosException ex) when (ex.StatusCode == HttpStatusCode.NotModified)
{
// No changes
await Task.Delay(options.PollInterval, cancellationToken);
continue;
}
foreach (var item in items)
{
yield return item;
}
}
}
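For this pull-model variant, usage is a bit simpler, since the record only carries StartTime and PollInterval. A sketch (MyDocument is a hypothetical POCO, and container is the same Container as in the Main example):
var pullOptions = new ChangeFeedProcessorOptions
{
    StartTime = DateTime.UtcNow.AddDays(-7),
    PollInterval = TimeSpan.FromSeconds(5)
};
await foreach (var doc in container.GetChangeFeed<MyDocument>(pullOptions))
{
    Console.WriteLine(doc);
}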
The <= 3.15.0 variant is used like this:
static async Task Main()
{
var client = new CosmosClient("AccountEndpoint = https://test.documents.azure.com:443/;AccountKey=oaEOA==;");
var database = client.GetDatabase("testDatabase");
var container = database.GetContainer("testContainer");
var options = new ChangeFeedProcessorOptions
{
BufferCapacity = 10,
InstanceName = ChangeFeedInstanceName,
LeaseContainer = database.GetContainer("leases"),
ProcessorName = ChangeFeedProcessorName,
StartTime = DateTime.Now.Subtract(MaxAge).ToUniversalTime()
};
var count = 0;
await foreach (var doc in container.GetChangeFeed<Recording>(options))
{
WriteObject(doc, enumerateCollection: true);
count++;
if (count == 6)
{
break;
}
}
}
Or, even better, if you add System.Linq.Async:
await foreach (var doc in container.GetChangeFeed<Recording>(options).Take(6))
{
WriteObject(doc, enumerateCollection: true);
}
For a different implementation you can also take a look here; it uses two semaphores instead of a Channel to achieve the same result.
Async code in PowerShell cmdlets
The issues you are having are due to the fact that in a cmdlet, calls to WriteObject, WriteVerbose, WriteWarning, etc. are required to come from the main thread.
To solve this you need to run a message pump in ProcessRecord and use it to post back to the main thread whenever you need to call any of those methods, exactly as you would in WinForms or WPF using the Dispatcher.
A library that takes care of this is PowerShellAsync; using it, your code would become:
[Cmdlet(VerbsCommon.Get, "ChangedRecording")]
[OutputType(typeof(Recording))]
public class SyncRecording : AsyncCmdlet
{
// ...
protected override async Task ProcessRecordAsync()
{
var container = ...;
await foreach (var doc in container.GetChangeFeed<Recording>(options).Take(6))
{
WriteObject(doc, enumerateCollection: true);
}
}
}
I am working on a custom filter which should accomplish a simple thing: all my APIs are wrapped in a 'Response' object, and I want to fill in all its properties from the filter. This is the code I have for the filter:
public class MeteringFilter : IActionFilter
{
public Task<HttpResponseMessage> ExecuteActionFilterAsync(
HttpActionContext actionContext,
CancellationToken cancellationToken,
Func<Task<HttpResponseMessage>> continuation)
{
var attribute =
actionContext.ActionDescriptor.GetCustomAttributes<MeterAttribute>(true).SingleOrDefault() ??
actionContext.ActionDescriptor.ControllerDescriptor.GetCustomAttributes<MeterAttribute>(true).SingleOrDefault();
if (attribute == null) return continuation();
var operation = actionContext.ActionDescriptor.ActionName;
var user = actionContext.RequestContext.Principal.Identity.Name;
var started = DateTimeOffset.Now;
return continuation().ContinueWith(t =>
{
var completed = DateTimeOffset.Now;
var duration = completed - started;
var c = t.Result.Content;
// This is the code which does not work but which I would like to have:
// when the debugger stops here I can see Content.Value holding my object,
// but I can't access the property like below (HttpContent itself has no Value)
var cv = t.Result.Content.Value as Response<object>;
return t.Result;
});
}
public bool AllowMultiple => true;
}
I found a similar question where it was suggested to do var c = t.Result.Content.ReadAsAsync(typeof(Response<>)); but I can't do that here because I can't make the lambda async in this case.
Any suggestion on how to get the typed object out of HttpContent so I can assign the properties before the response returns to the caller?
Here is Response<T>:
public class Response<T>
{
public string Url { get; set; }
public DateTime ServerTime { get; set; }
public TimeSpan TimeTook { get; set; }
public T Data { get; set; }
public Error Error { get; set; }
}
EDIT
Here is how the code looks now. I do get access to the object, but the web service does not respond to the client with the data I fill in; it seems this code executes after serialization/media formatting has already taken place.
I guess the question becomes: how do I add a generic "handler" that runs before the web service returns, but still has access to the beginning of the call (so I can measure times, see request params, etc.)?
return continuation().ContinueWith(t =>
{
var c = t.Result.Content.ReadAsAsync(typeof(Response<object>), cancellationToken);
if (c.Result is Response<object> response)
{
Debug.WriteLine("Adding times");
response.ServerTime = startedOn;
response.TimeTook = DateTime.Now - startedOn;
}
return t.Result;
}, cancellationToken);
EDIT 2:
Here is a sample Web API method which I want to intercept:
[HttpGet]
public Response<LookupResponseData> Carrier(int? key = null, string id = "")
{
return this.GetKeyIdBundleForLookup("Carriers", key, id);
}
private Response<LookupResponseData> GetKeyIdBundleForLookup(string lookupId, int? key, string id)
{
if (!key.HasValue && string.IsNullOrEmpty(id))
return new Response<LookupResponseData>
{
Error = new Error { Code = ErrorCodes.InvalidQueryParameter, Message = "Either key or id must be specified" }
};
var r = new Response<LookupResponseData>();
try
{
this.LookupService.GetKeyIdDescription(this.AccountId, lookupId, key, id, out var keyResult, out var idResult, out var description);
if (!keyResult.HasValue)
return new Response<LookupResponseData>
{
Error = new Error { Code = ErrorCodes.InvalidOrMissingRecord, Message = "No record found for parameters specified" }
};
r.Data = new LookupResponseData { Key = keyResult.Value, Id = idResult, Description = description };
}
catch (Exception ex)
{
this.LoggerService.Log(this.AccountId, ex);
return new Response<LookupResponseData>
{
Error = new Error { Code = ErrorCodes.Unknown, Message = "API Call failed, please contact support. Details logged." }
};
}
return r;
}
All my APIs wrapped into 'Response' object.
First, you can simplify your results by creating implicit operators:
public class Response
{
public string Url { get; set; }
public DateTime ServerTime { get; set; }
public TimeSpan TimeTook { get; set; }
}
public class Response<T> : Response
{
public T Data { get; set; }
public Error Error { get; set; }
public static implicit operator Response<T>(T data)
{
var result = new Response<T>
{
Data = data,
};
return result;
}
public static implicit operator Response<T>(Error error)
{
var result = new Response<T>
{
Error = error,
};
return result;
}
}
Now it is easier to avoid the repeated code for creating the response:
private Response<LookupResponseData> GetKeyIdBundleForLookup(
string lookupId, int? key, string id)
{
if (!key.HasValue && string.IsNullOrEmpty(id))
return new Error
{
Code = ErrorCodes.InvalidQueryParameter,
Message = "Either key or id must be specified"
};
try
{
this.LookupService.GetKeyIdDescription(this.AccountId,
lookupId,
key,
id,
out var keyResult,
out var idResult,
out var description);
if (!keyResult.HasValue)
return new Error
{
Code = ErrorCodes.InvalidOrMissingRecord,
Message = "No record found for parameters specified"
};
return new LookupResponseData
{
Key = keyResult.Value,
Id = idResult, Description = description
};
}
catch (Exception ex)
{
this.LoggerService.Log(this.AccountId, ex);
return new Error
{
Code = ErrorCodes.Unknown,
Message = "API Call failed, please contact support. Details logged." }
};
}
}
Then you can create an ASP.NET Core async action filter:
public class SampleAsyncActionFilter : IAsyncActionFilter
{
    public async Task OnActionExecutionAsync(
        ActionExecutingContext context,
        ActionExecutionDelegate next)
    {
        // do something before the action executes
        var started = DateTimeOffset.Now;
        // the action executes
        var resultContext = await next();
        // do something after the action executes; resultContext.Result is now set.
        // The action's return value is wrapped in an ObjectResult.
        if (resultContext.Result is ObjectResult objectResult
            && objectResult.Value is Response response)
        {
            response.ServerTime = started.DateTime;
            response.TimeTook = DateTimeOffset.Now - started;
        }
    }
}
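A minimal registration sketch for the Core filter (an assumption: a controllers-based app configured in Program.cs or Startup.ConfigureServices):
services.AddControllers(options => options.Filters.Add<SampleAsyncActionFilter>());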
Or Non-Core (MVC):
public class SampleActionFilter : ActionFilterAttribute
{
    private const string TimerKey = nameof(SampleActionFilter) + "_TimerKey";
    public override void OnActionExecuting(ActionExecutingContext context)
    {
        context.HttpContext.Items[TimerKey] = DateTimeOffset.Now;
    }
    public override void OnActionExecuted(ActionExecutedContext context)
    {
        // assumes the action returned Json(response); adjust for other result types
        if (context.Result is JsonResult jsonResult
            && jsonResult.Data is Response response
            && context.HttpContext.Items[TimerKey] is DateTimeOffset started)
        {
            response.ServerTime = started.DateTime;
            response.TimeTook = DateTimeOffset.Now - started;
        }
    }
}
Or Non-Core (WebApi):
public class SampleActionFilter : ActionFilterAttribute
{
    private const string TimerKey = nameof(SampleActionFilter) + "_TimerKey";
    public override void OnActionExecuting(HttpActionContext context)
    {
        context.Request.Properties[TimerKey] = DateTimeOffset.Now;
    }
    public override void OnActionExecuted(HttpActionExecutedContext context)
    {
        // HttpActionExecutedContext has no Result; the action's return value
        // is carried in the response body as ObjectContent
        if (context.Response?.Content is ObjectContent objectContent
            && objectContent.Value is Response response
            && context.Request.Properties[TimerKey] is DateTimeOffset started)
        {
            response.ServerTime = started.DateTime;
            response.TimeTook = DateTimeOffset.Now - started;
        }
    }
}
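And a registration sketch for the Web API variant (assuming the usual WebApiConfig.Register entry point):
public static class WebApiConfig
{
    public static void Register(HttpConfiguration config)
    {
        // apply the timing filter to every action
        config.Filters.Add(new SampleActionFilter());
        // ...routes, formatters, etc.
    }
}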
I tweaked your code; I hope it helps.
I couldn't fully check it for syntax errors, though.
return await continuation().ContinueWith(async t =>
{
var result = await t;
var c = await result.Content.ReadAsAsync(typeof(Response<object>), cancellationToken);
if (c is Response<object> response)
{
Debug.WriteLine("Adding times");
response.ServerTime = startedOn;
response.TimeTook = DateTime.Now - startedOn;
}
return result;
}, cancellationToken).Unwrap(); // Unwrap the nested Task produced by the async lambda
I have cut the code down from my project significantly so it's copy-and-pasteable, but if you want to debug it in a console project it will need the NuGet package: Install-Package MsgPack.Cli.
OK, below I have commented the line that is the issue. All I want to know is why the list is forcing duplicates into the _outgoingMessageQueue queue. Is this some kind of captured-variable conundrum? Please give as much detail as possible.
using MsgPack.Serialization;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Collections;
namespace QueueTest
{
public class Message
{
public string Data { get; set; }
}
public class InternalFactoryMsg<T>
{
public T Data { get; set; }
public string Group { get; set; }
public List<byte[]> ReturnIds { get; set; }
}
public class FactoryHelpers
{
public static List<byte[]> GetReturnIdentities(List<byte[]> messageBytes, byte[] identity)
{
var response = new List<byte[]>();
foreach (byte[] part in messageBytes)
{
if (part != null && part.Length > 0)
response.Add(part);
else
break;
}
// may not need this with good routing, but can avoid errors
if (messageBytes.Count > 0 && messageBytes[0] == identity)
{
messageBytes.RemoveAt(0);
Console.WriteLine("[GetReturnIdentities]: Removed identity from start, check your routing!");
}
// no return identities, send empty list as these bytes will be the
// app message and command identity couple
if (response.Count == messageBytes.Count)
return new List<byte[]>();
return response;
}
public static byte[] SerializeData<T>(T appMsg)
{
var serializer = MessagePackSerializer.Get<T>();
using (var byteStream = new MemoryStream())
{
serializer.Pack(byteStream, appMsg);
return byteStream.ToArray();
}
}
public static T DeserializeData<T>(byte[] bytes)
{
try
{
var serializer = MessagePackSerializer.Get<T>();
using (var byteStream = new MemoryStream(bytes))
{
return serializer.Unpack(byteStream);
}
}
catch (Exception ex)
{
return default(T);
}
}
}
public class Factory: FactoryHelpers
{
protected ConcurrentQueue<KeyValuePair<string, List<byte[]>>> _outgoingMessageQueue { get; set; }
public ConcurrentQueue<KeyValuePair<string, List<byte[]>>> IncomingMessageQueue { get; set; }
public Factory()
{
_outgoingMessageQueue = new ConcurrentQueue<KeyValuePair<string, List<byte[]>>>();
IncomingMessageQueue = new ConcurrentQueue<KeyValuePair<string, List<byte[]>>>();
// add fake incoming message
var byteMsg = new List<byte[]>()
{
Encoding.Unicode.GetBytes("socket1"),
Encoding.Unicode.GetBytes(""),
Encoding.Unicode.GetBytes("data")
};
var msg = new KeyValuePair<string, List<byte[]>>("socket1", byteMsg);
IncomingMessageQueue.Enqueue(msg);
}
public void AddMessage<T>(InternalFactoryMsg<T> msg)
{
var msgBytes = msg.ReturnIds ?? new List<byte[]>();
msgBytes.Add(new byte[0]);
msgBytes.Add(Factory.SerializeData<T>(msg.Data));
_outgoingMessageQueue.Enqueue(new KeyValuePair<string, List<byte[]>>("socket2", msgBytes));
}
public List<KeyValuePair<string, List<byte[]>>> GetQueue()
{
return _outgoingMessageQueue.ToList();
}
public static T GetDataFromBytes<T>(List<byte[]> msgBytes)
{
// ignoring null checks etc
return DeserializeData<T>(msgBytes.Last());
}
}
public static class MessageLayer
{
public static Factory Factory = new Factory();
public static void Init()
{
Task.Factory.StartNew(u =>
{
while(true)
{
KeyValuePair<string, List<byte[]>> msg;
if(Factory.IncomingMessageQueue.TryDequeue(out msg))
{
var data = msg.Value.Last();
var returnIds = Factory.GetReturnIdentities(msg.Value, Encoding.Unicode.GetBytes(msg.Key));
IncomingCommands.HandleDataCommand(data, "test grp", returnIds);
}
// nice and slow for simulation
Thread.Sleep(400);
}
}, TaskCreationOptions.LongRunning);
}
public static void SendMessage(Message msg, string group, List<byte[]> returnIds)
{
var intMsg = new InternalFactoryMsg<Message>();
intMsg.Data = msg;
intMsg.Group = group;
intMsg.ReturnIds = returnIds;
Factory.AddMessage<Message>(intMsg);
}
}
public static class DataAccessor
{
public static List<Message> GetData(byte[] data)
{
return new List<Message>()
{
new Message() { Data = "magic" },
new Message() { Data = "data!" }
};
}
}
public static class IncomingCommands
{
public static void HandleDataCommand(byte[] data, string group, List<byte[]> returnIds)
{
List<Message> result;
// does big switch, gets data response
result = DataAccessor.GetData(data);
foreach (Message msg in result)
{
var local = msg;
var fix = new List<byte[]>(returnIds);
// THIS IS THE ISSUE
// comment out the following line and uncomment the one below to fix it
// but... why??? :O !!!
MessageLayer.SendMessage(local, group, returnIds);
//MessageLayer.SendMessage(local, group, fix);
}
// check the queue
Console.WriteLine("---------------------------");
Console.WriteLine("::Checking queue contents::");
var msgs = MessageLayer.Factory.GetQueue();
foreach(var m in msgs)
{
var check = Factory.GetDataFromBytes<Message>(m.Value);
Console.WriteLine("data -> " + check.Data);
}
}
}
public class Program
{
static void Main(string[] args)
{
MessageLayer.Init();
while(true)
{
Thread.Sleep(400);
}
}
}
}
If you can't work it out, please up vote so it gets attention. Thanks
The reason was
var msgBytes = msg.ReturnIds ?? new List<byte[]>();
This isn't closure capture; it's plain reference aliasing. The ?? operator doesn't copy anything, so msgBytes is the very same List<byte[]> instance as msg.ReturnIds, and that instance is the single returnIds list shared by every iteration of the foreach in HandleDataCommand. Each AddMessage call therefore appends the empty frame and the serialized payload to that one shared list, and every KeyValuePair enqueued into _outgoingMessageQueue holds a reference to it, which is why all queued messages appear to contain duplicated data. Copying the list first, as the commented-out fix line does, breaks the aliasing.
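A defensive copy inside AddMessage fixes it at the source; this is just the question's own commented-out fix moved to where the mutation happens:
public void AddMessage<T>(InternalFactoryMsg<T> msg)
{
    // copy instead of aliasing: mutating msgBytes below must not touch the
    // caller's list, which is shared across all iterations of the send loop
    var msgBytes = msg.ReturnIds == null
        ? new List<byte[]>()
        : new List<byte[]>(msg.ReturnIds);
    msgBytes.Add(new byte[0]);
    msgBytes.Add(Factory.SerializeData<T>(msg.Data));
    _outgoingMessageQueue.Enqueue(new KeyValuePair<string, List<byte[]>>("socket2", msgBytes));
}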