I need to subscribe to the Uniswap pair contract's Sync event and read the pair reserves. Here is what I tried:
[Event("Sync")]
class PairSyncEventDTO : IEventDTO
{
[Parameter("uint112", "reserve0")]
public virtual BigInteger Reserve0 { get; set; }
[Parameter("uint112", "reserve1", 2)]
public virtual BigInteger Reserve1 { get; set; }
}
public async Task Start()
{
string uniSwapFactoryAddress = "0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f";
string uniSwapFactoryAbi = GetAbi(Resources.IUniswapV2Factory);
string uniSwapPairAbi = GetAbi(Resources.IUniswapV2Pair);
var web3 = new Web3("https://mainnet.infura.io/v3/fff");
Contract uniSwapFactoryContract = web3.Eth.GetContract(uniSwapFactoryAbi, uniSwapFactoryAddress);
Function uniSwapGetPairFunction = uniSwapFactoryContract.GetFunction("getPair");
string daiAddress = "0x6b175474e89094c44da98b954eedeac495271d0f";
string wethAddress = "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2";
string pairContractAddress = await uniSwapGetPairFunction.CallAsync<string>(wethAddress, daiAddress);
Contract pairContract = web3.Eth.GetContract(uniSwapPairAbi, pairContractAddress);
Event pairSyncEvent = pairContract.GetEvent("Sync");
NewFilterInput pairSyncFilter = pairSyncEvent.EventABI.CreateFilterInput();
using (var client = new StreamingWebSocketClient("wss://mainnet.infura.io/ws/v3/fff"))
{
var subscription = new EthLogsObservableSubscription(client);
subscription.GetSubscriptionDataResponsesAsObservable().
Subscribe(log =>
{
try
{
EventLog<PairSyncEventDTO> decoded = Event<PairSyncEventDTO>.DecodeEvent(log);
if (decoded != null)
{
decimal reserve0 = Web3.Convert.FromWei(decoded.Event.Reserve0);
decimal reserve1 = Web3.Convert.FromWei(decoded.Event.Reserve1);
Console.WriteLine($#"Price={reserve0 / reserve1}");
}
else Console.WriteLine(#"Found not standard transfer log");
}
catch (Exception ex)
{
Console.WriteLine(#"Log Address: " + log.Address + #" is not a standard transfer log:", ex.Message);
}
});
await client.StartAsync();
await subscription.SubscribeAsync(pairSyncFilter);
}
}
string GetAbi(byte[] storedContractJson)
{
string json = Encoding.UTF8.GetString(storedContractJson);
JObject contractObject = JObject.Parse(json);
if (!contractObject.TryGetValue("abi", out JToken abiJson)) throw new KeyNotFoundException("abi object was not found in stored contract json");
return abiJson.ToString();
}
It seems to subscribe, but the Subscribe lambda is never entered.
If I await subscription.SubscribeAsync() without any filter, the lambda is still never entered.
But after SubscribeAsync executes, the process puts a significant load on the CPU.
What am I doing wrong? Why isn't the Subscribe lambda called?
Why does it load the CPU?
I don't see a major issue with your code, but since I don't have the ABIs, this is an example that works without them. The "Sync" event does not fire all the time, so that might have been the issue.
using Nethereum.ABI.FunctionEncoding.Attributes;
using Nethereum.Contracts;
using Nethereum.JsonRpc.WebSocketStreamingClient;
using Nethereum.RPC.Reactive.Eth.Subscriptions;
using System;
using System.Collections.Generic;
using System.Numerics;
using System.Text;
using System.Threading.Tasks;
using Nethereum.RPC.Eth.DTOs;
using Nethereum.RPC.Web3;
using Newtonsoft.Json.Linq;
namespace Nethereum.WSLogStreamingUniswapSample
{
class Program
{
[Event("Sync")]
class PairSyncEventDTO : IEventDTO
{
[Parameter("uint112", "reserve0")]
public virtual BigInteger Reserve0 { get; set; }
[Parameter("uint112", "reserve1", 2)]
public virtual BigInteger Reserve1 { get; set; }
}
public partial class GetPairFunction : GetPairFunctionBase { }
[Function("getPair", "address")]
public class GetPairFunctionBase : FunctionMessage
{
[Parameter("address", "tokenA", 1)]
public virtual string TokenA { get; set; }
[Parameter("address", "tokenB", 2)]
public virtual string TokenB { get; set; }
}
public static async Task Main()
{
string uniSwapFactoryAddress = "0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f";
var web3 = new Web3.Web3("https://mainnet.infura.io/v3/7238211010344719ad14a89db874158c");
string daiAddress = "0x6b175474e89094c44da98b954eedeac495271d0f";
string wethAddress = "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2";
var pairContractAddress = await web3.Eth.GetContractQueryHandler<GetPairFunction>()
.QueryAsync<string>(uniSwapFactoryAddress,
new GetPairFunction() {TokenA = daiAddress, TokenB = wethAddress});
var filter = web3.Eth.GetEvent<PairSyncEventDTO>(pairContractAddress).CreateFilterInput();
using (var client = new StreamingWebSocketClient("wss://mainnet.infura.io/ws/v3/7238211010344719ad14a89db874158c"))
{
var subscription = new EthLogsObservableSubscription(client);
subscription.GetSubscriptionDataResponsesAsObservable().
Subscribe(log =>
{
try
{
EventLog<PairSyncEventDTO> decoded = Event<PairSyncEventDTO>.DecodeEvent(log);
if (decoded != null)
{
decimal reserve0 = Web3.Web3.Convert.FromWei(decoded.Event.Reserve0);
decimal reserve1 = Web3.Web3.Convert.FromWei(decoded.Event.Reserve1);
Console.WriteLine($#"Price={reserve0 / reserve1}");
}
else Console.WriteLine("Found not standard transfer log");
}
catch (Exception ex)
{
Console.WriteLine(#"Log Address: " + log.Address + #" is not a standard transfer log:", ex.Message);
}
});
await client.StartAsync();
subscription.GetSubscribeResponseAsObservable().Subscribe(id => Console.WriteLine($"Subscribed with id: {id}"));
await subscription.SubscribeAsync(filter);
Console.ReadLine();
await subscription.UnsubscribeAsync();
}
}
}
To keep the connection alive with Infura, you might want to ping it every so often.
Example
while (true)
{
var handler = new EthBlockNumberObservableHandler(client);
handler.GetResponseAsObservable().Subscribe(x => Console.WriteLine(x.Value));
await handler.SendRequestAsync();
Thread.Sleep(30000);
}
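If this runs inside an async method (like the Main above), a non-blocking variant of the same ping loop can use Task.Delay instead of Thread.Sleep; a minimal sketch:
while (true)
{
// Periodically request the block number so Infura sees traffic on the websocket.
var handler = new EthBlockNumberObservableHandler(client);
handler.GetResponseAsObservable().Subscribe(x => Console.WriteLine(x.Value));
await handler.SendRequestAsync();
await Task.Delay(30000); // non-blocking pause between pings
}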
I want to test whether MongoDB can handle a collection of up to 50,000,000,000 documents.
So I insert 10K elements every second using this method:
public async Task InsertManyAsync(List<DBRoutLine> list)
{
await _collRouts.InsertManyAsync(list);
}
Data looks like this:
namespace DbLayer.Models
{
public class DBRoutLineMeta
{
[BsonId]
[BsonRepresentation(BsonType.ObjectId)]
public string id { get; set; }
public int counter { get; set; }
}
[BsonIgnoreExtraElements]
public class DBRoutLine
{
[BsonId]
[BsonRepresentation(BsonType.ObjectId)]
public string id { get; set; }
public DBRoutLineMeta meta { get; set; } = new DBRoutLineMeta();
public DateTime timestamp { get; set; } = DateTime.UtcNow;
public string some_data { get; set; } = DateTime.Now.ToString();
}
}
The id members are not actually required, but I have them just for testing.
After some time I get an exception like this:
"A bulk write operation resulted in one or more errors. WriteErrors: [ { Category : "DuplicateKey", Code : 11000, Message : "E11000 duplicate key error collection: TSTest.system.buckets.TSTable dup key: { _id: ObjectId('634e87301297fa65b7df9923') }" } ]."
It can also look like this:
"time-series insert failed: TSTest.TSTable :: caused by :: Expected
It never recovers from the error, even if I recreate the connection to the MongoDB server. Only an application restart allows records to be inserted again.
Test code:
using DbLayer.Models;
using DbLayer.Services;
using MongoDB.Bson;
Console.WriteLine("Hello, World!");
var service = new RoutService();
try
{
CancellationTokenSource tokenSource = new CancellationTokenSource();
CancellationToken token = tokenSource.Token;
var list = new List<DBRoutLine>();
for (int i = 0; i < 10000; i++)
{
DBRoutLine line = new DBRoutLine();
list.Add(line);
}
Task task = Task.Run(async () => {
int max_counter = await service.GetMaxCount();
bool recover = false;
while (!token.IsCancellationRequested)
{
try
{
if (!recover)
{
foreach (DBRoutLine line in list)
{
line.meta.counter = ++max_counter;
line.id = ObjectId.GenerateNewId().ToString();
line.meta.id = line.id;
}
}
var t1 = DateTime.Now;
await service.InsertManyAsync(list);
var t2 = DateTime.Now;
max_counter = await service.GetMaxCount();
var t3 = DateTime.Now;
Console
.WriteLine(
$"{max_counter}->Insert:{(int)(t2 - t1).TotalMilliseconds}, GetMax:{(int)(t3 - t2).TotalMilliseconds}");
recover = false;
}
catch(Exception ex)
{
recover = true;
await Task.Delay(3000);
Console.WriteLine(ex.Message);
service = new RoutService();
max_counter = await service.GetMaxCount();
}
}
}, token);
Console.WriteLine("Press any key to stop emulation\n");
Console.ReadKey();
tokenSource.Cancel();
Task.WaitAll(task);
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
}
Service code:
using DbLayer.Models;
using MongoDB.Bson;
using MongoDB.Driver;
namespace DbLayer.Services
{
public class RoutService:IDisposable
{
private readonly IMongoCollection<DBRoutLine> _collRouts;
private readonly MongoClient _mongoClient;
private readonly string CollName = "TSTable";
public RoutService(
)
{
var ConnectionString = "mongodb://mongoservice:27017";
_mongoClient = new MongoClient(
ConnectionString);
var mongoDatabase = _mongoClient.GetDatabase(
"TSTest");
var filter = new BsonDocument("name", CollName);
var options = new ListCollectionNamesOptions { Filter = filter };
if (!mongoDatabase.ListCollectionNames(options).Any())
{
var createOptions = new CreateCollectionOptions();
var timeField = nameof(DBRoutLine.timestamp);
var metaField = nameof(DBRoutLine.meta);
createOptions.TimeSeriesOptions =
new TimeSeriesOptions(timeField, metaField, TimeSeriesGranularity.Minutes);
mongoDatabase.CreateCollection(
CollName,
createOptions);
}
_collRouts =
mongoDatabase.GetCollection<DBRoutLine>(
CollName
);
CreateIndexes();
}
private void CreateIndexes()
{
{
IndexKeysDefinition<DBRoutLine> keys =
new IndexKeysDefinitionBuilder<DBRoutLine>()
.Descending(d => d.meta.counter);
var indexModel = new CreateIndexModel<DBRoutLine>(
keys, new CreateIndexOptions()
{ Name = "counter" }
);
_collRouts.Indexes.CreateOne(indexModel);
}
////////////////////////////////////////////////
{
IndexKeysDefinition<DBRoutLine> keys =
new IndexKeysDefinitionBuilder<DBRoutLine>()
.Ascending(d => d.meta.id);
var indexModel = new CreateIndexModel<DBRoutLine>(
keys, new CreateIndexOptions()
{ Name = "id" }
);
_collRouts.Indexes.CreateOne(indexModel);
}
}
public async Task InsertManyAsync(List<DBRoutLine> list)
{
await _collRouts.InsertManyAsync(list);
}
public async Task<int> GetMaxCount()
{
var last = await _collRouts
.Find(i=> i.meta.counter > 0)
.SortByDescending( i => i.meta.counter).FirstOrDefaultAsync();
if (last == null)
{
return 0;
}
return last.meta.counter;
}
public void Dispose()
{
}
}
}
project repository:
github.com/pruginkad/TestMongo
OK, I found the bug: I was setting the timestamp only once, when I created the list of documents.
The fix is in this code:
foreach (DBRoutLine line in list)
{
line.meta.counter = ++max_counter;
line.id = ObjectId.GenerateNewId().ToString();
line.meta.id = line.id;
line.timestamp = DateTime.UtcNow;//missing line
}
I had to update the timestamp on every iteration; my mistake.
Anyway, it's kind of strange that the exception happens after roughly every 17M documents and disappears after a restart of MongoDB.
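A side note on the "never recovers" part: with ordered inserts (the driver default), MongoDB stops a batch at the first failed document. If a batch can occasionally contain duplicate keys, an unordered insert attempts every document and reports the failures at the end; a minimal sketch of that option (same collection as above):
// IsOrdered = false: MongoDB attempts every document in the batch and collects
// per-document errors instead of aborting at the first one.
await _collRouts.InsertManyAsync(list, new InsertManyOptions { IsOrdered = false });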
I need to deserialize my JSON data. I am using Newtonsoft.Json for JSON operations. I have tried a lot of methods to deserialize this data, but I failed. By the way, I should summarize my system for better understanding: I post data to an API every minute and it sends a response back, so I am trying to deserialize that response.
What do I need to do to deserialize this JSON and use it like a normal C# object? I want to deserialize the res variable. Thank you for your interest.
Here is the main code
var data = new SendData
{
Readtime = time,
Stationid = new Guid(_stationid),
SoftwareVersion = softwareVersion,
Period = period,
AkisHizi = akisHizi,
AkisHizi_Status = status,
AKM = akm,
AKM_Status = status,
CozunmusOksijen = cozunmusOksijen,
CozunmusOksijen_Status = status,
Debi = debi,
Debi_Status = status,
DesarjDebi = desarjDebi,
DesarjDebi_Status = status,
KOi = koi,
KOi_Status = status,
pH = ph,
pH_Status = status,
Sicaklik = sicaklik,
Sicaklik_Status = status,
Iletkenlik = iletkenlik,
Iletkenlik_Status = status
};
var res = Services.sendData(data);
MessageBox.Show(res.objects.ToString());
Here is the services model PostData method
private ResultStatus<T> PostData<T>(string url, string data) where T : new()
{
try
{
using (var webClient = new WebClient())
{
webClient.Encoding = Encoding.UTF8;
webClient.Headers.Add("AToken", JsonConvert.SerializeObject(new AToken { TicketId = this.TicketId.ToString() }));
var resp = webClient.UploadString(this.Url + url, data);
return JsonConvert.DeserializeObject<ResultStatus<T>>(resp);
}
}
catch (Exception ex)
{
return new ResultStatus<T>
{
message = ex.Message + System.Environment.NewLine + url
};
}
}
Here is the sendData method
public ResultStatus<object> sendData(SendData data)
{
var res = PostData<object>(this.stationType.ToString() + "/SendData", JsonConvert.SerializeObject(data));
return res;
}
Here is the MessageBox result (JSON data)
{
'Period': 1,
'ReadTime':
'2022-08-22714:01:00',
'AKM': 65.73,
'AKM_Status': 1,
'CozunmusOksijen': 0.2,
'CozunmusOksijen_Status': 1,
'Debi': 1.0,
'Debi_Status': 1,
'KOi': 25.1,
'KOi_Status': 1
}
Your JSON is probably,
{
"Period": 1,
"ReadTime": "2022-08-22T14:01:00",
"AKM": 65.73,
"AKM_Status": 1,
"CozunmusOksijen": 0.2,
"CozunmusOksijen_Status": 1,
"Debi2": 1.0,
"Debi_Status": 1,
"KOi": 25.1,
"KOi_Status": 1
}
From https://app.quicktype.io/, a C# model would be,
// <auto-generated />
//
// To parse this JSON data, add NuGet 'Newtonsoft.Json' then do:
//
// using QuickType;
//
// var thing = Thing.FromJson(jsonString);
namespace QuickType
{
using System;
using System.Collections.Generic;
using System.Globalization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
public partial class Thing
{
[JsonProperty("Period")]
public long Period { get; set; }
[JsonProperty("ReadTime")]
public DateTimeOffset ReadTime { get; set; }
[JsonProperty("AKM")]
public double Akm { get; set; }
[JsonProperty("AKM_Status")]
public long AkmStatus { get; set; }
[JsonProperty("CozunmusOksijen")]
public double CozunmusOksijen { get; set; }
[JsonProperty("CozunmusOksijen_Status")]
public long CozunmusOksijenStatus { get; set; }
[JsonProperty("Debi2")]
public long Debi2 { get; set; }
[JsonProperty("Debi_Status")]
public long DebiStatus { get; set; }
[JsonProperty("KOi")]
public double KOi { get; set; }
[JsonProperty("KOi_Status")]
public long KOiStatus { get; set; }
}
public partial class Thing
{
public static Thing FromJson(string json) => JsonConvert.DeserializeObject<Thing>(json, QuickType.Converter.Settings);
}
public static class Serialize
{
public static string ToJson(this Thing self) => JsonConvert.SerializeObject(self, QuickType.Converter.Settings);
}
internal static class Converter
{
public static readonly JsonSerializerSettings Settings = new JsonSerializerSettings
{
MetadataPropertyHandling = MetadataPropertyHandling.Ignore,
DateParseHandling = DateParseHandling.None,
Converters =
{
new IsoDateTimeConverter { DateTimeStyles = DateTimeStyles.AssumeUniversal }
},
};
}
}
As per https://learn.microsoft.com/en-us/dotnet/standard/serialization/system-text-json-how-to?pivots=dotnet-6-0#how-to-read-json-as-net-objects-deserialize, with System.Text.Json the call would be (note it will not honor the Newtonsoft attributes above):
Thing? thing = JsonSerializer.Deserialize<Thing>(jsonString);
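Alternatively, since the generated model uses Newtonsoft attributes, you can run the payload you already display back through it; a sketch, assuming res.objects.ToString() returns the JSON shown in the question:
Thing thing = Thing.FromJson(res.objects.ToString());
MessageBox.Show(thing.ReadTime.ToString());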
I have successfully deployed a model on Cloud ML Engine and verified it is working with gcloud ml-engine models predict by following the instructions. Now I want to send prediction requests to it from my C# app. How do I do that?
The online prediction API is a REST API, so you can use any library for sending HTTPS requests, although you will need to use Google's OAuth library to get your credentials.
The format of the request is JSON, as described in the docs.
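Concretely, the body wraps one JSON object per input row in an "instances" array; shortened here (a real request must include every feature the deployed model expects), it looks like:
{
"instances": [
{ "age": 25, "workclass": " Private", "education": " 11th" }
]
}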
To exemplify, consider the Census example. A client for that might look like:
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading.Tasks;
using Google.Apis.Auth.OAuth2;
using Newtonsoft.Json;
namespace prediction_client
{
class Person
{
public int age { get; set; }
public String workclass { get; set; }
public String education { get; set; }
public int education_num { get; set; }
public string marital_status { get; set; }
public string occupation { get; set; }
public string relationship { get; set; }
public string race { get; set; }
public string gender { get; set; }
public int capital_gain { get; set; }
public int capital_loss { get; set; }
public int hours_per_week { get; set; }
public string native_country { get; set; }
}
class Prediction
{
public List<Double> probabilities { get; set; }
public List<Double> logits { get; set; }
public Int32 classes { get; set; }
public List<Double> logistic { get; set; }
public override string ToString()
{
return JsonConvert.SerializeObject(this);
}
}
class MainClass
{
static PredictClient client = new PredictClient();
static String project = "MY_PROJECT";
static String model = "census"; // Whatever you deployed your model as
public static void Main(string[] args)
{
RunAsync().Wait();
}
static async Task RunAsync()
{
try
{
Person person = new Person
{
age = 25,
workclass = " Private",
education = " 11th",
education_num = 7,
marital_status = " Never - married",
occupation = " Machine - op - inspct",
relationship = " Own - child",
race = " Black",
gender = " Male",
capital_gain = 0,
capital_loss = 0,
hours_per_week = 40,
native_country = " United - Stats"
};
var instances = new List<Person> { person };
List<Prediction> predictions = await client.Predict<Person, Prediction>(project, model, instances);
Console.WriteLine(String.Join("\n", predictions));
}
catch (Exception e)
{
Console.WriteLine(e.Message);
}
}
}
class PredictClient {
private HttpClient client;
public PredictClient()
{
this.client = new HttpClient();
client.BaseAddress = new Uri("https://ml.googleapis.com/v1/");
client.DefaultRequestHeaders.Accept.Clear();
client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
}
public async Task<List<O>> Predict<I, O>(String project, String model, List<I> instances, String version = null)
{
var version_suffix = version == null ? "" : $"/versions/{version}";
var model_uri = $"projects/{project}/models/{model}{version_suffix}";
var predict_uri = $"{model_uri}:predict";
GoogleCredential credential = await GoogleCredential.GetApplicationDefaultAsync();
var bearer_token = await credential.UnderlyingCredential.GetAccessTokenForRequestAsync();
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", bearer_token);
var request = new { instances = instances };
var content = new StringContent(JsonConvert.SerializeObject(request), Encoding.UTF8, "application/json");
var responseMessage = await client.PostAsync(predict_uri, content);
responseMessage.EnsureSuccessStatusCode();
var responseBody = await responseMessage.Content.ReadAsStringAsync();
dynamic response = JsonConvert.DeserializeObject(responseBody);
return response.predictions.ToObject<List<O>>();
}
}
}
You may have to run gcloud auth application-default login to initialize Application Default Credentials before running locally, if you haven't already.
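For a plain console project, the sample needs only two NuGet packages: Install-Package Google.Apis.Auth (for GoogleCredential) and Install-Package Newtonsoft.Json.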
I have cut down the code from my project significantly so it's copy-and-pastable, but if you want to debug it in a console project it will need the NuGet package: Install-Package MsgPack.Cli.
OK, below I have commented on the line that is the issue. All I want to know is why the list is forcing duplicates into the _outgoingMessageQueue queue. Is this some kind of captured-variable conundrum? Please give as much detail as possible.
using MsgPack.Serialization;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Collections;
namespace QueueTest
{
public class Message
{
public string Data { get; set; }
}
public class InternalFactoryMsg<T>
{
public T Data { get; set; }
public string Group { get; set; }
public List<byte[]> ReturnIds { get; set; }
}
public class FactoryHelpers
{
public static List<byte[]> GetReturnIdentities(List<byte[]> messageBytes, byte[] identity)
{
var response = new List<byte[]>();
foreach (byte[] part in messageBytes)
{
if (part != null && part.Length > 0)
response.Add(part);
else
break;
}
// may not need this with good routing, but can avoid errors
if (messageBytes.Count > 0 && messageBytes[0] == identity)
{
messageBytes.RemoveAt(0);
Console.WriteLine("[GetReturnIdentities]: Removed identity from start, check your routing!");
}
// no return identities, send empty list as these bytes will be the
// app message and command identity couple
if (response.Count == messageBytes.Count)
return new List<byte[]>();
return response;
}
public static byte[] SerializeData<T>(T appMsg)
{
var serializer = MessagePackSerializer.Get<T>();
using (var byteStream = new MemoryStream())
{
serializer.Pack(byteStream, appMsg);
return byteStream.ToArray();
}
}
public static T DeserializeData<T>(byte[] bytes)
{
try
{
var serializer = MessagePackSerializer.Get<T>();
using (var byteStream = new MemoryStream(bytes))
{
return serializer.Unpack(byteStream);
}
}
catch (Exception ex)
{
return default(T);
}
}
}
public class Factory: FactoryHelpers
{
protected ConcurrentQueue<KeyValuePair<string, List<byte[]>>> _outgoingMessageQueue { get; set; }
public ConcurrentQueue<KeyValuePair<string, List<byte[]>>> IncomingMessageQueue { get; set; }
public Factory()
{
_outgoingMessageQueue = new ConcurrentQueue<KeyValuePair<string, List<byte[]>>>();
IncomingMessageQueue = new ConcurrentQueue<KeyValuePair<string, List<byte[]>>>();
// add fake incoming message
var byteMsg = new List<byte[]>()
{
Encoding.Unicode.GetBytes("socket1"),
Encoding.Unicode.GetBytes(""),
Encoding.Unicode.GetBytes("data")
};
var msg = new KeyValuePair<string, List<byte[]>>("socket1", byteMsg);
IncomingMessageQueue.Enqueue(msg);
}
public void AddMessage<T>(InternalFactoryMsg<T> msg)
{
var msgBytes = msg.ReturnIds ?? new List<byte[]>();
msgBytes.Add(new byte[0]);
msgBytes.Add(Factory.SerializeData<T>(msg.Data));
_outgoingMessageQueue.Enqueue(new KeyValuePair<string, List<byte[]>>("socket2", msgBytes));
}
public List<KeyValuePair<string, List<byte[]>>> GetQueue()
{
return _outgoingMessageQueue.ToList();
}
public static T GetDataFromBytes<T>(List<byte[]> msgBytes)
{
// ignoring null checks etc
return DeserializeData<T>(msgBytes.Last());
}
}
public static class MessageLayer
{
public static Factory Factory = new Factory();
public static void Init()
{
Task.Factory.StartNew(() =>
{
while(true)
{
KeyValuePair<string, List<byte[]>> msg;
if(Factory.IncomingMessageQueue.TryDequeue(out msg))
{
var data = msg.Value.Last();
var returnIds = Factory.GetReturnIdentities(msg.Value, Encoding.Unicode.GetBytes(msg.Key));
IncomingCommands.HandleDataCommand(data, "test grp", returnIds);
}
// nice and slow for simulation
Thread.Sleep(400);
}
}, TaskCreationOptions.LongRunning);
}
public static void SendMessage(Message msg, string group, List<byte[]> returnIds)
{
var intMsg = new InternalFactoryMsg<Message>();
intMsg.Data = msg;
intMsg.Group = group;
intMsg.ReturnIds = returnIds;
Factory.AddMessage<Message>(intMsg);
}
}
public static class DataAccessor
{
public static List<Message> GetData(byte[] data)
{
return new List<Message>()
{
new Message() { Data = "magic" },
new Message() { Data = "data!" }
};
}
}
public static class IncomingCommands
{
public static void HandleDataCommand(byte[] data, string group, List<byte[]> returnIds)
{
List<Message> result;
// does big switch, gets data response
result = DataAccessor.GetData(data);
foreach (Message msg in result)
{
var local = msg;
var fix = new List<byte[]>(returnIds);
// THIS IS THE ISSUE
// comment out the following line and uncomment the one below to fix it
// but... why??? :O !!!
MessageLayer.SendMessage(local, group, returnIds);
//MessageLayer.SendMessage(local, group, fix);
}
// check the queue
Console.WriteLine("---------------------------");
Console.WriteLine("::Checking queue contents::");
var msgs = MessageLayer.Factory.GetQueue();
foreach(var m in msgs)
{
var check = Factory.GetDataFromBytes<Message>(m.Value);
Console.WriteLine("data -> " + check.Data);
}
}
}
public class Program
{
static void Main(string[] args)
{
MessageLayer.Init();
while(true)
{
Thread.Sleep(400);
}
}
}
}
If you can't work it out, please up vote so it gets attention. Thanks
The reason was that
var msgBytes = msg.ReturnIds ?? new List<byte[]>();
does not copy the list: when msg.ReturnIds is non-null, msgBytes is just another reference to the same List<byte[]> instance the caller passes in on every loop iteration, so each subsequent AddMessage call appended to (and re-enqueued) that one shared object, which showed up as duplicates in _outgoingMessageQueue.
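In other words, the fix is to give each enqueued message its own copy of the list, either at the call site (the commented-out fix line) or inside AddMessage itself; a sketch of the latter:
public void AddMessage<T>(InternalFactoryMsg<T> msg)
{
// Copy the caller's list so the enqueued message owns its own List<byte[]>
// instead of aliasing msg.ReturnIds, which the caller reuses on every loop iteration.
var msgBytes = msg.ReturnIds != null
? new List<byte[]>(msg.ReturnIds)
: new List<byte[]>();
msgBytes.Add(new byte[0]);
msgBytes.Add(Factory.SerializeData<T>(msg.Data));
_outgoingMessageQueue.Enqueue(new KeyValuePair<string, List<byte[]>>("socket2", msgBytes));
}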
I've been trying to convert our ServiceStack app host and client to use MsgPack serialization. I kept getting the exception
MsgPack.InvalidMessagePackStreamException Stream Unexpectedly Ends
After some investigation, I've tracked it down to a DateTimeOffset field in the response object.
Client Code
using System;
namespace Client
{
using Server;
using ServiceStack.MsgPack;
class Program
{
static void Main(string[] args)
{
var client = new MsgPackServiceClient("http://localhost:1337");
var response = client.Get(new GetRequest());
Console.WriteLine("Response: [{0}] {1}", response.Timestamp, response.Result);
Console.ReadKey();
}
}
}
Server Code
using System;
namespace Server
{
using System.Reflection;
using ServiceStack;
using ServiceStack.MsgPack;
class Program
{
static void Main(string[] args)
{
var listeningOn = args.Length == 0 ? "http://localhost:1337/" : args[0];
using (var appHost = new AppHost("Test", Assembly.GetAssembly(typeof(GetService))))
{
appHost.Init()
.Start(listeningOn);
Console.WriteLine("AppHost Created at {0}, listening on {1}", DateTime.Now, listeningOn);
Console.ReadKey();
}
}
}
public class AppHost : AppHostHttpListenerBase
{
public AppHost(string serviceName, params Assembly[] assembliesWithServices)
: base(serviceName, assembliesWithServices)
{
}
public override void Configure(Funq.Container container)
{
Plugins.Add(new MsgPackFormat());
}
}
[Route("/GetRequest", Verbs = "GET")]
public class GetRequest : IReturn<GetResponse> { }
public class GetResponse
{
public string Result { get; set; }
public string Timestamp { get; set; }
//public DateTimeOffset Timestamp { get; set; }
}
public class GetService : Service
{
public GetResponse Get(GetRequest request)
{
return new GetResponse { Result = "Success", Timestamp = DateTimeOffset.UtcNow.ToString() };
//return new GetResponse { Result = "Success", Timestamp = DateTimeOffset.UtcNow };
}
}
}
The example works without DateTimeOffset but fails with the exception when it's included.
The MessagePackSerializer class appears to work as expected as demonstrated by the following test.
public static void SerializeTest()
{
var response = new GetResponse { Result = "Success", Timestamp = DateTimeOffset.UtcNow };
var serializer = MessagePackSerializer.Get<GetResponse>();
var asSingleObject = serializer.PackSingleObject(response);
var fromSingleObject = serializer.UnpackSingleObject(asSingleObject);
var packStream = new MemoryStream();
serializer.Pack(packStream, response);
packStream.Position = 0;
var fromPackStream = serializer.Unpack(packStream);
packStream.Position = 0;
serializer.PackTo(Packer.Create(packStream), response);
packStream.Position = 0;
var fromPacker = serializer.Unpack(packStream);
}
In each case the response is packed/unpacked correctly with the DateTimeOffset property.
I've tried a custom serializer for DateTimeOffset, but that fails the same way.
MsgPack.Cli 0.5.7
ServiceStack 4.0.35