PubNub - Subscribe doesn't work with .NET Windows services - C#

I'm using the following code to subscribe to a channel. It works fine in a .NET console app, but when I run the same code in a .NET Windows service, it doesn't work (it never hits delegate (Pubnub pnObj, PNStatus status)). Is there anything else I need to add when building a Windows service?
private void SubscribeToPubNub()
{
    PNConfiguration pnConfig = new PNConfiguration();
    pnConfig.SubscribeKey = RmmConnection.pNubSub;
    pnConfig.PublishKey = RmmConnection.pNubSubPub;
    pnConfig.Uuid = ConfigManager.GetUUID();
    Pubnub pubnub = new Pubnub(pnConfig);
    SubscribeCallbackExt mySubscribeListener = new SubscribeCallbackExt(
        delegate (Pubnub pnObj, PNMessageResult<object> message)
        {
            Console.WriteLine(pubnub.JsonPluggableLibrary.SerializeToJsonString(message));
        },
        delegate (Pubnub pnObj, PNPresenceEventResult presence)
        {
            if (presence != null)
            {
                Console.WriteLine(pubnub.JsonPluggableLibrary.SerializeToJsonString(presence));
            }
        },
        delegate (Pubnub pnObj, PNStatus status)
        {
            if (status != null && status.StatusCode == 200 && status.Category == PNStatusCategory.PNConnectedCategory)
            {
                //connected
            }
        });
    pubnub.AddListener(mySubscribeListener);
    pubnub.Subscribe<string>()
        .Channels(new string[] { "rmm_channel" })
        //.Channels(new string[] { RmmConnection.pNubChannel })
        .WithPresence()
        .Execute();
}
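For comparison, here is a minimal service host around the same subscribe call. Two differences from a console app are worth checking (these are assumptions, not a confirmed fix): a service has no console, so the Console.WriteLine calls in the listeners never show anything, and a Pubnub instance held only in a local variable has nothing keeping it referenced once OnStart returns. RmmService and the log path are hypothetical names:
using System;
using System.IO;
using System.ServiceProcess;
using PubnubApi;

public partial class RmmService : ServiceBase
{
    // Hold the client in a field so it outlives OnStart.
    private Pubnub pubnub;

    protected override void OnStart(string[] args)
    {
        // Same SubscribeToPubNub() as above, but assign the Pubnub instance
        // to the field instead of a local variable, and replace the
        // Console.WriteLine calls with Log(...).
        SubscribeToPubNub();
    }

    private static void Log(string line)
    {
        // A service has no visible console; write somewhere inspectable instead.
        File.AppendAllText(@"C:\Logs\pubnub-service.log", line + Environment.NewLine);
    }
}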

Related

SSIS Custom Data-Flow Component being Automatically Removed when Running under SQL Server 2016 (As Opposed to Visual Studio 2015)

I am working on an ETL process that is part of a larger system.
This larger system includes a logging system using a REST API.
The ETL section is running under SSIS, developed in Visual Studio 2015 and deployed on SQL Server 2016.
The ETL is covered with integration tests including tests of logs being generated.
The REST API cannot be guaranteed to be running during these integration tests, and even if it is, the asynchronous nature makes testing log generation... awkward.
We could use a script component to handle the logging, but we have 30+ packages that require it (each package is a distinct operation, based on a data point to be calculated from one database into the next, so that the team can work concurrently without TFS merges butchering the XML definitions any more than necessary), so maintenance becomes a headache.
In order to get around this, I have written a custom component that bundles up all the errors across a package execution (separated into Fatal, Error, Warning, Info, and Debug levels), adds a source, and fires JSON off to the REST API. In the event that the REST API is not specified, the system logs locally instead (which, for the integration tests, means that we have a synchronous and local log source to check).
The ComponentType is ComponentType.DestinationAdapter.
The component has two custom properties, one for the variable name of the logging url (defaults to $Project::LoggingURL), one for the source of the log (defaults to System::PackageName).
The component validates the custom properties to not be blank.
The component has a single connection, defaulting to a master database, used as a fallback.
The component validates that the connection is set.
The component has multiple (five) inputs and no outputs.
Each input is marked as having side-effects.
Each attached input is validated as having a single input column of type DT_WSTR.
Unattached inputs are fine (a package that cannot log any fatal errors will leave that input unattached).
If any Fatal or Error messages are detected, the component fails the package in the Post-Execute step (in order to detect as many issues as possible in a run, rather than only the first).
The build targets 32-bit, and .NET Framework 4.
On Post-Build, the dll is copied to the DTS/PipelineComponents folder, and the assembly is deregistered then reregistered in the GAC.
When executing a package through Visual Studio (right-click on the package, 'Execute Package'), the component behaves exactly as expected.
When the package is deployed to the local SQL Server 2016 instance on my machine and the integration tests are run, the validation claims that the outputs leading into my component are not used and should be removed, and the component does nothing (as if it was never there). There are no messages about the component whatsoever.
I would very much like to have the component run in SQL Server, otherwise it is completely useless.
This is the code (there is an associated UI, but the Design-Time behaviour is as expected):
[DtsPipelineComponent(
    DisplayName = "Custom Logging Component",
    ComponentType = ComponentType.DestinationAdapter,
    IconResource = "DestinationIcon",
    CurrentVersion = 1,
    UITypeName = "ETLCustomDataFlowComponents.CustomLoggingComponentUI,ETLCustomDataFlowComponents,Version=1.0.0.0,Culture=neutral,PublicKeyToken=051a7fa35dda5a9f"
)]
public class HermesCustomLoggingComponent : PipelineComponent
{
    public const string _SOURCE_PROPERTY = "Source Name";
    public const string _LOG_PROPERTY = "Log URL";
    public const string _MASTER_CONN_PROPERTY = "Master Connection";

    public override void ProvideComponentProperties()
    {
        base.ProvideComponentProperties();
        base.RemoveAllInputsOutputsAndCustomProperties();

        var loggingPath = ComponentMetaData.CustomPropertyCollection.New();
        loggingPath.Description = "The url to send json log messages to";
        loggingPath.Name = _LOG_PROPERTY;
        loggingPath.Value = string.Empty;
        loggingPath.ExpressionType = DTSCustomPropertyExpressionType.CPET_NOTIFY;

        var source = ComponentMetaData.CustomPropertyCollection.New();
        source.Description = "The source to which the log is to be attributed";
        source.Name = _SOURCE_PROPERTY;
        source.Value = string.Empty;

        var masterConn = ComponentMetaData.RuntimeConnectionCollection.New();
        masterConn.Name = _MASTER_CONN_PROPERTY;
        masterConn.Description = "The connection to log.Log as a backup when centralised logging fails";

        foreach (var level in new[] { "Fatal", "Error", "Warning", "Info", "Debug" })
        {
            var input = ComponentMetaData.InputCollection.New();
            input.Name = level;
            input.HasSideEffects = true;
        }
    }
    public override DTSValidationStatus Validate()
    {
        bool broken = false;
        bool cancel;
        foreach (IDTSInput100 input in ComponentMetaData.InputCollection)
        {
            if (input.IsAttached)
            {
                if (input.InputColumnCollection.Count != 1)
                {
                    ComponentMetaData.FireError(0, ComponentMetaData.Name, $"{input.Name} should have only a message input", "", 0, out cancel);
                    broken = true;
                }
                else if (input.InputColumnCollection[0].DataType != DataType.DT_WSTR)
                {
                    ComponentMetaData.FireError(0, ComponentMetaData.Name, $"Input to {input.Name} is not of type DT_WSTR", "", 0, out cancel);
                    broken = true;
                }
            }
            else
            {
                input.InputColumnCollection.RemoveAll();
            }
        }
        // Value is typed object, so compare as a string rather than by reference.
        if (string.IsNullOrEmpty(ComponentMetaData.CustomPropertyCollection[_SOURCE_PROPERTY].Value as string))
        {
            ComponentMetaData.FireError(0, ComponentMetaData.Name, $"{_SOURCE_PROPERTY} parameter has not been set", "", 0, out cancel);
            broken = true;
        }
        if (string.IsNullOrEmpty(ComponentMetaData.CustomPropertyCollection[_LOG_PROPERTY].Value as string))
        {
            ComponentMetaData.FireError(0, ComponentMetaData.Name, $"{_LOG_PROPERTY} parameter has not been set", "", 0, out cancel);
            broken = true;
        }
        if (ComponentMetaData.RuntimeConnectionCollection[_MASTER_CONN_PROPERTY].ConnectionManager == null)
        {
            ComponentMetaData.FireError(0, ComponentMetaData.Name, $"{_MASTER_CONN_PROPERTY} has not been set", "", 0, out cancel);
            broken = true;
        }
        return broken ? DTSValidationStatus.VS_ISBROKEN : base.Validate();
    }
    private readonly List<Dictionary<string, string>> _logMessages = new List<Dictionary<string, string>>();
    private readonly Dictionary<int, IDTSInput100> _inputs = new Dictionary<int, IDTSInput100>();
    private readonly Dictionary<string, int> _messageCounts = new Dictionary<string, int>();
    private string _source = string.Empty;
    private string _loggingPath = string.Empty;
    private SqlConnection sqlConnection;

    public override void AcquireConnections(object transaction)
    {
        if (ComponentMetaData.RuntimeConnectionCollection[_MASTER_CONN_PROPERTY].ConnectionManager != null)
        {
            ConnectionManager cm = DtsConvert.GetWrapper(ComponentMetaData.RuntimeConnectionCollection[_MASTER_CONN_PROPERTY].ConnectionManager);
            ConnectionManagerAdoNet cmAdoNet = cm.InnerObject as ConnectionManagerAdoNet;
            if (cmAdoNet == null) throw new Exception($"Connection Manager {cm.Name} is not ADO.NET");
            sqlConnection = cmAdoNet.AcquireConnection(transaction) as SqlConnection;
            if ((sqlConnection != null) && (sqlConnection.State != ConnectionState.Open)) sqlConnection.Open();
        }
    }

    public override void ReleaseConnections()
    {
        if ((sqlConnection != null) && (sqlConnection.State != ConnectionState.Closed)) sqlConnection.Close();
    }

    public override void PreExecute()
    {
        var sourceVar = ComponentMetaData.CustomPropertyCollection[_SOURCE_PROPERTY].Value as string;
        if (!string.IsNullOrEmpty(sourceVar))
        {
            IDTSVariables100 variables;
            VariableDispenser.LockForRead(sourceVar);
            VariableDispenser.GetVariables(out variables);
            _source = variables[sourceVar].Value.ToString();
            variables.Unlock(); // release the read lock taken above
        }
        var loggingVar = ComponentMetaData.CustomPropertyCollection[_LOG_PROPERTY].Value as string;
        if (!string.IsNullOrEmpty(loggingVar))
        {
            IDTSVariables100 variables;
            VariableDispenser.LockForRead(loggingVar);
            VariableDispenser.GetVariables(out variables);
            _loggingPath = variables[loggingVar].Value.ToString();
            variables.Unlock(); // release the read lock taken above
        }
        foreach (IDTSInput100 input in ComponentMetaData.InputCollection)
        {
            _inputs[input.ID] = input;
            _messageCounts[input.Name] = 0;
        }
    }
    public override void ProcessInput(int inputID, PipelineBuffer buffer)
    {
        while (buffer.NextRow())
        {
            string message = buffer[0].ToString();
            _messageCounts[_inputs[inputID].Name] += 1;
            _logMessages.Add(new Dictionary<string, string>
            {
                {"Level", _inputs[inputID].Name},
                {"InstanceId", Environment.MachineName},
                {"Source", _source},
                {"Message", message}
            });
        }
    }
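    // Note: buffer[0] assumes the single message column sits at buffer position 0,
    // which the pipeline does not guarantee. A more defensive variant (an assumption,
    // not from the original post) would map the index once in PreExecute via
    // BufferManager.FindColumnByLineageID(input.Buffer, input.InputColumnCollection[0].LineageID)
    // and index the buffer with that value here.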
    public override void PostExecute()
    {
        if (string.IsNullOrWhiteSpace(_loggingPath))
        {
            List<string> logMessagesList = new List<string>();
            foreach (var logMessage in _logMessages)
            {
                logMessagesList.Add(
                    $"('{logMessage["Level"].Substring(0, 1)}', '{logMessage["Source"]}', '{logMessage["Message"]}')");
            }
            // Guard against an empty VALUES list, which would be a SQL syntax error.
            if ((logMessagesList.Count > 0) && (sqlConnection != null) && (sqlConnection.State == ConnectionState.Open))
            {
                // Note: values are embedded directly, so messages containing single
                // quotes would need escaping (or a parameterised insert).
                var command = sqlConnection.CreateCommand();
                command.CommandText =
                    $"INSERT INTO log.Log ([Level], [Source], [Message]) VALUES {string.Join(", ", logMessagesList)}";
                command.ExecuteNonQuery();
            }
        }
        else
        {
            List<string> logMessagesList = new List<string>();
            foreach (var logMessage in _logMessages)
            {
                List<string> logJsonList = new List<string>();
                foreach (var logElement in logMessage)
                {
                    logJsonList.Add($"\"{logElement.Key}\":\"{logElement.Value}\"");
                }
                var logString = string.Join(", ", logJsonList);
                if (!logMessagesList.Contains(logString))
                {
                    logMessagesList.Add(logString);
                }
            }
            string logJson = "[{" + string.Join("}, {", logMessagesList) + "}]";
            var request = (HttpWebRequest)WebRequest.Create(_loggingPath + "api/log");
            request.Method = "POST";
            request.ContentType = "application/json";
            request.ContentLength = logJson.Length;
            using (var requestWriter = new StreamWriter(request.GetRequestStream(), System.Text.Encoding.ASCII))
            {
                requestWriter.Write(logJson);
            }
            // Read (and dispose) the response so failures surface and the
            // connection is returned to the pool.
            using (request.GetResponse())
            {
            }
        }
        foreach (var level in new[] { "Fatal", "Error" })
        {
            if (_messageCounts[level] > 0)
            {
                bool cancel;
                ComponentMetaData.FireError(0, _source, "Package has logged an exception, and cannot continue", "", 0, out cancel);
            }
        }
    }

    public override void PerformUpgrade(int pipelineVersion)
    {
        ComponentMetaData.Version = 1;
    }
}

WCF WebSocket project fails upon Entity Framework data access attempt

I am new to WebSockets (as of this morning) and have set up a WCF WebSocket app that works for a trivial example I found online (http://www.codeproject.com/Articles/619343/Using-WebSocket-in-NET-Part).
I then added Entity Framework, and as soon as I add code that accesses data, the process (just sending a message back and forth) no longer works.
Could there be some fundamental concept I'm missing?
Does anyone have any good ideas for troubleshooting?
namespace PBWebSocket
{
    public class PBWebSocket : IBWebSocket
    {
        private SPEntities db = new SPEntities();

        public async Task SendMessageToServer(Message msg)
        {
            var callback = OperationContext.Current.GetCallbackChannel<IPBCallback>();
            if (msg.IsEmpty || ((IChannel)callback).State != CommunicationState.Opened)
            {
                return;
            }
            byte[] body = msg.GetBody<byte[]>();
            string msgTextFromClient = Encoding.UTF8.GetString(body);
            var reqId = Int32.Parse(msgTextFromClient);
            // *** The below line breaks it ***
            var req = db.Requests.Where(r => r.Id == 164).FirstOrDefault();
            reqId = reqId + 2;
            Message newMsg = ByteStreamMessage.CreateMessage(
                new ArraySegment<byte>(Encoding.UTF8.GetBytes(reqId.ToString())));
            newMsg.Properties["WebSocketMessageProperty"] =
                new WebSocketMessageProperty { MessageType = WebSocketMessageType.Text };
            await callback.SendMessageToClient(newMsg);
        }
    }
}
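One low-risk way to surface what is actually failing (a sketch, not a confirmed fix; it assumes the EF call throws and the exception is swallowed when the channel faults): catch the exception around the data access and send its text back over the socket. This fragment reuses db, reqId, and the message plumbing from the snippet above.
string reply;
try
{
    var req = db.Requests.Where(r => r.Id == 164).FirstOrDefault();
    reply = (reqId + 2).ToString();
}
catch (Exception ex)
{
    // Typical culprits are the EF connection string or metadata missing from the
    // WCF host's web.config, which only surfaces on the first data access.
    reply = "EF error: " + ex.GetBaseException().Message;
}
Message newMsg = ByteStreamMessage.CreateMessage(
    new ArraySegment<byte>(Encoding.UTF8.GetBytes(reply)));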

Multiple users trying to get Office 365 profile data using PowerShell commands in ASP.NET

We use PowerShell commands to get user profile data when a user logs in to our website with Office 365 credentials. The problem: when 4 to 6 users come to the website and hit the Office 365 login button, and I pass their credentials to PowerShell, sessions are created for 3 users but not for the others. I googled and found a post describing the same issue:
https://4sysops.com/forums/topic/office-365-you-have-exceeded-the-maximum-number-of-connections-allowed-3/
Is there any resolution for this? I need to handle multiple users on my website at a time.
Here is my sample code:
try
{
    Collection<PSObject> userList = null;

    // Create initial session state for the runspace.
    InitialSessionState initialSession = InitialSessionState.CreateDefault();
    initialSession.ImportPSModule(new[] { "MSOnline" });

    // Create credential object.
    PSCredential credential = new PSCredential(UserCredential.UserName, UserCredential.Password);

    // Create commands to connect to Office 365 and fetch the user.
    Command connectCommand = new Command("Connect-MsolService");
    connectCommand.Parameters.Add(new CommandParameter("Credential", credential));
    Command getUserCommand = new Command("Get-MsolUser");
    getUserCommand.Parameters.Add(new CommandParameter("UserPrincipalName", UserCredential.UserName));

    using (Runspace psRunSpace = RunspaceFactory.CreateRunspace(initialSession))
    {
        // Open the runspace.
        psRunSpace.Open();

        // Iterate through each command and execute it.
        foreach (var com in new Command[] { connectCommand, getUserCommand })
        {
            var pipe = psRunSpace.CreatePipeline();
            pipe.Commands.Add(com);

            // Execute the command and collect results and errors (if any).
            Collection<PSObject> results = pipe.Invoke();
            var error = pipe.Error.ReadToEnd();
            if (error.Count > 0 && com == connectCommand)
            {
                // MessageBox.Show(error[0].ToString(), "Problem in login");
                // this.Close();
                return null;
            }
            if (error.Count > 0 && com == getUserCommand)
            {
                // MessageBox.Show(error[0].ToString(), "Problem in getting users");
                // this.Close();
                return null;
            }
            else
            {
                userList = results;
                Session["office365userslist"] = userList;
            }
        }

        // Close the runspace.
        psRunSpace.Close();
    }
    return userList;
}
catch (Exception ex)
{
    Response.Write(ex.Message);
    throw;
}
PowerShell is not recommended for this kind of scenario (a web app).
To get a user's profile in your web app, I suggest using the Microsoft Graph API - Get user.
Request:
GET https://graph.microsoft.com/v1.0/me
Response:
HTTP/1.1 200 OK
Content-type: application/json
Content-length: 491
{
    "businessPhones": [
        "businessPhones-value"
    ],
    "displayName": "displayName-value",
    "givenName": "givenName-value",
    "jobTitle": "jobTitle-value",
    "mail": "mail-value",
    "mobilePhone": "mobilePhone-value",
    "officeLocation": "officeLocation-value",
    "preferredLanguage": "preferredLanguage-value",
    "surname": "surname-value",
    "userPrincipalName": "userPrincipalName-value",
    "id": "id-value"
}
For how to integrate the Microsoft Graph API into your web app, you can reference the sample project on GitHub: Office 365 Starter Project for ASP.NET MVC.
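For a rough idea of what that call looks like from C# (a sketch; acquiring accessToken is assumed to be handled by your app's OAuth flow, as in the starter project):
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading.Tasks;

public static class GraphProfileClient
{
    private static readonly HttpClient client = new HttpClient();

    // accessToken comes from your OAuth/ADAL flow (an assumption, not shown here).
    public static async Task<string> GetMyProfileAsync(string accessToken)
    {
        var request = new HttpRequestMessage(HttpMethod.Get, "https://graph.microsoft.com/v1.0/me");
        request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", accessToken);
        var response = await client.SendAsync(request);
        response.EnsureSuccessStatusCode();
        // Returns the JSON document shown above.
        return await response.Content.ReadAsStringAsync();
    }
}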
UPDATE#1
Is there any alternative way to add disclaimer text in Exchange Server through an API?
The Graph API does not provide a function to set the disclaimer HTML. You can submit feedback to the Office 365 Developer Platform User Voice.
In this scenario, a possible workaround is to sequence the requests.
For example, put all the requests in a queue and handle them with 3 threads (the maximum concurrency).
Sample code for your reference:
public class SequencedRequestsDemo
{
    private class SampleRequest
    {
        public string ActionName { get; set; }
        public UserCredential UserCredential { get; set; }
    }

    private class UserCredential
    {
        public string UserName { get; set; }
        public string Password { get; set; }
    }

    private ConcurrentQueue<SampleRequest> _queue = new ConcurrentQueue<SampleRequest>();

    public void Run()
    {
        _queue.Enqueue(new SampleRequest { ActionName = "action_name1", UserCredential = new UserCredential() });
        _queue.Enqueue(new SampleRequest { ActionName = "action_name2", UserCredential = new UserCredential() });
        _queue.Enqueue(new SampleRequest { ActionName = "action_name3", UserCredential = new UserCredential() });
        _queue.Enqueue(new SampleRequest { ActionName = "action_name4", UserCredential = new UserCredential() });
        _queue.Enqueue(new SampleRequest { ActionName = "action_name5", UserCredential = new UserCredential() });
        _queue.Enqueue(new SampleRequest { ActionName = "action_name6", UserCredential = new UserCredential() });

        var thread1 = new System.Threading.Thread(() => { WaitForRequest(); });
        var thread2 = new System.Threading.Thread(() => { WaitForRequest(); });
        var thread3 = new System.Threading.Thread(() => { WaitForRequest(); });
        thread1.Start();
        thread2.Start();
        thread3.Start();
    }

    private void WaitForRequest()
    {
        while (true)
        {
            SampleRequest request;
            if (_queue.TryDequeue(out request))
            {
                HandleRequest(request);
            }
            else
            {
                System.Threading.Thread.Sleep(1000);
            }
        }
    }

    private void HandleRequest(SampleRequest request)
    {
        Console.WriteLine("Handle request {0} - {1}", request.ActionName, System.Threading.Thread.CurrentThread.ManagedThreadId);
    }
}

How to batch queue records and execute them in a different thread and wait till it's over?

I am using PushSharp version 4.0.4, in a Windows application.
I have three main methods:
1- BroadCastToAll
2- BroadCastToIOS
3- BroadCastToAndriod
I have a button called Send. On the button's click event I call the BroadCastToAll function.
private void btnSend_Click(object sender, EventArgs e)
{
    var url = "www.mohammad-jouhari.com";
    var promotion = new Promotion();
    BroadCastToAll(promotion, url);
}
Here is the BroadCastToAll function:
public void BroadCastToAll(Promotion promotion, string url)
{
    var deviceCatalogs = GetDeviceCatalog();
    BroadCastToIOS(promotion, url, deviceCatalogs.Where(d => d.OS == "IOS").ToList());
    BroadCastToAndriod(promotion, url, deviceCatalogs.Where(d => d.OS == "Android").ToList());
}
Here is the BroadCastToIOS function:
public void BroadCastToIOS(Promotion promotion, string url, List<DeviceCatalog> deviceCatalogs)
{
    if (deviceCatalogs.Count == 0)
        return;
    // Lock taken because the PushSharp callbacks could execute while devices are being registered.
    lock (_lock)
    {
        QueueAllAppleDevicesForNotification(promotion, url, deviceCatalogs);
    }
}
Here is the BroadCastToAndriod function:
public void BroadCastToAndriod(Promotion promotion, string url, List<DeviceCatalog> deviceCatalogs)
{
    if (deviceCatalogs.Count == 0)
        return;
    // Lock taken because the PushSharp callbacks could execute while devices are being registered.
    lock (_lock)
    {
        QueueAllGcmDevicesForNotification(promotion, url, deviceCatalogs);
    }
}
Here is the QueueAllAppleDevicesForNotification function:
private void QueueAllAppleDevicesForNotification(Promotion promotion, string url, List<DeviceCatalog> deviceCatalogs)
{
    var apnsServerEnviroment = UseProductionCertificate ? ApnsConfiguration.ApnsServerEnvironment.Production : ApnsConfiguration.ApnsServerEnvironment.Sandbox;
    var fileService = new FileService();
    var filePath = Application.StartupPath + "/Certifcates/" + (UseProductionCertificate ? "prod.p12" : "dev.p12");
    var buffer = fileService.GetFileBytes(filePath);
    var config = new ApnsConfiguration(apnsServerEnviroment, buffer, APPLE_CERTIFICATE_PWD);
    apnsServiceBroker = new ApnsServiceBroker(config);
    apnsServiceBroker.OnNotificationFailed += (notification, aggregateEx) => {
        aggregateEx.Handle(ex => {
            // Log the response
            return true; // mark the exception as handled
        });
    };
    apnsServiceBroker.OnNotificationSucceeded += (notification) => {
        // Log the response
    };
    apnsServiceBroker.Start();
    foreach (var deviceToken in deviceCatalogs)
    {
        var title = GetTitle(promotion, deviceToken);
        //title += DateTime.UtcNow.TimeOfDay.ToString();
        var NotificationPayLoadObject = new NotificationPayLoadObjectApple();
        NotificationPayLoadObject.aps.alert = title;
        NotificationPayLoadObject.aps.badge = 0;
        NotificationPayLoadObject.aps.sound = "default";
        NotificationPayLoadObject.url = url;
        var payLoad = JObject.Parse(JsonConvert.SerializeObject(NotificationPayLoadObject));
        apnsServiceBroker.QueueNotification(new ApnsNotification
        {
            Tag = this,
            DeviceToken = deviceToken.UniqueID,
            Payload = payLoad
        });
    }
    var fbs = new FeedbackService(config);
    fbs.FeedbackReceived += (string deviceToken, DateTime timestamp) =>
    {
        // This token is no longer available in APNS.
        new DeviceCatalogService().DeleteExpiredIosDevice(deviceToken);
    };
    fbs.Check();
    apnsServiceBroker.Stop();
}
And here is the QueueAllGcmDevicesForNotification function:
private void QueueAllGcmDevicesForNotification(Promotion promotion, string url, List<DeviceCatalog> deviceCatalogs)
{
    var config = new GcmConfiguration(ANDROID_SENDER_ID, ANDROID_SENDER_AUTH_TOKEN, ANDROID_APPLICATION_ID_PACKAGE_NAME);
    gcmServiceBroker = new GcmServiceBroker(config);
    gcmServiceBroker.OnNotificationFailed += (notification, aggregateEx) => {
        aggregateEx.Handle(ex => {
            // Log the response
            return true;
        });
    };
    gcmServiceBroker.OnNotificationSucceeded += (notification) => {
        // Log the response
    };
    var title = GetTitle(promotion);
    gcmServiceBroker.Start();
    foreach (var regId in deviceCatalogs)
    {
        var NotificationPayLoadObject = new NotificationPayLoadObjectAndriod(url, title, "7", promotion.ImageUrl);
        var payLoad = JObject.Parse(JsonConvert.SerializeObject(NotificationPayLoadObject));
        gcmServiceBroker.QueueNotification(new GcmNotification
        {
            RegistrationIds = new List<string> { regId.UniqueID },
            Data = payLoad
        });
    }
    gcmServiceBroker.Stop();
}
Now, when I click the Send button, the event starts executing.
The BroadCastToAll function is called; it calls BroadCastToIOS first and then BroadCastToAndriod.
Is there any way to call BroadCastToIOS and wait until all the devices have been queued, the notifications pushed by the library, and the callback events fully fired, before BroadCastToAndriod starts executing?
What lines of code do I need to add?
Also, is there any way to batch the number of devices to be queued?
For example, say I have 1000 devices:
500 iOS
500 Android
Can I queue 100, 100, 100, 100, 100 for iOS and, when that's done,
queue 100, 100, 100, 100, 100 for Android?
Any help is appreciated.
Thanks.
The call to broker.Stop() by default will block until all the notifications from the queue have been processed.
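Building on that, you can get both the ordering and the batching by queueing one slice at a time and letting Stop() drain it before the next slice. A sketch against the code above (the batch size and the loop are assumptions, not PushSharp API):
public void BroadCastToIOSInBatches(Promotion promotion, string url, List<DeviceCatalog> devices, int batchSize)
{
    // Each call below creates, fills, and stops a broker; Stop() blocks until
    // the batch has been processed and its callbacks have fired.
    for (int i = 0; i < devices.Count; i += batchSize)
    {
        var batch = devices.Skip(i).Take(batchSize).ToList();
        QueueAllAppleDevicesForNotification(promotion, url, batch);
    }
}
The same loop shape works for QueueAllGcmDevicesForNotification, and running the iOS loop to completion before the Android one gives the "wait until iOS is fully done" ordering: with 500 devices each, that is five batches of 100 for iOS, then five for Android.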

Windows Explorer gets unresponsive while application is calling web service

I have a .NET 3.5 application that I run as a service. This application has a thread that calls a web service and stores the data in a SQLite database.
In development we never had any problems with this, but in production the web service call takes up to 20 seconds to complete. While the application is calling the web service and waiting for the reply, Windows Explorer becomes laggy and unresponsive. When the call returns, Windows Explorer acts normal again, until the next call to the web service.
The CPU usage of the process is very low and does not spike when calling the web service. I can see the I/O hit to the hard drive when it writes to the database, but that all happens in a split second and surely cannot be the cause of the slowdown.
Any help would be greatly appreciated, as I am at a loss with this problem right now.
Machine Specs
Windows XP SP3
Intel core i5 3.2GHz
3.5 GB RAM
Code
public void UpdateMembers()
{
    var moreMembersToUpdate = true;
    while (moreMembersToUpdate)
    {
        var response = GetMembersToUpdate();
        if (response != null)
        {
            UpdateLicense(response.licensed);
            if (response.memberResult == null || response.memberResult.recordsReturned == 0)
                break;
        }
        else
            break;
        UpdateMemberCache(response.memberResult.memberList, response.memberResult.lastUpdate,
            response.memberResult.lastMembersTransactionID);
        moreMembersToUpdate = response.memberResult.recordsLeft;
    }
}

public void UpdateLicense(bool licensed)
{
    var list = DataMapper.GetMapper().QueryForList("Select.License", null);
    if (list != null && list.Count > 0)
    {
        var isLicensed = (Boolean)list[0];
        if (isLicensed != licensed)
        {
            DataMapper.GetMapper().Update("Update.License", licensed);
        }
    }
    else
        DataMapper.GetMapper().Insert("Insert.License", licensed);
}

public StoredValueResponse GetMembersToUpdate()
{
    var token = new OperationToken
    {
        company = Settings.GetCompany(),
        storeID = Settings.GetStoreID(),
        operationID = Guid.NewGuid().ToString(),
        batchSize = 3000,
        password = Settings.GetPassword(),
        userName = Settings.GetCompany()
    };
    var lastSync = GetLastMemberCacheSync();
    return WebMethods.GetUpdatedMemberCache(token, lastSync.TransactionDetailID);
}

public MemberSyncRecord GetLastMemberCacheSync()
{
    var lastMemberSync = DataMapper.GetMapper().QueryForList("Select.LastMemberUpdate", null);
    if (lastMemberSync != null && lastMemberSync.Count > 0 && lastMemberSync[0] != null)
        return (MemberSyncRecord)lastMemberSync[0];
    var record = new MemberSyncRecord
    {
        LastMembersTransactionTime = new DateTime(1900, 1, 1),
        TransactionDetailID = 0
    };
    return record;
}

public void UpdateMemberCache(SmallMemberInfo[] members, DateTime lastSyncTime, long transactionDetailID)
{
    try
    {
        DataMapper.GetMapper().BeginTransaction();
        foreach (var member in members)
        {
            DataMapper.GetMapper().Insert("Insert.MemberInfo", member);
        }
        DataMapper.GetMapper().CommitTransaction();
    }
    catch (Exception)
    {
        DataMapper.GetMapper().RollBackTransaction();
        throw;
    }
    UpdateMemberCacheSyncHistory(lastSyncTime, members.Length, transactionDetailID);
}
