DetectChanges always detects changes - C#

I use Microsoft Sync Framework 2.1 to synchronize a remote source directory with a local destination directory: if the framework detects changes, it downloads them from the source (remote) to the destination (local).
The problem: DetectChanges always detects changes, even though the directory has not changed. The remote directory contains one file.
This is the code I wrote to synchronize it:
public class SyncService
{
    private FileSyncProvider _provider;
    private FileSyncOptions _syncOptions;
    private FileSyncScopeFilter _filter;
    private string _toLocalDirPath;
    private string _fromSourceDirectory;
    private string _lastFromSourceDirectory; // remember the last source directory (it can change)

    public SyncService(string localDirPath, string fromSourceDirectory)
    {
        _syncOptions = FileSyncOptions.ExplicitDetectChanges |
                       FileSyncOptions.RecycleDeletedFiles |
                       FileSyncOptions.RecyclePreviousFileOnUpdates |
                       FileSyncOptions.RecycleConflictLoserFiles;
        _filter = new FileSyncScopeFilter();
        _toLocalDirPath = localDirPath;
        _fromSourceDirectory = fromSourceDirectory;
    }

    public void Sync()
    {
        if (_lastFromSourceDirectory != Constants.FromSourceDirectory) // if the directory path changed, dispose the old provider and create a new one
        {
            if (_provider != null)
            {
                _provider.DetectedChanges -= Provider_DetectedChanges;
                _provider.ApplyingChange -= Provider_ApplyingChange;
                _provider.AppliedChange -= Provider_AppliedChange;
                _provider.CopyingFile -= Provider_CopyingFile;
                _provider.SkippedChange -= Provider_SkippedChange;
                _provider.SkippedFileDetect -= Provider_SkippedFileDetect;
                _provider.Dispose();
            }

            _fromSourceDirectory = Constants.FromSourceDirectory;
            _lastFromSourceDirectory = _fromSourceDirectory;

            _provider = new FileSyncProvider(_fromSourceDirectory, _filter, _syncOptions);
            _provider.DetectedChanges += Provider_DetectedChanges;
            _provider.ApplyingChange += Provider_ApplyingChange;
            _provider.AppliedChange += Provider_AppliedChange;
            _provider.CopyingFile += Provider_CopyingFile;
            _provider.SkippedChange += Provider_SkippedChange;
            _provider.SkippedFileDetect += Provider_SkippedFileDetect;
        }

        DetectChangesOnFileSystemReplica();
        SyncFileOneWay(_fromSourceDirectory, _toLocalDirPath, _filter, _syncOptions);
    }

    private void DetectChangesOnFileSystemReplica()
    {
        _provider?.DetectChanges();
    }

    private void SyncFileOneWay(
        string sourceRootPath, string destinationRootPath,
        FileSyncScopeFilter filter, FileSyncOptions options)
    {
        FileSyncProvider sourceProvider = null;
        FileSyncProvider destinationProvider = null;
        try
        {
            sourceProvider = new FileSyncProvider(sourceRootPath, filter, options);
            destinationProvider = new FileSyncProvider(destinationRootPath, filter, options);

            SyncOrchestrator agent = new SyncOrchestrator();
            agent.LocalProvider = destinationProvider;
            agent.RemoteProvider = sourceProvider;
            agent.Direction = SyncDirectionOrder.Download; // sync from source (remote) to destination (local)
            //agent.Direction = SyncDirectionOrder.Upload;

            var sourcePath = sourceProvider.RootDirectoryPath.TrimEnd('\\');
            var destinationPath = destinationProvider.RootDirectoryPath.TrimEnd('\\');

            agent.Synchronize(); // sync
        }
        finally
        {
            // Release resources
            if (sourceProvider != null)
            {
                sourceProvider.Dispose();
            }
            if (destinationProvider != null)
            {
                destinationProvider.Dispose();
            }
        }
    }

    private void Provider_DetectedChanges(object sender, DetectedChangesEventArgs e)
    {
        Console.WriteLine($"{nameof(e.TotalFileSize)}:{e.TotalFileSize}");
    }
}
I run the Sync() method every 5 minutes, and DetectChanges() reports that it detected changes, and then the synchronization runs.
Why does DetectChanges detect changes if I do not change the file or the directory? It is a remote directory.
I want to synchronize the directories only if the remote directory really has some changes.

After researching this question, I realized that after receiving the synchronization command the provider synchronizes only what is needed and does not actually transfer any data.
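As a minimal sketch of how to confirm that, assuming the standard FileSyncProvider/SyncOrchestrator API used in the question (the SyncFileOneWayWithCount name and the counter are hypothetical additions): count the AppliedChange events raised on the destination provider during Synchronize(); if the count stays at zero, nothing was transferred even though DetectChanges ran.
// Hypothetical helper: counts changes actually applied to the destination
// so the caller can see whether Synchronize() transferred anything.
private int _appliedCount;

private void SyncFileOneWayWithCount(string sourceRootPath, string destinationRootPath,
    FileSyncScopeFilter filter, FileSyncOptions options)
{
    _appliedCount = 0;
    using (var sourceProvider = new FileSyncProvider(sourceRootPath, filter, options))
    using (var destinationProvider = new FileSyncProvider(destinationRootPath, filter, options))
    {
        destinationProvider.AppliedChange += (s, e) => _appliedCount++;

        var agent = new SyncOrchestrator
        {
            LocalProvider = destinationProvider,
            RemoteProvider = sourceProvider,
            Direction = SyncDirectionOrder.Download
        };
        agent.Synchronize();
    }

    Console.WriteLine(_appliedCount == 0
        ? "No changes were actually transferred."
        : $"{_appliedCount} change(s) applied to the destination.");
}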

C# is crashing on an event

I am trying to create a C# service as a console app.
The main code:
static void Main(string[] args)
{
var exitCode = HostFactory.Run(
x =>
{
x.Service<HeartBeat>(s =>
{
s.ConstructUsing(heartbeat => new HeartBeat());
s.WhenStarted(heartbeat => heartbeat.Start());
s.WhenStopped(heartbeat => heartbeat.Stop());
});
x.RunAsLocalSystem();
x.SetServiceName("UpgradeServices");
x.SetDisplayName("Service Upgrade");
x.SetDescription("Service is monitoring new version.");
});
int exitCodeValue = (int)Convert.ChangeType(exitCode, exitCode.GetTypeCode());
Environment.ExitCode = exitCodeValue;
}
Then I have code for deleting and copying files as per below:
public class MovingFiles
{
public string fileName;
public string destPath;
private DirectoryInfo directory;
private DirectoryInfo myFile;
public string sourcePath;
public string targetPath;
public MovingFiles(string sourceFolder, string targetFolder)
{
sourcePath = sourceFolder;
targetPath = targetFolder;
}
public void deleteFilesMethod()
{
System.Threading.Thread.Sleep(10000);
string deleteString;
//First we want to delete all files except for the JSON file as this has all of the important settings
if (System.IO.Directory.Exists(targetPath))
{
string[] files = System.IO.Directory.GetFiles(targetPath);
// Loop through each files and then delete these if they are not the JSON file
foreach (string s in files)
{
deleteString = targetPath;
// The file name which is returned will be deleted
fileName = System.IO.Path.GetFileName(s);
if (fileName != "appsettings.json")
{
deleteString = System.IO.Path.Combine(targetPath, fileName);
try
{
System.IO.File.Delete(deleteString);
}
catch (System.IO.IOException e)
{
Console.WriteLine(e.Message);
return;
}
}
}
}
else
{
Console.WriteLine("The loop didn't run, source path doesn't exist");
}
}
public void copyFilesMethod()
{
System.Threading.Thread.Sleep(10000);
if (System.IO.Directory.Exists(sourcePath))
{
// Searching for the latest directory created in the sourcePath folder
directory = new DirectoryInfo(sourcePath);
myFile = (from f in directory.GetDirectories()
orderby f.LastWriteTime descending
select f).First();
sourcePath = System.IO.Path.Combine(sourcePath, myFile.Name);
string[] files = System.IO.Directory.GetFiles(sourcePath);
// Copy the files and overwrite destination files if they already exist.
foreach (string s in files)
{
// Use static Path methods to extract only the file name from the path.
fileName = System.IO.Path.GetFileName(s);
if (fileName != "appsettings.json")
{
destPath = System.IO.Path.Combine(targetPath, fileName);
try
{
System.IO.File.Copy(s, destPath, true);
}
catch (System.IO.IOException e)
{
Console.WriteLine(e.Message);
return;
}
}
}
}
else
{
Console.WriteLine("The loop didn't run, source path doesn't exist");
}
// Keep console window open in debug mode.
Console.WriteLine("Procedure has been completed.");
}
}
This should be triggered whenever there is a new file, which I have written like this:
class FileMonitor
{
public FileSystemWatcher watcher = new FileSystemWatcher();
public string sourcePath;
public string targetPath;
public FileMonitor(string sourceFolder, string targetFolder)
{
sourcePath = sourceFolder;
targetPath = targetFolder;
}
public void watch()
{
watcher.Path = sourcePath;
watcher.NotifyFilter = NotifyFilters.LastWrite
| NotifyFilters.FileName | NotifyFilters.DirectoryName
| NotifyFilters.CreationTime;
//var one = NotifyFilters.FileName;
watcher.Filter = "*.*";
watcher.Created += new FileSystemEventHandler (OnChanged);
watcher.EnableRaisingEvents = true;
//System.Threading.Thread.Sleep(25000);
}
public void OnChanged(object source, FileSystemEventArgs e)
{
//Copies file to another directory.
MovingFiles FileMoveOne = new MovingFiles(sourcePath, targetPath);
FileMoveOne.deleteFilesMethod();
FileMoveOne.copyFilesMethod();
}
}
As I understand it, once I run the code below it will check every 10 seconds whether there is a new file and then trigger the OnChanged method. Am I right?
public class HeartBeat
{
private readonly Timer _timer;
public HeartBeat()
{
_timer = new Timer(10000)
{
AutoReset = true
};
_timer.Elapsed += TimerElapsed;
}
private void TimerElapsed(object sender, ElapsedEventArgs e)
{
//StringBuilder loggingLine = new StringBuilder();
/* Every 10 seconds it will write to the file */
string[] lines = new string[] { DateTime.Now.ToString() + ": Heartbeat is active. Service is monitoring SS and DS" };
//lines[1] = DateTime.Now.ToString() + " About to check if new files are placed on server";
//loggingLine.Append(lines[i]);
File.AppendAllLines(@"C:\Users\RLEBEDEVS\Desktop\Monitor\Monitor1\HeartBeat.log", lines);
//File.AppendAllLines(@"C:\Users\RLEBEDEVS\Desktop\Monitor\Monitor1\HeartBeat.log", lines);
FileMonitor versioOne = new FileMonitor(@"C:\Users\RLEBEDEVS\Desktop\Monitor\Monitor1", @"C:\Users\RLEBEDEVS\Desktop\Monitor\Monitor2");
versioOne.watch();
}
public void Start ()
{
_timer.Start();
}
public void Stop ()
{
_timer.Stop();
}
}
The issue I am having is inconsistency.
It should copy the files to the Monitor2 folder once a new folder is created, but it does not do that on the first creation; it only deletes and copies the files the second time I create a folder in the Monitor1 folder.
Every second time it tries to copy the files, it crashes with the error below, which I am not familiar with:
Topshelf.Hosts.ConsoleRunHost Critical: 0 : The service threw an unhandled exception, System.UnauthorizedAccessException: Access to the path 'C:\Users\RLEBEDEVS\Desktop\Monitor\Monitor2\System.Net.Sockets.dll' is denied.
at System.IO.__Error.WinIOError(Int32 errorCode, String maybeFullPath)
at System.IO.File.InternalDelete(String path, Boolean checkHost)
at System.IO.File.Delete(String path)
at UpgradeServices.MovingFiles.deleteFilesMethod() in C:\Users\RLEBEDEVS\Desktop\C#\Service\UpgradeServices\MovingFIles.cs:line 48
at UpgradeServices.FileMonitor.OnChanged(Object source, FileSystemEventArgs e) in C:\Users\RLEBEDEVS\Desktop\C#\Service\UpgradeServices\FileMonitor.cs:line 43
at System.IO.FileSystemWatcher.OnCreated(FileSystemEventArgs e)
at System.IO.FileSystemWatcher.NotifyFileSystemEventArgs(Int32 action, String name)
at System.IO.FileSystemWatcher.CompletionStatusChanged(UInt32 errorCode, UInt32 numBytes, NativeOverlapped* overlappedPointer)
at System.Threading._IOCompletionCallback.PerformIOCompletionCallback(UInt32 errorCode, UInt32 numBytes, NativeOverlapped* pOVERLAP)
Topshelf.Hosts.ConsoleRunHost Information: 0 : Stopping the UpgradeServices service
Topshelf.Hosts.ConsoleRunHost Information: 0 : The UpgradeServices service has stopped.
The program '[497452] UpgradeServices.exe' has exited with code 1067 (0x42b).
Line 48 is this one, though it performed the task fine previously (on the first go):
System.IO.File.Delete(deleteString);
I see that the issue is with the way I am raising the event. Does anybody know what I should change in order to achieve the desired result, which is that when the service is started, every new folder created in the watched directory triggers the two methods for deleting and moving files? The folder will only ever have new folders created in it.
Regards,
It seems that in your heartbeat you are starting a new FileMonitor every 10 seconds, so after 20 seconds you will have two FileMonitors watching and moving (deleting) the same files at the same time. Just start the FileMonitor once, using a hosted service for example, or remove the timer handler part of your HeartBeat class and just create the FileMonitor in the constructor:
public HeartBeat()
{
    FileMonitor versioOne = new FileMonitor(@"C:\Users\RLEBEDEVS\Desktop\Monitor\Monitor1", @"C:\Users\RLEBEDEVS\Desktop\Monitor\Monitor2");
    versioOne.watch();
    // Maybe save it to an instance field so it does not get garbage collected.
    // Not sure how FileSystemWatcher behaves with the subscription;
    // the subscription should prevent "versioOne" from being collected.
}
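As a minimal sketch of that suggestion (the _monitor field name is my own; everything else reuses the question's FileMonitor class, and Start/Stop keep the signatures Topshelf expects), the HeartBeat could hold a single monitor for the service's lifetime:
public class HeartBeat
{
    // Hypothetical field: holding the monitor here keeps one FileSystemWatcher
    // alive for the whole lifetime of the service.
    private readonly FileMonitor _monitor;

    public HeartBeat()
    {
        _monitor = new FileMonitor(
            @"C:\Users\RLEBEDEVS\Desktop\Monitor\Monitor1",
            @"C:\Users\RLEBEDEVS\Desktop\Monitor\Monitor2");
    }

    public void Start()
    {
        _monitor.watch(); // subscribe once; no timer is needed
    }

    public void Stop()
    {
        _monitor.watcher.EnableRaisingEvents = false; // stop raising events on shutdown
    }
}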

C# Outlook add-in: UserProperty of first item in collection not found

Outlook 2016
.Net Framework 4.5
I have encountered a really strange behaviour:
When I iterate through the Items collection of a contact folder, in some very specific, hard-to-define cases (which I do not really understand) some UserProperties of the first item of the collection fail to load, even though the UserProperties are definitely set.
The approach is the following:
I open the contact folder (to which the items will be moved) in Outlook.
Then I execute the "test".
The execution of the test can be summarized as follows:
click button ->
start thread
iterate through the items (on the first iteration no items are present)
add new items {
create item
set UserProperty PRE before the item is initially saved
save item
move item to the desired folder
set UserProperty POST after the item is moved
save item
}
end thread
click button ->
start thread
iterate through the items (here the UserProperty POST sometimes fails to load on the first item of the collection; however, when I investigate it, it IS there. It only fails for the first item and succeeds for every other following item)
...END
It seems to me that Outlook somehow fails to update the UserProperty definitions in time. But note that the first BackgroundWorker thread has already finished when the second BackgroundWorker thread iterates through the items.
The problem could be related to the fact that I am viewing the folder in the Explorer while the items are added and iterated.
This bug is hard to reproduce and only occurs rarely; however, I'm really missing insight into the inner workings of Outlook, so I can only speculate.
Ideas for workarounds:
I could add an item with all UserProperties before moving it. The problem here is that in some scenarios I need to add new UserProperties after the item is initially saved and moved to the folder.
In a few cases the UserProperty key is created dynamically (following a pattern), so it wouldn't be optimal to predefine all UserProperties.
It is very important that the UserProperties are loaded reliably, because some important features are based upon them.
Does anybody have a clue how the problem is caused and how to solve it? This behaviour is driving me crazy.
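For reference, the first workaround idea might be sketched like this, reusing names from the code listing below (application, testFolder, data, and the SetUserProperty helper); it only helps for property keys that are known up front:
// Sketch of workaround 1: define every known UserProperty before the item is moved,
// so the property definitions are already complete when the second pass iterates.
Outlook.ContactItem contactItem = (Outlook.ContactItem)application.CreateItem(Outlook.OlItemType.olContactItem);
contactItem.SetUserProperty(Test.key_preCreateProperty, data.Pre);
contactItem.SetUserProperty(Test.key_postCreateProperty, ""); // placeholder, filled in after the move
contactItem.Save();

Outlook.ContactItem movedContactItem = (Outlook.ContactItem)contactItem.Move(testFolder);
movedContactItem.SetUserProperty(Test.key_postCreateProperty, data.Post);
movedContactItem.Save();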
Some code (not the original, but it should contain all the relevant aspects):
//Ribbon
TestNS.TestCaller testCaller;
string folderID = "00000000BDB409934ED327439481EB6E1E1CC4D3010055B62301B58E32478DCD8C0D3FA6304600002C4CA4400000";
public void testButton0_Action(Office.IRibbonControl control)
{
if(testCaller == null){
testCaller = new TestNS.TestCaller(ThisAddIn.Outlook,folderID);
}
testCaller.Run();
}
//Ribbon end
using System.Collections.Generic;
using System.Runtime.InteropServices;
using Outlook = Microsoft.Office.Interop.Outlook;
using System.Diagnostics;
using System.Windows.Forms;
using System.ComponentModel;
namespace TestNS
{
public class TestCaller{
private Outlook.Application application;
private BackgroundWorker worker = new BackgroundWorker();
private Test test = null;
private string folderId;
private bool init = true;
private bool busy = false;
public TestCaller(Outlook.Application application, string folderId){
this.application = application;
this.folderId = folderId;
worker.DoWork += new DoWorkEventHandler(DoWork);
worker.RunWorkerCompleted += new RunWorkerCompletedEventHandler(OnCompleted);
}
public void Run()
{
if (!busy)
{
busy = true;
test = new Test(application, folderId, init);
worker.RunWorkerAsync();
}
}
private void DoWork(object sender, DoWorkEventArgs e)
{
test.Process();
test = null;
}
private void OnCompleted(object sender, RunWorkerCompletedEventArgs e)
{
busy = false;
init = false;
}
}
class Test
{
public const string key_preCreateProperty ="preCreate";
public const string key_postCreateProperty = "postCreate";
private Outlook.Application application;
private string folderId;
private bool createData;
public Test(Outlook.Application application,string folderId,bool createData)
{
this.application = application;
this.folderId = folderId;
this.createData = createData;
}
public void Process(){
Examine();
if(createData){
CreateData();
}
}
public void CreateData()
{
List<Poco> pocos = new List<Poco>();
for (int i = 0; i < 10; i++)
{
pocos.Add(
new Poco
{
Pre = "Pre" + i,
Post = "Post" + i
}
);
}
CreateContactItems(folderId,pocos);
}
public void Examine()
{
bool preIsLoaded = false;
bool postIsLoaded = false;
Debug.WriteLine(">>>Examine");
Outlook.MAPIFolder folder = application.Session.GetFolderFromID(folderId);
Outlook.Items folderItems = folder.Items;
int i = 0;
//print UserProperties registered to the items
foreach(Outlook.ContactItem contactItem in folderItems){
var itemUserProperties = contactItem.UserProperties;
string itemUserPropertiesString = "";
foreach (var itemProp in itemUserProperties)
{
Outlook.UserProperty prop = (Outlook.UserProperty)itemProp;
itemUserPropertiesString += " " +prop.Name + " " + prop.Value + " \n";
}
//HERE: sometimes it prints only Pre on the first index of the iteration
Debug.WriteLine(string.Format("i={0} , itemUserProperties Count={1} , following UserProperties: \n{2}", i++, itemUserProperties.Count, itemUserPropertiesString));
string pre = null;
string post = null;
try
{
pre = contactItem.GetUserProperty(key_preCreateProperty);
preIsLoaded = true;
}
catch(KeyNotFoundException ex){
Debug.WriteLine("Error: Pre Not found"); //should not happen - doesn't happen
}
try
{
post = contactItem.GetUserProperty(key_postCreateProperty);
postIsLoaded = true;
}
catch (KeyNotFoundException ex)
{
Debug.WriteLine("Error: Post Not found"); //shoul not happen - happens rarely totally indeterminitic
}
Marshal.ReleaseComObject(itemUserProperties);
}
Debug.WriteLine("<<<Examine");
if (folderItems.Count > 0 && (!preIsLoaded || !postIsLoaded))
{
MessageBox.Show("preIsLoaded="+preIsLoaded +" \n" +"postIsLoaded="+postIsLoaded);
}
Marshal.ReleaseComObject(folderItems);
Marshal.ReleaseComObject(folder);
}
public void CreateContactItems(string folderId,List<Poco> pocos)
{
Outlook.MAPIFolder folder = application.Session.GetFolderFromID(folderId);
foreach(Poco poco in pocos){
CreateContactItem(folder,poco);
}
Marshal.ReleaseComObject(folder);
}
public void CreateContactItem(Outlook.MAPIFolder testFolder,Poco data)
{
Outlook.ContactItem contactItem = application.CreateItem(Outlook.OlItemType.olContactItem);
contactItem.SetUserProperty(key_preCreateProperty, data.Pre);
contactItem.Save();
Outlook.ContactItem movedContactItem = (Outlook.ContactItem)contactItem.Move(testFolder);
Marshal.ReleaseComObject(contactItem);
contactItem = movedContactItem;
contactItem.FirstName = data.Pre;
contactItem.LastName = data.Post;
contactItem.SetUserProperty(key_postCreateProperty, data.Post);
contactItem.Save();
Marshal.ReleaseComObject(contactItem);
}
}
public static class Util
{
public static void SetUserProperty(this Outlook.ContactItem item, string name, dynamic value)
{
Outlook.UserProperty property = item.UserProperties[name];
if (property == null)
{
property = item.UserProperties.Add(name, Outlook.OlUserPropertyType.olText);
}
property.Value = value;
}
public static dynamic GetUserProperty(this Outlook.ContactItem item, string name)
{
Outlook.UserProperty property = item.UserProperties[name];
if (property != null)
{
return property.Value;
}
throw new KeyNotFoundException(string.Format("UserProperty name={0} not found", name));
}
}
public class Poco
{
public string Pre
{
get;
set;
}
public string Post
{
get;
set;
}
}
}
Thank you for any replies
The Outlook Object Model cannot be used on a secondary thread within a COM add-in. Outlook 2016 will raise an exception as soon as it detects an OOM object being accessed from a secondary thread.
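One way to act on that, as a sketch: keep the shape of the question's TestCaller but drop the BackgroundWorker, so Process() runs synchronously on Outlook's main (ribbon) thread; the busy flag still guards against re-entrancy. This is just one possible restructuring, not the only fix.
public class TestCaller
{
    private readonly Outlook.Application application;
    private readonly string folderId;
    private bool init = true;
    private bool busy = false;

    public TestCaller(Outlook.Application application, string folderId)
    {
        this.application = application;
        this.folderId = folderId;
    }

    // Runs on the main thread, which is the only thread from which
    // the Outlook Object Model may be touched.
    public void Run()
    {
        if (busy) return;
        busy = true;
        try
        {
            new Test(application, folderId, init).Process();
            init = false;
        }
        finally
        {
            busy = false;
        }
    }
}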

Using Entity Framework in a Windows service application gives "the underlying provider failed to open" error

This is my service start code and my class. It is long, but the problem is only with the database/Entity Framework section, so please ignore the other lines; consider that I just want to read data from the database.
My service start code:
protected override void OnStart(string[] args)
{
System.Threading.Thread newThread = new System.Threading.Thread(new System.Threading.ThreadStart(ReadPolling.Read));
newThread.Start();
}
This is my ReadPolling.cs file:
public class ReadPolling
{
    public static webtccUsersEntities db;

    public static void Read()
    {
        try
        {
            byte[] readBuffer = new byte[1024];
            while (true)
            {
                writeToDb(readBuffer);
                System.IO.File.WriteAllText("D:\\1.txt", "read-write");
            }
        }
        catch (Exception ex)
        {
            System.IO.File.WriteAllText("D:\\1.txt", ex.Message);
        }
    }

    public static void writeToDb(byte[] userId)
    {
        db = new webtccUsersEntities();
        string _userId = "";
        foreach (byte item in userId)
        {
            if (item != 0 || item != 40)
                _userId += (39 - item).ToString();
        }
        _userId = "0009544023";
        Time time;
        if (db.Times.Where(i => i.userId == _userId).Count() > 0) // user has at least one time
        {
            time = db.Times.Where(i => i.userId == _userId).OrderBy(i => i.dayDate).OrderBy(i => i.inTime).Last(); // last time
            if (time.dayDate == DateTime.Today) // today's time
            {
                if (time.outTime == null) // wants an out for today
                {
                    time.outTime = DateTime.Now.ToLocalTime(); // an out for today
                    db.SaveChanges();
                }
                else // wants a new in/out for today
                {
                    Time newTime = new Time();
                    newTime = db.Times.Create();
                    newTime.userId = _userId;
                    newTime.dayDate = DateTime.Now.Date;
                    newTime.inTime = DateTime.Now.ToLocalTime();
                    db.Times.Add(newTime);
                    db.SaveChanges();
                }
            }
            else // not today's time
            {
                // new in for today
                Time newTime = new Time();
                newTime = db.Times.Create();
                newTime.dayDate = DateTime.Today;
                newTime.inTime = DateTime.Now.ToLocalTime();
                newTime.userId = _userId;
                db.Times.Add(newTime);
                db.SaveChanges();
            }
        }
        else // user's first time
        {
            Time firstTime = new Time();
            firstTime = db.Times.Create();
            firstTime.userId = _userId;
            firstTime.dayDate = DateTime.Now.Date;
            firstTime.inTime = DateTime.Now.ToLocalTime();
            db.Times.Add(firstTime);
            db.SaveChanges();
        }
    }
}
As you can see, if an error occurs I write it to a file named "1.txt" on my D:\ drive. So when I start the service, I open my text file and see the error "The underlying provider failed on Open. Login failed for user 'NT AUTHORITY\LOCAL SERVICE'.".
What causes the problem?
You should log the inner exception as well, as it indicates the actual connectivity error, which could be as simple as an invalid or missing connection string in your .config file...
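As a small sketch of that suggestion against the question's catch block (ex.ToString() already contains the message, stack trace and every inner exception; the explicit loop is an alternative if only the messages are wanted):
catch (Exception ex)
{
    // Full details, including every inner exception:
    System.IO.File.WriteAllText("D:\\1.txt", ex.ToString());

    // Or walk the inner-exception chain explicitly:
    var sb = new System.Text.StringBuilder();
    for (Exception current = ex; current != null; current = current.InnerException)
    {
        sb.AppendLine(current.Message);
    }
    System.IO.File.AppendAllText("D:\\1.txt", sb.ToString());
}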

How to prevent WMI quotas from overflowing?

I am using a C# application to monitor the processes launched from a particular folder, and I am using WMI for the monitoring. My WMI query looks like
SELECT * FROM __InstanceCreationEvent WITHIN 5 WHERE TargetInstance ISA 'Win32_Process' AND TargetInstance.ExecutablePath LIKE '{0}%'
where I substitute the parameter with the path to the folder I am interested in. The WMI query works fine, and I subscribe to the event notifications to do some additional processing when a process from the particular folder starts. The monitoring tool runs fine for hours, after which I start getting a WMI QuotaViolation exception in my app. Once this happens I need to restart the Windows Management Instrumentation service to get things working again.
I was initially using a
`SELECT * FROM __InstanceCreationEvent WITHIN 5 WHERE TargetInstance ISA 'Win32_Process'`
query and then checking the process's folder in the event notification; the query was modified in the hope that it would reduce the result set and therefore prevent the quota violation.
Is there any way to flush the WMI quotas periodically, or any other method by which I can prevent the QuotaViolation? What is the best way to handle a QuotaViolation scenario?
Edit:
This is my process watcher object:
public class ProcessWatcher : ManagementEventWatcher
{
private string folder = "";
// Process Events
public event ProcessEventHandler ProcessCreated; //notifies process creation
//add any more event notifications required here
// WMI WQL process query strings
static readonly string WMI_OPER_EVENT_QUERY = @"SELECT * FROM __InstanceCreationEvent WITHIN 5 WHERE TargetInstance ISA 'Win32_Process'";
static readonly string WMI_OPER_EVENT_QUERY_WITH_PROC =
WMI_OPER_EVENT_QUERY + " and TargetInstance.Name = '{0}'";
public ProcessWatcher(string basepath)
{
folder = basepath;
Init(string.Empty);
}
public ProcessWatcher(string processName, string basepath)
{
folder = basepath;
Init(processName);
}
private void Init(string processName)
{
this.Query.QueryLanguage = "WQL";
if (string.IsNullOrEmpty(processName))
{
this.Query.QueryString = string.Format(WMI_OPER_EVENT_QUERY + @" AND TargetInstance.ExecutablePath LIKE '{0}%'", folder.Replace(@"\", @"\\"));
}
else
{
this.Query.QueryString =
string.Format(WMI_OPER_EVENT_QUERY_WITH_PROC, processName);
}
this.EventArrived += new EventArrivedEventHandler(watcher_EventArrived);
}
private void watcher_EventArrived(object sender, EventArrivedEventArgs e)
{
try
{
ManagementBaseObject mObj = e.NewEvent["TargetInstance"] as ManagementBaseObject;
if (mObj != null)
{
Win32_Process proc = new Win32_Process(mObj);
if (proc != null)
{
folder = folder.ToLower() ?? "";
string exepath = (string.IsNullOrEmpty(proc.ExecutablePath)) ? "" : proc.ExecutablePath.ToLower();
if (!string.IsNullOrEmpty(folder) && !string.IsNullOrEmpty(exepath) && exepath.Contains(folder))
{
if (ProcessCreated != null) ProcessCreated(proc);
}
}
proc.Dispose();
}
mObj.Dispose();
}
catch(Exception ex) { throw; }
finally
{
e.NewEvent.Dispose();
}
}
}
I create a ProcessWatcher object at app startup, in a viewmodel constructor, like:
watch = new ProcessWatcher(BasePath);
watch.ProcessCreated += new ProcessEventHandler(procWatcher_ProcessCreated);
watch.Start();
The start call is where the QuotaViolation is raised if I try to start it a second time without restarting WMI.
At app exit, I dispose of the ProcessWatcher object like:
watch.Stop();
watch.Dispose();
The relevant stack trace is:
Exception InnerException [System.Management.ManagementException: Quota violation
at System.Management.ManagementException.ThrowWithExtendedInfo(ManagementStatus errorCode)
at System.Management.ManagementEventWatcher.Start()
at App.ProcessTabViewModel1..ctor()
System.Management.ManagementException: Quota violation
Yes, that happens. I wrote a little test program, based on your snippet after adding the missing pieces:
static void Main(string[] args) {
for (int ix = 0; ix < 1000; ++ix) {
var obj = new ProcessWatcher("");
obj.ProcessCreated += obj_ProcessCreated;
obj.Start();
}
}
Kaboom! With the exact same stack trace as you quoted. It conked out at ix == 76. In other words, the WMI quota for this query is 75. Tested in Windows 8.1. Feels about right, this is a very expensive query, none too fast either.
You are going to have to do this fundamentally differently: create only one query. One is enough; you probably got into trouble by doing this for many folders. Attack it differently, do your own filtering when you get the event. A rough example (I didn't quite get the filtering you want to do):
public class ProcessWatcher2 : IDisposable {
public delegate void ProcessCreateEvent(string name, string path);
public event ProcessCreateEvent ProcessCreated;
public ProcessWatcher2(string folder) {
this.folder = folder;
lock (locker) {
listeners.Add(this);
if (watcher == null) Initialize();
}
}
public void Dispose() {
lock (locker) {
listeners.Remove(this);
if (listeners.Count == 0) {
watcher.Stop();
watcher.Dispose();
watcher = null;
}
}
}
private static void Initialize() {
var query = new WqlEventQuery(@"SELECT * FROM __InstanceCreationEvent WITHIN 5 WHERE TargetInstance ISA 'Win32_Process'");
watcher = new ManagementEventWatcher(query);
watcher.EventArrived += watcher_EventArrived;
watcher.Start();
}
private static void watcher_EventArrived(object sender, EventArrivedEventArgs e) {
using (var proc = (ManagementBaseObject)e.NewEvent["TargetInstance"]) {
string name = (string)proc.Properties["Name"].Value;
string path = (string)proc.Properties["ExecutablePath"].Value;
lock (locker) {
foreach (var listener in listeners) {
bool filtered = false;
// Todo: implement your filtering
//...
var handler = listener.ProcessCreated;
if (!filtered && handler != null) {
handler(name, path);
}
}
}
}
}
private static ManagementEventWatcher watcher;
private static List<ProcessWatcher2> listeners = new List<ProcessWatcher2>();
private static object locker = new object();
private string folder;
}
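For example, usage of that class might look like this (the folder paths are made up, and the per-folder filtering inside watcher_EventArrived is still yours to implement, as noted above):
// Several watchers can coexist, but only one WMI query is ever registered.
var watcherA = new ProcessWatcher2(@"C:\Tools\FolderA");
var watcherB = new ProcessWatcher2(@"C:\Tools\FolderB");

watcherA.ProcessCreated += (name, path) => Console.WriteLine($"A: {name} ({path})");
watcherB.ProcessCreated += (name, path) => Console.WriteLine($"B: {name} ({path})");

// ... later, on shutdown:
watcherA.Dispose();
watcherB.Dispose(); // the last Dispose stops and releases the shared ManagementEventWatcher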

EWS streaming notifications not received

Afternoon all,
I have a Windows service which subscribes to an Office 365 email account and awaits new emails; when they arrive it processes their attachments, and all is well with the world.
But... for some reason, the application stops receiving notifications after an undetermined amount of time.
I have handled the 'OnDisconnect' event and re-establish a connection as shown in the code below, but that doesn't seem to fix this issue. The Windows service continues to run fine, and if I restart the service everything is good again, until it fails again.
This is my class for running Exchange:
public class ExchangeConnection
{
static readonly ExchangeService Service = Exchange.Service.ConnectToService(UserDataFromConsole.GetUserData(), new TraceListener());
public event EmailReceivedHandler OnEmailReceived;
public ExchangeConnection()
{
}
public void Open()
{
SetStreamingNotifications(Service);
var signal = new AutoResetEvent(false);
signal.WaitOne();
}
private void SetStreamingNotifications(ExchangeService service)
{
var streamingsubscription = service.SubscribeToStreamingNotifications(new FolderId[] { WellKnownFolderName.Inbox }, EventType.NewMail);
var connection = new StreamingSubscriptionConnection(service, 30);
connection.AddSubscription(streamingsubscription);
connection.OnNotificationEvent += OnEvent;
connection.OnSubscriptionError += OnError;
connection.OnDisconnect += OnDisconnect;
connection.Open();
}
public void MoveEmail(ItemId id, String folderName = "Archived Emails")
{
var rootFolder = Folder.Bind(Service, WellKnownFolderName.Inbox);
var archivedFolder = rootFolder.FindFolders(new FolderView(100)).FirstOrDefault(x => x.DisplayName == folderName);
if (archivedFolder == null)
{
archivedFolder = new Folder(Service) { DisplayName = folderName };
archivedFolder.Save(WellKnownFolderName.Inbox);
}
Service.MoveItems(new List<ItemId> {id}, archivedFolder.Id);
}
#region events
private void OnDisconnect(object sender, SubscriptionErrorEventArgs args)
{
//The connection is disconnected every 30 minutes, and we are unable to override this,
//so when we get disconnected we just need to reconnect again.
var connection = (StreamingSubscriptionConnection)sender;
connection.Open();
}
private void OnEvent(object sender, NotificationEventArgs args)
{
var subscription = args.Subscription;
// Loop through all item-related events.
foreach (var notification in args.Events)
{
switch (notification.EventType)
{
case EventType.NewMail:
if (notification is ItemEvent)
{
var email = Item.Bind(Service, new ItemId(((ItemEvent) notification).ItemId.UniqueId));
OnEmailReceived(new EmailReceivedArgs((EmailMessage)email));
}
break;
}
}
}
private void OnError(object sender, SubscriptionErrorEventArgs args)
{
var e = args.Exception;
Logger.LogException(e,LogEventType.Error);
}
#endregion events
}
Any help would be great, thanks.
EDIT:
After improving the error logging I have found this exception occurring:
Exception: The specified subscription was not found.
Any ideas what is causing this?
With Office 365 you need to make sure you deal with affinity: see http://msdn.microsoft.com/en-us/library/office/dn458789(v=exchg.150).aspx. Adding those headers will ensure your requests are always routed to the correct servers.
Cheers
Glen
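As a sketch of what the linked article describes (the header names are the ones documented there; the mailbox address below is a placeholder), the affinity headers are added to the ExchangeService before subscribing:
// Pin all requests for this mailbox's subscription to the right backend servers.
// The address is a placeholder; use the mailbox you are subscribing to.
Service.HttpHeaders.Add("X-AnchorMailbox", "mailbox@yourdomain.com");
Service.HttpHeaders.Add("X-PreferServerAffinity", "true");

// Then create the streaming subscription and connection exactly as in SetStreamingNotifications above.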
