I have a Windows service that monitors a directory for new files and, when a file is created or copied into that directory, renames it and does some processing.
I'm using File.Move to rename the file.
It works fine when I run the exact same code from a console application, but not when it runs as a Windows service.
Code that initializes the file watcher(s):
public void StartFileSystemWatcher()
{
    listFileSystemWatcher = new List<FileSystemWatcher>();
    foreach (var customFolder in listFolders)
    {
        var dir = new DirectoryInfo(customFolder.FolderPath);
        if (customFolder.FolderEnabled && dir.Exists)
        {
            var fileSWatch = new FileSystemWatcher
            {
                Filter = customFolder.FolderFilter,
                Path = customFolder.FolderPath,
                NotifyFilter = NotifyFilters.LastWrite | NotifyFilters.FileName | NotifyFilters.DirectoryName,
                EnableRaisingEvents = true
            };
            fileSWatch.Created += (senderObj, fileSysArgs) => FileSWatch_Created(senderObj, fileSysArgs, customFolder.VSTSUrl);
            listFileSystemWatcher.Add(fileSWatch);
            logger.Info(string.Format("Starting to monitor files with extension ({0}) in the folder ({1})", fileSWatch.Filter, fileSWatch.Path));
        }
        else
        {
            logger.Info(string.Format("File system monitor cannot start because the folder ({0}) does not exist", customFolder.FolderPath));
        }
    }
}

private void FileSWatch_Created(object sender, FileSystemEventArgs e, string vstsUrl)
{
    PostFileInfoToVSTS(e.FullPath, vstsUrl);
}
Code that runs when the file is created:
private void PostFileInfoToVSTS(string filePath, string vstsUrl)
{
    try
    {
        var fileName = filePath.Substring(filePath.LastIndexOf("\\") + 1);
        var fileParts = fileName.Split(new char[] { '.' });
        if (fileParts.Length == 6)
        {
            //rename the file
            var fileNewName = $"{fileParts[2]}.{fileParts[3]}.{fileParts[4]}.TXT";
            var fileNewPath = $"{Path.GetDirectoryName(filePath)}\\oldfiles\\{fileNewName}";
            File.Move(filePath, fileNewPath);
            //prepare vsts request
            var request = new VSTSRequest();
            request.Variables.Region.Value = fileParts[0];
            request.Variables.Sparc.Value = fileParts[1];
            request.Variables.SystemName.Value = fileParts[2];
            request.Variables.Type.Value = fileParts[3];
            request.Variables.Name.Value = fileParts[4];
            request.Variables.Filename.Value = fileNewName;
            var httpWebRequest = (HttpWebRequest)WebRequest.Create(vstsUrl);
            httpWebRequest.ContentType = "application/json";
            httpWebRequest.Method = "POST";
            httpWebRequest.Headers.Add("Authorization", ConfigurationManager.AppSettings["VSTSAuthentication"]);
            //post to vsts
            using (var streamWriter = new StreamWriter(httpWebRequest.GetRequestStream()))
            {
                streamWriter.Write(JsonConvert.SerializeObject(request));
                streamWriter.Flush();
                streamWriter.Close();
            }
            //read response
            var httpResponse = (HttpWebResponse)httpWebRequest.GetResponse();
            using (var streamReader = new StreamReader(httpResponse.GetResponseStream()))
            {
                var result = streamReader.ReadToEnd();
                var json = JsonConvert.DeserializeObject<dynamic>(result);
                logger.Info($"File '{fileNewPath}' processed. ReleaseId: {json.id}.");
            }
        }
        else
        {
            logger.Info($"File '{fileName}' wasn't processed. Name is not in the correct format.");
        }
    }
    catch (Exception ex)
    {
        logger.Error(ex);
    }
}
The first time I copy a file into that directory, it works fine, but the following times, I get a System.IO.IOException: The process cannot access the file because it is being used by another process.
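A common cause of this error is that FileSystemWatcher's Created event fires while the copying process still has the file open, so File.Move races with a copy that is still in progress. One frequently used mitigation, shown here only as a sketch (the helper name, retry count, and delay are illustrative and not part of the original code), is to retry opening the file exclusively until the lock is released and only then move it:
// Hypothetical helper: waits until the file can be opened exclusively, then moves it.
private static bool TryMoveWhenReady(string sourcePath, string destinationPath, int maxAttempts = 10, int delayMs = 500)
{
    for (var attempt = 0; attempt < maxAttempts; attempt++)
    {
        try
        {
            // Opening with FileShare.None fails while another process still holds the file open.
            using (File.Open(sourcePath, FileMode.Open, FileAccess.Read, FileShare.None))
            {
            }
            File.Move(sourcePath, destinationPath);
            return true;
        }
        catch (IOException)
        {
            System.Threading.Thread.Sleep(delayMs);
        }
    }
    return false;
}
PostFileInfoToVSTS could call a helper like this in place of the bare File.Move and log a warning if the file never becomes available.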
Related
I am trying to move data, for example:
Source = "Uploads/Photos/" to Destination = "Uploads/mytest/"
I get an error with this, but when I give a specific file it works.
Basically, I want to move a folder with all of its files.
My code is below:
public async Task<MoveResponse> MoveObject(MoveRequest moveRequest)
{
    MoveResponse moveResponse = new MoveResponse();
    CopyObjectRequest copyObjectRequest = new CopyObjectRequest
    {
        SourceBucket = moveRequest.BucketName,
        DestinationBucket = moveRequest.BucketName + "/" + moveRequest.Destination,
        SourceKey = moveRequest.Source,
        DestinationKey = moveRequest.Source,
    };
    var response1 = await client.CopyObjectAsync(copyObjectRequest).ConfigureAwait(false);
    if (response1.HttpStatusCode != System.Net.HttpStatusCode.OK)
    {
        moveResponse.IsError = true;
        moveResponse.ErrorMessage = "Files could not moved to destination!";
        return moveResponse;
    }
    return moveResponse;
}
I hope you are using the high-level S3 APIs.
Check out this sample code:
private void uploadFolderToolStripMenuItem_Click(object sender, EventArgs e)
{
    string directoryPath = textBoxBasePath.Text + listBoxFolder.SelectedItem.ToString().Replace("[", "").Replace("]", "");
    string bucketName = comboBoxBucketNames.Text;
    string FolderName = listBoxFolder.SelectedItem.ToString().Replace("[", "").Replace("]", "");
    try
    {
        TransferUtility directoryTransferUtility = new TransferUtility(new AmazonS3Client(AwsAccessKeyID, AwsSecretAccessKey, RegionEndpoint.USEast1));
        TransferUtilityUploadDirectoryRequest request = new TransferUtilityUploadDirectoryRequest
        {
            BucketName = bucketName,
            KeyPrefix = FolderName,
            StorageClass = S3StorageClass.StandardInfrequentAccess,
            ServerSideEncryptionMethod = ServerSideEncryptionMethod.AES256,
            Directory = directoryPath,
            SearchOption = SearchOption.AllDirectories,
            SearchPattern = "*.*",
            CannedACL = S3CannedACL.AuthenticatedRead
        };
        ListMultipartUploadsRequest req1 = new ListMultipartUploadsRequest
        {
            BucketName = bucketName
        };
        var t = Task.Factory.FromAsync(directoryTransferUtility.BeginUploadDirectory, directoryTransferUtility.EndUploadDirectory, request, null);
        t.Wait();
        MessageBox.Show(string.Format("The Directory '{0}' is successfully uploaded", FolderName));
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
    finally
    { }
}
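Note that the sample above uploads a local directory to S3; the original question, though, is about moving objects that are already in the bucket. S3 has no native folder move, so the usual pattern is to list every key under the source prefix, copy each object to the destination prefix, and then delete the originals. A rough sketch of that approach using the standard AWSSDK.S3 async calls (the method name and prefix handling here are illustrative assumptions, not code from the original post):
// Illustrative sketch: "move" every object under sourcePrefix to destinationPrefix within the same bucket.
public async Task MoveFolderAsync(IAmazonS3 client, string bucketName, string sourcePrefix, string destinationPrefix)
{
    var listRequest = new ListObjectsV2Request { BucketName = bucketName, Prefix = sourcePrefix };
    ListObjectsV2Response listResponse;
    do
    {
        listResponse = await client.ListObjectsV2Async(listRequest);
        foreach (var s3Object in listResponse.S3Objects)
        {
            var destinationKey = destinationPrefix + s3Object.Key.Substring(sourcePrefix.Length);
            await client.CopyObjectAsync(new CopyObjectRequest
            {
                SourceBucket = bucketName,
                SourceKey = s3Object.Key,
                DestinationBucket = bucketName,
                DestinationKey = destinationKey
            });
            // Delete the original only after the copy succeeded.
            await client.DeleteObjectAsync(bucketName, s3Object.Key);
        }
        listRequest.ContinuationToken = listResponse.NextContinuationToken;
    } while (listResponse.IsTruncated);
}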
I want to run a .cmd file using an Azure Function, and I want to run it as a background process instead of in the function's main process.
I have already saved the .cmd file on the Azure Functions platform using the Kudu editor. I can run this locally, but after deploying it does not work at all (and I am not getting any error).
string cmdFileLocation = @"D:\home\jobs.cmd";
Process proc = new Process();
ProcessStartInfo info = new ProcessStartInfo();
try
{
    info.FileName = cmdFileLocation;
    info.Arguments = name;
    info.WindowStyle = ProcessWindowStyle.Minimized;
    info.UseShellExecute = false;
    proc.StartInfo = info;
    info.RedirectStandardOutput = true;
    info.RedirectStandardError = true;
    proc.Start();
    proc.WaitForExit();
}
catch (Exception ex)
{
    log.LogInformation("Exception Occurred :{0},{1}", ex.Message, ex.StackTrace.ToString());
}
For testing there is a curl command in the .cmd file. The curl triggers on my local machine via the Azure Function, as I can see the request arriving (https://webhook.site), but after deploying nothing happens.
Here is an easy way of getting any .exe/.cmd running as a web service. You just specify the input parameters to your exe/cmd in a configuration file. You can use binary files as inputs to the exe by specifying a URL to download it from.
Here's what the configuration file looks like
{
    "name": "consoleAppToFunctions",
    "input": {
        "command": "ffmpeg.exe -i {inputFile} {output1}",
        "arguments": {
            "inputFile": {
                "url": "https://1drv.ms/v/<link-to-file>"
                //binary file input
            },
            "output1": {
                "localfile": "out.mp3"
                //stored in a temp folder
            }
        }
    },
    "output": {
        "folder": "outputFolder",
        "binaryFile": {
            "returnFile": "*.mp3",
            "returnFileName": "yourFile.mp3"
        }
    }
}
Here is the Azure Function code for the same:
#r "Newtonsoft.Json"
using System.Net;
using Newtonsoft.Json;
using System.IO;
using System.Diagnostics;
//Code from https://github.com/Azure-Samples/functions-dotnet-migrating-console-apps/edit/master/code/run.csx
//Written by Ambrose http://github.com/efficientHacks and Murali http://github.com/muralivp
public class ExeArg
{
public string Name { get; set; }
public string Type { get; set; }
public string Value { get; set; }
}
public static async Task<HttpResponseMessage> Run(HttpRequestMessage req, TraceWriter log)
{
log.Info("C# HTTP trigger function processed a request.");
string localPath = req.RequestUri.LocalPath;
string functionName = localPath.Substring(localPath.LastIndexOf('/')+1);
var json = File.ReadAllText(string.Format(@"D:\home\site\wwwroot\{0}\FunctionConfig.json", functionName));
var config = JsonConvert.DeserializeObject<dynamic>(json);
var functionArguments = config.input.arguments;
var localOutputFolder = Path.Combine(@"d:\home\data", config.output.folder.Value, Path.GetFileNameWithoutExtension(Path.GetTempFileName()));
Directory.CreateDirectory(localOutputFolder);
var workingDirectory = Path.Combine(@"d:\home\site\wwwroot", functionName + "\\bin");
Directory.SetCurrentDirectory(workingDirectory);//fun fact - the default working directory is d:\windows\system32
var command = config.input.command.Value;
var argList = new List<ExeArg>();
//Parse the config file's arguments
//handle file system, local file etc. and construct the input params for the actual calling of the .exe
foreach (var arg in functionArguments)
{
var exeArg = new ExeArg();
exeArg.Name = arg.Name;
var value = (Newtonsoft.Json.Linq.JObject)arg.Value;
var property = (Newtonsoft.Json.Linq.JProperty)value.First;
exeArg.Type = property.Name;
exeArg.Value = property.Value.ToString();
var valueFromQueryString = await getValueFromQuery(req, exeArg.Name);
log.Info("valueFromQueryString name=" + exeArg.Name);
log.Info("valueFromQueryString val=" + valueFromQueryString);
if(!string.IsNullOrEmpty(valueFromQueryString))
{
exeArg.Value = valueFromQueryString;
log.Info(exeArg.Name + " " + valueFromQueryString);
}
if(exeArg.Type.ToLower() == "localfile" || exeArg.Type.ToLower() == "localfolder")
{
exeArg.Value = Path.Combine(localOutputFolder, exeArg.Value);
exeArg.Type = "string";
}
if(string.IsNullOrEmpty(exeArg.Value))
{
//throw exception here
}
argList.Add(exeArg);
}
//call the exe
Dictionary<string, string> paramList = ProcessParameters(argList, localOutputFolder);
foreach (string parameter in paramList.Keys)
{
command = command.Replace(parameter, paramList[parameter]);
}
string commandName = command.Split(' ')[0];
string arguments = command.Split(new char[] { ' ' }, 2)[1];
log.Info("the command is " + command);
log.Info("the working dir is " + workingDirectory);
Process p = new Process();
p.StartInfo.UseShellExecute = false;
p.StartInfo.RedirectStandardOutput = true;
p.StartInfo.FileName = commandName;
p.StartInfo.Arguments = arguments;
p.Start();
string output = p.StandardOutput.ReadToEnd();
p.WaitForExit();
File.WriteAllText(localOutputFolder+"\\out.txt",output);
//handle return file
log.Info("handling return file localOutputFolder=" + localOutputFolder);
string outputFile = config.output.binaryFile.returnFile.Value;
string outputFileName = config.output.binaryFile.returnFileName.Value;
var path = Directory.GetFiles(localOutputFolder, outputFile)[0];
log.Info("returning this file " + path);
var result = new FileHttpResponseMessage(localOutputFolder);
var stream = new FileStream(path, FileMode.Open, FileAccess.Read);
result.Content = new StreamContent(stream);
result.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/octet-stream");
result.Content.Headers.ContentDisposition = new System.Net.Http.Headers.ContentDispositionHeaderValue("attachment")
{
FileName = outputFileName
};
return result;
}
private static Dictionary<string, string> ProcessParameters(List<ExeArg> arguments, string outputFolder)
{
Dictionary<string, string> paramList = new Dictionary<string, string>();
foreach (var arg in arguments)
{
switch (arg.Type)
{
case "url":
string downloadedFileName = ProcessUrlType((string)arg.Value, outputFolder);
paramList.Add("{" + arg.Name + "}", downloadedFileName);
break;
case "string":
paramList.Add("{" + arg.Name + "}", arg.Value.ToString());
break;
default:
break;
}
}
return paramList;
}
//you can modify this method to handle different URLs if necessary
private static string ProcessUrlType(string url, string outputFolder)
{
Directory.CreateDirectory(outputFolder);
string downloadedFile = Path.Combine(outputFolder, Path.GetFileName(Path.GetTempFileName()));
//for oneDrive links
HttpWebRequest webRequest = (HttpWebRequest)HttpWebRequest.Create(url);
webRequest.AllowAutoRedirect = false;
WebResponse webResp = webRequest.GetResponse();
webRequest = (HttpWebRequest)HttpWebRequest.Create(webResp.Headers["Location"].Replace("redir", "download"));
webResp = webRequest.GetResponse();
string fileUrl = webResp.Headers["Content-Location"];
WebClient webClient = new WebClient();
webClient.DownloadFile(fileUrl, downloadedFile);
return downloadedFile;
}
private async static Task<string> getValueFromQuery(HttpRequestMessage req, string name)
{
// parse query parameter
string value = req.GetQueryNameValuePairs()
.FirstOrDefault(q => string.Compare(q.Key, name, true) == 0)
.Value;
//if not found in query string, look for it in the body (json)
// Get request body
dynamic data = await req.Content.ReadAsAsync<object>();
// Set name to query string or body data
value = value ?? data?[name];
return value;
}
//this is to delete the folder after the response
//thanks to: https://stackoverflow.com/a/30522890/2205372
public class FileHttpResponseMessage : HttpResponseMessage
{
private string filePath;
public FileHttpResponseMessage(string filePath)
{
this.filePath = filePath;
}
protected override void Dispose(bool disposing)
{
base.Dispose(disposing);
Content.Dispose();
Directory.Delete(filePath,true);
}
}
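Based on the code above, calling the wrapper is just an HTTP request to the function's route: the last segment of the URL selects which FunctionConfig.json to load, and query-string parameters whose names match the configured arguments override their values. For the sample configuration, an invocation might look roughly like this (host name and function key are placeholders):
https://<your-function-app>.azurewebsites.net/api/consoleAppToFunctions?code=<function-key>&inputFile=https://1drv.ms/v/<link-to-file>
The response then streams back the first file matching returnFile from the temporary output folder, named according to returnFileName.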
Here you can find more on this. Hope it helps.
I have the following function that uploads files to Dropbox and returns shared links to these files.
private async Task<string> Upload(DropboxClient dbx, string localPath, string remotePath)
{
    const int ChunkSize = 4096 * 1024;
    using (var fileStream = File.Open(localPath, FileMode.Open))
    {
        if (fileStream.Length <= ChunkSize)
        {
            WriteMode mode = new WriteMode();
            FileMetadata fileMetadata = await dbx.Files.UploadAsync(remotePath, body: fileStream, mode: mode.AsAdd, autorename: true);
            //set the expiry date
            var existingDoc = await dbx.Files.GetMetadataAsync(remotePath);
            if (existingDoc.IsFile)
            {
                var sharedLink = dbx.Sharing.ListSharedLinksAsync(remotePath);
                var settings = new ListSharedLinksArg(remotePath);
                ListSharedLinksResult listSharedLinksResult = await dbx.Sharing.ListSharedLinksAsync(remotePath);
                if (listSharedLinksResult.Links.Count > 0)
                {
                    return listSharedLinksResult.Links[0].Url;
                }
                else
                {
                    var settings2 = new SharedLinkSettings(expires: DateTime.Today.AddDays(7));
                    SharedLinkMetadata sharedLinkMetadata = await dbx.Sharing.CreateSharedLinkWithSettingsAsync(remotePath, settings2);
                    return sharedLinkMetadata.Url;
                }
            }
            else
            {
                var settings = new SharedLinkSettings(expires: DateTime.Today.AddDays(7));
                SharedLinkMetadata sharedLinkMetadata = await dbx.Sharing.CreateSharedLinkWithSettingsAsync(fileMetadata.PathLower, settings);
                return sharedLinkMetadata.Url;
            }
        }
        else
        {
            await this.ChunkUpload(dbx, remotePath, fileStream, ChunkSize);
        }
        return "error";
    }
}
But it's not working properly: when it gets to the ListSharedLinksAsync call, it stops working without throwing any error.
I noticed that the files I try to upload are not accessible after it hangs; I get a "used by another process" error...
What am I doing wrong?
It looks like you are missing an await on this line, which could be causing a deadlock:
var sharedLink = dbx.Sharing.ListSharedLinksAsync(remotePath);
Should be
var sharedLink = await dbx.Sharing.ListSharedLinksAsync(remotePath);
I'm uploading all my local files to S3 using this code:
static string bucketName = "s3bucket";
static string directoryPath = @"C:\data\";
private void btnUpload_Click(object sender, EventArgs e) {
    try {
        TransferUtility directoryTransferUtility =
            new TransferUtility(new AmazonS3Client(Amazon.RegionEndpoint.USWest2));
        TransferUtilityUploadDirectoryRequest request =
            new TransferUtilityUploadDirectoryRequest {
                BucketName = bucketName,
                Directory = directoryPath,
                SearchPattern = "*.xml"
            };
        directoryTransferUtility.UploadDirectory(request);
        MessageBox.Show("Upload completed");
    } catch (AmazonS3Exception ex) {
        MessageBox.Show(ex.Message);
    }
}
If I run the code again, all files are re-uploaded to S3, which is a bad idea if, say, we have 1000+ files in our local directory.
I know I can compare file by file because AWS stores the MD5 of each file. So my question is: can I do this with a command that comes preinstalled? Do I have to compare file by file recursively? The sync command exists in the awscli bundle (aws s3 sync ./sourceDir s3://bucketname/); does it exist in C# as well?
All the files will be replaced; see the S3 docs. But if you still want to check files before uploading, you can use a function like this:
ListObjectsRequest request = new ListObjectsRequest()
{
    BucketName = "Your Bucket name",
    Delimiter = "/",
    Prefix = "location"
};

public bool CheckFile(ListObjectsRequest request)
{
    bool res = false;
    try
    {
        ListObjectsResponse response = s3client.ListObjects(request);
        if (response.S3Objects != null && response.S3Objects.Count > 0)
        {
            S3Object o = response.S3Objects.Where(x => x.Size != 0 && x.LastModified > DateTime.Now.AddHours(-24)).FirstOrDefault();
            if (o != null)
            {
                //you can use these fields
                // o.Key; //o.Size, //o.LastModified
                res = true;
            }
        }
        else
        {
            res = false;
        }
    }
    catch (Exception)
    {
        throw;
    }
    return res;
}
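If you do want to skip files that are already up to date, there is, as far as I know, no direct equivalent of aws s3 sync in the .NET SDK, but you can compare the local file's MD5 hash against the ETag that ListObjects returns for each key. A rough sketch of that check (the bucket, key, and method name are assumptions; note that ETags of multipart uploads are not plain MD5 hashes, so this is best-effort):
// Illustrative: returns true when the local file's MD5 matches the S3 object's ETag.
private static bool IsAlreadyUploaded(IAmazonS3 s3Client, string bucketName, string key, string localFilePath)
{
    var response = s3Client.ListObjects(new ListObjectsRequest { BucketName = bucketName, Prefix = key });
    var existing = response.S3Objects.FirstOrDefault(o => o.Key == key);
    if (existing == null) return false;
    using (var md5 = System.Security.Cryptography.MD5.Create())
    using (var stream = File.OpenRead(localFilePath))
    {
        var localHash = BitConverter.ToString(md5.ComputeHash(stream)).Replace("-", "").ToLowerInvariant();
        return existing.ETag.Trim('"').ToLowerInvariant() == localHash;
    }
}
The upload code could then call a check like this per file and hand only the changed files to TransferUtility.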
I need a good example of WCF streaming file transfer.
I have found several and tried them, but the posts are old and I am working on .NET 4 and IIS 7, so there are some problems.
Can you give me a good, up-to-date example of that?
The following answers detail a few techniques for posting binary data to a RESTful service.
Post binary data to a RESTful application
What is a good way to transfer binary data to a HTTP REST API service?
Bad idea to transfer large payload using web services?
The following code is a sample of how you could write a RESTful WCF service; it is by no means complete, but it gives you an indication of where you could start.
Sample Service, note that this is NOT production ready code.
[ServiceContract]
[AspNetCompatibilityRequirements(RequirementsMode = AspNetCompatibilityRequirementsMode.Allowed)]
[ServiceBehavior(InstanceContextMode = InstanceContextMode.PerCall)]
public class FileService
{
private IncomingWebRequestContext m_Request;
private OutgoingWebResponseContext m_Response;
[WebGet(UriTemplate = "{appName}/{id}?action={action}")]
public Stream GetFile(string appName, string id, string action)
{
var repository = new FileRepository();
var response = WebOperationContext.Current.OutgoingResponse;
var result = repository.GetById(int.Parse(id));
if (action != null && action.Equals("download", StringComparison.InvariantCultureIgnoreCase))
{
response.Headers.Add("Content-Disposition", string.Format("attachment; filename={0}", result.Name));
}
response.Headers.Add(HttpResponseHeader.ContentType, result.ContentType);
response.Headers.Add("X-Filename", result.Name);
return result.Content;
}
[WebInvoke(UriTemplate = "{appName}", Method = "POST")]
public void Save(string appName, Stream fileContent)
{
try
{
if (WebOperationContext.Current == null) throw new InvalidOperationException("WebOperationContext is null.");
m_Request = WebOperationContext.Current.IncomingRequest;
m_Response = WebOperationContext.Current.OutgoingResponse;
var file = CreateFileResource(fileContent, appName);
if (!FileIsValid(file)) throw new WebFaultException(HttpStatusCode.BadRequest);
SaveFile(file);
SetStatusAsCreated(file);
}
catch (Exception ex)
{
if (ex.GetType() == typeof(WebFaultException)) throw;
if (ex.GetType().IsGenericType && ex.GetType().GetGenericTypeDefinition() == typeof(WebFaultException<>)) throw;
throw new WebFaultException<string>("An unexpected error occurred.", HttpStatusCode.InternalServerError);
}
}
private FileResource CreateFileResource(Stream fileContent, string appName)
{
var result = new FileResource();
fileContent.CopyTo(result.Content);
result.ApplicationName = appName;
result.Name = m_Request.Headers["X-Filename"];
result.Location = @"C:\SomeFolder\" + result.Name;
result.ContentType = m_Request.Headers[HttpRequestHeader.ContentType] ?? this.GetContentType(result.Name);
result.DateUploaded = DateTime.Now;
return result;
}
private string GetContentType(string filename)
{
// this should be replaced with some form of logic to determine the correct file content type (I.E., use registry, extension, xml file, etc.,)
return "application/octet-stream";
}
private bool FileIsValid(FileResource file)
{
var validator = new FileResourceValidator();
var clientHash = m_Request.Headers[HttpRequestHeader.ContentMd5];
return validator.IsValid(file, clientHash);
}
private void SaveFile(FileResource file)
{
// This will persist the meta data about the file to a database (I.E., size, filename, file location, etc)
new FileRepository().AddFile(file);
}
private void SetStatusAsCreated(FileResource file)
{
var location = new Uri(m_Request.UriTemplateMatch.RequestUri.AbsoluteUri + "/" + file.Id);
m_Response.SetStatusAsCreated(location);
}
}
Sample Client, note that this is NOT production ready code.
// *********************************
// Sample Client
// *********************************
private void UploadButton_Click(object sender, EventArgs e)
{
    var uri = "http://dev-fileservice/SampleApplication";
    var fullFilename = @"C:\somefile.txt";
    var fileContent = File.ReadAllBytes(fullFilename);
    using (var webClient = new WebClient())
    {
        try
        {
            webClient.Proxy = null;
            webClient.Headers.Add(HttpRequestHeader.ContentMd5, this.CalculateFileHash());
            webClient.Headers.Add("X-DaysToKeep", DurationNumericUpDown.Value.ToString());
            webClient.Headers.Add("X-Filename", Path.GetFileName(fullFilename));
            webClient.UploadData(uri, "POST", fileContent);
            var fileUri = webClient.ResponseHeaders[HttpResponseHeader.Location];
            Console.WriteLine("File can be downloaded at " + fileUri);
        }
        catch (Exception ex)
        {
            var exception = ex.Message;
        }
    }
}

private string CalculateFileHash()
{
    var hash = MD5.Create().ComputeHash(File.ReadAllBytes(@"C:\somefile.txt"));
    var sb = new StringBuilder();
    for (int i = 0; i < hash.Length; i++)
    {
        sb.Append(hash[i].ToString("x2"));
    }
    return sb.ToString();
}
private void DownloadFile()
{
    var uri = "http://dev-fileservice/SampleApplication/1"; // this is the URL returned by the RESTful file service
    using (var webClient = new WebClient())
    {
        try
        {
            webClient.Proxy = null;
            var fileContent = webClient.DownloadData(uri);
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.Message);
        }
    }
}