I have 8 jobs in my project. They start in the Application_Start event and repeat forever. Two of them use the same class: one job runs every 7 seconds and the other every 30 seconds, but at one point they call the same method, which reads a date from an XML file.
If that date is older than the current date, the method changes the session ID and pushes the expiry 20 minutes forward. Because both jobs try to enter and process at the same time, the session ID keeps changing.
What I need is the following:
1 - Enter the method.
2 - If the session needs to be changed, pause every task except the current one.
3 - Change the session and save it to the XML file.
4 - Restart or resume all the paused tasks.
private DateTimeOffset g_canlimacgetir = DateTimeOffset.UtcNow.AddSeconds(0);
private DateTimeOffset g_canliorangetir = DateTimeOffset.UtcNow.AddSeconds(45);
private void CanliOranlariGetir()
{
    try
    {
        ISchedulerFactory schfack = new StdSchedulerFactory();
        IScheduler scheduler = schfack.GetScheduler();
        IJobDetail jobdetay = JobBuilder.Create<CanliOranlar>()
            .WithIdentity("canliorangetir")
            .Build();
        ITrigger trigger = TriggerBuilder.Create()
            .WithSimpleSchedule(s => s.WithIntervalInSeconds(7).RepeatForever())
            .StartAt(g_canliorangetir)
            .Build();
        scheduler.ScheduleJob(jobdetay, trigger);
        scheduler.Start();
    }
    catch (SchedulerException)
    {
        // the original try block had no catch/finally and would not compile; log as needed
    }
}
private void CanliMaclariGetir()
{
    try
    {
        ISchedulerFactory schfack = new StdSchedulerFactory();
        IScheduler scheduler = schfack.GetScheduler();
        IJobDetail jobdetay = JobBuilder.Create<CanliMaclar>()
            .WithIdentity("canlimacgetir")
            .Build();
        ITrigger trigger = TriggerBuilder.Create()
            .WithSimpleSchedule(s => s.WithIntervalInSeconds(30).RepeatForever())
            .StartAt(g_canlimacgetir)
            .Build();
        scheduler.ScheduleJob(jobdetay, trigger);
        scheduler.Start();
    }
    catch (SchedulerException)
    {
        // the original try block had no catch/finally and would not compile; log as needed
    }
}
private Headers GetTheToken()
{
Headers h = new Headers();
HttpWebResponse response = null;
try
{
const string session_site = "";
HttpWebRequest request = (HttpWebRequest)WebRequest.Create(session_site);
Uri uri = new Uri("");
request.Method = "GET";
request.UserAgent = "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:50.0) Gecko/20100101 Firefox/50.0";
request.Accept = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8";
request.ContentType = "text/html; charset=utf-8";
request.ContentLength = 0;
request.CookieContainer = new CookieContainer();
Cookie trackerID = new Cookie("trackerId", ConfigurationManager.AppSettings["TrackerID"]) { Domain = uri.Host };
request.CookieContainer.Add(trackerID); // the original created the cookie but never attached it to the request
DateTime tarih = DateTime.Now;
request.Timeout = 5000;
ServicePointManager.Expect100Continue = false;
ServicePointManager.SecurityProtocol = SecurityProtocolType.Tls12;
ServicePointManager.ServerCertificateValidationCallback = new RemoteCertificateValidationCallback(delegate { return true; });
response = (HttpWebResponse)request.GetResponse();
// Read the Set-Cookie header before the response is closed; the original closed and
// disposed the response first and then read Headers. The body itself is never parsed,
// so it is simply drained and the reader disposed.
DateTime sesstimeout = DateTime.Now.AddSeconds(1199);
string getCookieHeader = response.Headers[HttpResponseHeader.SetCookie];
using (StreamReader reader = new StreamReader(response.GetResponseStream(), Encoding.UTF8))
{
    reader.ReadToEnd();
}
char[] ayiricilar = { '=', ',', ';' };
string[] parcalar = getCookieHeader.Split(ayiricilar);
int ses = 0, rv = 0, xt = 0, xct = 0;
if (parcalar.Length > 20)
{
h.session_timeout = sesstimeout;
for (int i = 0; i < parcalar.Length; i++)
{
if (parcalar[i] == "ASP.NET_SessionId") { h.sessionID = parcalar[i + 1]; ses = 1; }
else if (parcalar[i] == "__RequestVerificationToken") { h.request_verification = parcalar[i + 1]; rv = 1; }
else if (parcalar[i] == "XSRF-TOKEN") { h.xsrf_token = parcalar[i + 1]; xt = 1; }
else if (parcalar[i] == "XSRF-COOKIE-TOKEN") { h.xsrf_cookie_token = parcalar[i + 1]; xct = 1; }
if (ses == 1 && rv == 1 && xt == 1 && xct == 1) break; // all four cookies found
}
}
response.Dispose();
XmlDocument doc = new XmlDocument();
XmlReader xmlReader = new XmlTextReader(HostingEnvironment.MapPath("~/xml/values.xml"));
doc.Load(xmlReader);
xmlReader.Close();
XmlNodeList InfoNode = doc.SelectNodes("SessionInfo/Info");
InfoNode[0].Attributes["SessionExpires"].Value = h.session_timeout.ToString();
InfoNode[0].Attributes["SessionID"].Value = h.sessionID;
InfoNode[0].Attributes["XSRF-TOKEN"].Value = h.xsrf_token;
InfoNode[0].Attributes["XSRF-COOKIE-TOKEN"].Value = h.xsrf_cookie_token;
InfoNode[0].Attributes["_requestVerification"].Value = h.request_verification.ToString();
doc.Save(HostingEnvironment.MapPath("~/xml/values.xml"));
}
catch (Exception)
{
    // the try block had no catch/finally in the original and would not compile;
    // dispose the response if the request failed part-way
    if (response != null) response.Dispose();
}
return h;
}
To pause and resume all jobs you can use:
scheduler.PauseAll();
scheduler.ResumeAll();
But if you want to pause and resume a specific job, use its key:
scheduler.PauseJob(jobdetay.Key);
scheduler.ResumeJob(jobdetay.Key);
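Putting that together with the steps in the question, here is a minimal sketch (not the original poster's code) of how the shared session-renewal method could pause the other job while it rewrites the XML. It assumes the synchronous Quartz.NET 2.x API, that the scheduler instance is reachable from the method, and a hypothetical ReadSessionExpiryFromXml helper; the lock guarantees only one job performs the renewal even if both fire at once.
private static readonly object _sessionLock = new object();

private void RenewSessionIfExpired(IScheduler scheduler)
{
    // Whichever job arrives second waits here instead of renewing the session again.
    lock (_sessionLock)
    {
        // Re-check inside the lock: the first job may already have renewed the session.
        if (ReadSessionExpiryFromXml() > DateTime.Now) // hypothetical helper
            return;

        // 2 - pause the jobs so no trigger fires mid-update (a running instance finishes normally)
        scheduler.PauseJob(new JobKey("canliorangetir"));
        scheduler.PauseJob(new JobKey("canlimacgetir"));
        try
        {
            GetTheToken(); // 3 - change the session and save it to values.xml
        }
        finally
        {
            // 4 - resume the paused jobs even if the renewal failed
            scheduler.ResumeJob(new JobKey("canliorangetir"));
            scheduler.ResumeJob(new JobKey("canlimacgetir"));
        }
    }
}
Strictly speaking the lock alone already serializes the two jobs; pausing is only needed if you also want the triggers to stop firing while the session is being replaced.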
I am having a problem calling a method from a list of Tasks. I have a method that creates N tasks. In each task I perform some operations that end with fetching data via HttpWebRequest and writing that data to a file. I use lock objects to guard access to shared resources such as variables. Everything performs great except the call to the method that creates and executes an HttpWebRequest (method GetData). Whenever I don't lock the call to GetData, it seems that some data/files are skipped. For example:
With the lock object I get files 1, 2, 3 and 4
Without the lock object I get files 2, 4 and 3
Here's the code for the method that creates the tasks
private object lockObjectWebRequest = new object();
private object lockObjectTransactions = new object();
public List<Task> ExtractLoanTransactionsData(string URLReceived, string Headers, string Body)
{
List<Task> Tasks = new List<Task>();
try
{
int Limit = 0;
int OffsetItemsTotal = 0;
int NumberOftasks = 4;
// Create the task to run in parallel
for (int i = 0; i <= NumberOftasks; i++)
{
int OffsetCalculated = 0;
if (i > 0)
{
OffsetCalculated = Limit * i;
}
Tasks.Add(Task.Factory.StartNew(() =>
{
string URL = URLReceived + "&offset=" + OffsetCalculated.ToString() + "&limit=" + Limit.ToString();
string Output = string.Empty;
lock (lockObjectWebRequest)
{
Output = GetData(URL, Headers,Body);
}
if (Output != "[]")
{
lock (lockObjectTransactions)
{
Identifier++;
Job.Identifier = Identifier;
// write to file
string json = JValue.Parse(Output).ToString(Formatting.Indented);
string FileName = OffSet.ToString() + Identifier;
string Path = @"C:\FileFolder\" + FileName + ".json";
File.WriteAllText(Path, json);
}
}
}));
}
}
catch (Exception ex)
{
Tasks = new List<Task>();
}
return Tasks;
}
Here's the code that performs the HttpWebRequest:
public string GetData(string URL, string Headers, string Body)
{
string Data = string.Empty;
Headers = Headers.Trim('{').Trim('}');
string[] HeadersSplit = Headers.Split(new char[] { ',', ':' });
HttpWebRequest WebRequest = (HttpWebRequest)HttpWebRequest.Create(URL);
WebRequest.Credentials = new NetworkCredential();
WebRequest.Method = "POST";
HttpWebResponse WebResponse;
// Set necessary Request Headers
for (int i = 0; i < HeadersSplit.Length; i = i + 2)
{
string HeaderPart1 = HeadersSplit[i].Replace("\"", "").Trim();
string HeaderPart2 = HeadersSplit[i + 1].Replace("\"", "").Trim();
if (HeaderPart1 == "Content-Type")
{
WebRequest.ContentType = HeaderPart2;
}
else if (HeaderPart1 == "Accept")
{
WebRequest.Accept = HeaderPart2;
}
else if (HeaderPart1 == "Authorization")
{
WebRequest.Headers["Authorization"] = HeaderPart2;
}
}
WebRequest.Headers.Add("cache-control", "no-cache");
// Add body to Request
using (var streamWriter = new StreamWriter(WebRequest.GetRequestStream()))
{
streamWriter.Write(Body);
streamWriter.Flush();
streamWriter.Close();
}
// Execute Request; dispose the response even when the status is not OK
using (WebResponse = (HttpWebResponse)WebRequest.GetResponse())
{
    // Validate Response
    if (WebResponse.StatusCode == HttpStatusCode.OK)
    {
        using (var streamReader = new StreamReader(WebResponse.GetResponseStream()))
        {
            Data = streamReader.ReadToEnd();
        }
    }
}
return Data;
}
What am I doing wrong here? The method doesn't have global data that is shared between tasks.
But you do have data that is shared between the tasks: the variable Identifier and the shared object Job.
You are writing to a file using Identifier in the file name. If the lock is not in place, that piece of code will be running simultaneously on several tasks.
The implications for Job can't be deduced from your question.
I think you can solve the identifier problem by doing this:
var identifier = Interlocked.Increment(ref Identifier);
Job.Identifier = identifier; // Use 'identifier', not 'Identifier'
// write to file
string json = ...;
string FileName = OffSet.ToString() + "_" +
"MAMBU_LT_" + DateTime.Now.ToString("yyyyMMddHHmmss") + "_" +
identifier; // Use 'identifier', not 'Identifier'
...
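To see why Interlocked.Increment fixes the duplicate-identifier problem, here is a small self-contained console sketch (separate from the question's Job and file-writing code) where several tasks each reserve a unique id without any lock:
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

class InterlockedDemo
{
    static int Identifier = 0;

    static void Main()
    {
        // Each task atomically reserves its own value; no two tasks
        // can observe the same result from Interlocked.Increment.
        var tasks = Enumerable.Range(0, 8)
            .Select(_ => Task.Run(() =>
            {
                int id = Interlocked.Increment(ref Identifier);
                Console.WriteLine($"task got unique id {id}");
            }))
            .ToArray();
        Task.WaitAll(tasks);
    }
}
A bare Identifier++ in the same place can let two tasks read the same value before either writes it back, which is exactly how two tasks end up sharing a file name.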
I'm adding request headers with the code example below, and I expect to see the headers I've added in the outgoing request.
When I capture the request and inspect its headers (Telerik Fiddler 4), I cannot see the information I added.
I don't know what's wrong with the code. Can you help me?
Thank you in advance.
private HttpMapTileDataSource _dataSource;
public GmHttpTileDataSourceFactory()
{
_dataSource = new HttpMapTileDataSource("https://tile.openstreetmap.org/{zoomlevel}/{x}/{y}.png");
_dataSource.AdditionalRequestHeaders.Add("Accept-Language", "en");
_dataSource.AdditionalRequestHeaders.Add("Key", "Value");
_dataSource.AdditionalRequestHeaders.Add("blabla", "blabla");
}
Here is a semi-finished code, perhaps to solve the request header problem you encountered.
public class CustomTileDataSource : CustomMapTileDataSource
{
private string _tileUrl;
public Dictionary<string, string> AdditionalRequestHeaders = new Dictionary<string, string>();
private Dictionary<string, string> DefaultRequestHeaders = new Dictionary<string, string>();
public CustomTileDataSource(string tileUrl)
{
_tileUrl = tileUrl;
DefaultRequestHeaders.Add("Cache-Control", "max-age=0");
DefaultRequestHeaders.Add("Accept-Language", "en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7");
DefaultRequestHeaders.Add("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3");
DefaultRequestHeaders.Add("Accept-Encoding", "gzip, deflate, br");
DefaultRequestHeaders.Add("User-Agent", "ozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.10 Safari/537.36 Edg/77.0.235.5");
BitmapRequested += BitmapRequestedHandler;
}
private async void BitmapRequestedHandler(CustomMapTileDataSource sender, MapTileBitmapRequestedEventArgs args)
{
var deferral = args.Request.GetDeferral();
try
{
using (var imgStream = await GetTileAsStreamAsync(args.X, args.Y, args.ZoomLevel))
{
var memStream = imgStream.AsRandomAccessStream();
var decoder = await Windows.Graphics.Imaging.BitmapDecoder.CreateAsync(memStream);
var pixelProvider = await decoder.GetPixelDataAsync(Windows.Graphics.Imaging.BitmapPixelFormat.Rgba8, Windows.Graphics.Imaging.BitmapAlphaMode.Straight, new Windows.Graphics.Imaging.BitmapTransform(), Windows.Graphics.Imaging.ExifOrientationMode.RespectExifOrientation, Windows.Graphics.Imaging.ColorManagementMode.ColorManageToSRgb);
var pixels = pixelProvider.DetachPixelData();
var width = decoder.OrientedPixelWidth;
var height = decoder.OrientedPixelHeight;
// Placeholder per-pixel pass: it currently only computes the index and does not
// modify the pixel; hook per-pixel processing in here if you need it.
Parallel.For(0, height, i =>
{
    for (int j = 0; j <= width - 1; j++)
    {
        // Alpha channel index (RGBA); the row stride is the width, not the height
        var idx = (i * width + j) * 4 + 3;
    }
});
var randomAccessStream = new InMemoryRandomAccessStream();
var outputStream = randomAccessStream.GetOutputStreamAt(0);
var writer = new DataWriter(outputStream);
writer.WriteBytes(pixels);
await writer.StoreAsync();
await writer.FlushAsync();
args.Request.PixelData = RandomAccessStreamReference.CreateFromStream(randomAccessStream);
}
}
catch
{
}
deferral.Complete();
}
private Task<MemoryStream> GetTileAsStreamAsync(int x, int y, int zoom)
{
var tcs = new TaskCompletionSource<MemoryStream>();
var quadkey = TileXYZoomToQuadKey(x, y, zoom);
string url;
url = _tileUrl.Replace("{x}", x.ToString()).Replace("{y}", y.ToString()).Replace("{zoomlevel}", zoom.ToString()).Replace("{quadkey}", quadkey);
var request = WebRequest.Create(url);
foreach (var defaultHeader in DefaultRequestHeaders)
{
request.Headers.Add(defaultHeader.Key, defaultHeader.Value);
}
if (AdditionalRequestHeaders.Count > 0)
{
foreach (var addHeader in AdditionalRequestHeaders)
{
request.Headers.Add(addHeader.Key, addHeader.Value);
}
}
request.BeginGetResponse(async a =>
{
var r = (HttpWebRequest)a.AsyncState;
HttpWebResponse response = (HttpWebResponse)r.EndGetResponse(a);
using (var s = response.GetResponseStream())
{
var ms = new MemoryStream();
await s.CopyToAsync(ms);
ms.Position = 0;
tcs.SetResult(ms);
}
}, request);
return tcs.Task;
}
private string TileXYZoomToQuadKey(int tileX, int tileY, int zoom)
{
    // Standard Bing-style quadkey: per zoom level, the x bit contributes 1 and the y bit 2.
    // The original VB-style Strings.ChrW(Strings.AscW(digit) + 1) calls discarded their
    // result, so digit never changed.
    var quadKey = new StringBuilder();
    for (int i = zoom; i >= 1; i--)
    {
        char digit = '0';
        int mask = 1 << (i - 1);
        if ((tileX & mask) != 0)
            digit++;
        if ((tileY & mask) != 0)
            digit = (char)(digit + 2);
        quadKey.Append(digit);
    }
    return quadKey.ToString();
}
}
Usage
var dataSource = new CustomTileDataSource("https://tile.openstreetmap.org/{zoomlevel}/{x}/{y}.png");
dataSource.AdditionalRequestHeaders.Add("header_name", "header_value");
// other code
var mySource = new MapTileSource(dataSource);
myMap.TileSources.Add(mySource);
During my test I also hit the problem that headers added via HttpMapTileDataSource.AdditionalRequestHeaders are not sent. As a workaround I derived from CustomMapTileDataSource and reimplemented the tile download so the headers are applied, and that works normally.
The reason I call it semi-finished is that it has no proper caching mechanism, so the initial load time is very long.
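Since the missing cache is the main drawback, one possible direction (an untested sketch, not part of the original workaround; GetTileCachedAsync and the _tileCache field are my additions, and System.Collections.Concurrent plus System.IO are assumed) is to memoize downloaded tiles by their x/y/zoom key inside the class and call this instead of GetTileAsStreamAsync from the bitmap handler:
// Hypothetical in-memory tile cache for CustomTileDataSource, keyed by tile coordinates.
private readonly ConcurrentDictionary<string, byte[]> _tileCache =
    new ConcurrentDictionary<string, byte[]>();

private async Task<MemoryStream> GetTileCachedAsync(int x, int y, int zoom)
{
    string key = $"{zoom}/{x}/{y}";
    if (_tileCache.TryGetValue(key, out var cached))
        return new MemoryStream(cached);     // serve from cache, skip the network

    using (var ms = await GetTileAsStreamAsync(x, y, zoom))
    {
        byte[] bytes = ms.ToArray();
        _tileCache.TryAdd(key, bytes);       // unbounded cache: fine for a demo, but a
                                             // real app should cap or expire entries
        return new MemoryStream(bytes);
    }
}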
Best regards.
This is my code, which returns the expected output on my friend's PC but not on mine.
We are both working with Visual Studio 2017 Community.
This is the code that returns the latitude and longitude of the entered address.
The first time it works fine, but after that it throws a 403 Forbidden error (the problem is mainly on the request.GetResponse() call):
private static String[] x = new String[3];
public static String[] GetFirstLastName(string address)
{
try {
string url = "http://maps.google.com/maps/api/geocode/xml?address=" + address + "&sensor=false";
WebRequest request = WebRequest.Create(url);
// request.UseDefaultCredentials = true;
// request.Proxy.Credentials = System.Net.CredentialCache.DefaultCredentials;
// request.Proxy.Credentials = System.Net.CredentialCache.DefaultCredentials;
using (WebResponse response = (HttpWebResponse)request.GetResponse())
{
using (var reader = new StreamReader(response.GetResponseStream(), Encoding.UTF8))
{
var ds = new DataSet("Employee");
ds.ReadXml(reader);
DataRow dr = null;
var dt = new DataTable("Employee");
dt.Columns.AddRange(new DataColumn[2]
{
new DataColumn("Latitude", typeof (string)),
new DataColumn("Longitude", typeof (string))
});
int i = 0;
try
{
foreach (DataRow row in ds.Tables["result"].Rows)
{
}
}
catch (Exception e)
{
Console.WriteLine(e.ToString());
return x;
}
foreach (DataRow row in ds.Tables["result"].Rows)
{
if (i == 0)
{
string geometry_id = ds.Tables["geometry"].Select("result_id = " + row["result_id"])[0]["geometry_id"].ToString();
dr = ds.Tables["location"].Select("geometry_id = " + geometry_id)[0];
dt.Rows.Add(dr["lat"], dr["lng"]);
// Console.WriteLine(dr["lat"].ToString() + " " + dr["lng"].ToString());
i = 1;
break;
}
}
x[0] = dr["lat"].ToString();
x[1] = dr["lng"].ToString();
reader.Close();
}
// request.Timeout = 0;
// request.Abort();
response.Close();
return x;
}
}
catch(Exception e)
{
Console.WriteLine(e);
x[0] = "";
x[1] = "";
return x;
}
}
public static String[] GetFirstLastName1(string address)
{
try
{
string url = "http://maps.google.com/maps/api/geocode/xml?address=" + address + "&sensor=false";
WebRequest request = WebRequest.Create(url);
// request.UseDefaultCredentials = true;
// request.Proxy.Credentials = System.Net.CredentialCache.DefaultCredentials;
// request.Proxy.Credentials = System.Net.CredentialCache.DefaultCredentials;
using (WebResponse response = request.GetResponse())
{
using (var reader = new StreamReader(response.GetResponseStream(), Encoding.UTF8))
{
var ds = new DataSet("Employee");
ds.ReadXml(reader);
DataRow dr = null;
var dt = new DataTable("Employee");
dt.Columns.AddRange(new DataColumn[2]
{
new DataColumn("Latitude", typeof (string)),
new DataColumn("Longitude", typeof (string))
});
int i = 0;
try
{
foreach (DataRow row in ds.Tables["result"].Rows)
{
}
}
catch (Exception e)
{
Console.WriteLine(e.ToString());
return x;
}
foreach (DataRow row in ds.Tables["result"].Rows)
{
if (i == 0)
{
string geometry_id = ds.Tables["geometry"].Select("result_id = " + row["result_id"])[0]["geometry_id"].ToString();
dr = ds.Tables["location"].Select("geometry_id = " + geometry_id)[0];
dt.Rows.Add(dr["lat"], dr["lng"]);
// Console.WriteLine(dr["lat"].ToString() + " " + dr["lng"].ToString());
i = 1;
break;
}
}
x[0] = dr["lat"].ToString();
x[1] = dr["lng"].ToString();
reader.Close();
}
//// request.Timeout = 0;
/// request.Abort();
response.Close();
return x;
}
}
catch (Exception e)
{
Console.WriteLine(e);
x[0] = "";
x[1] = "";
return x;
}
}
static void Main(string[] args)
{
int i = 0;
for (;;)
{
String x = Console.ReadLine();
if (i == 0)
{
String[] y = GetFirstLastName(x);
Console.WriteLine(y[0] + " " + y[1]);
}
else
{
String[] y = GetFirstLastName1(x);
Console.WriteLine(y[0] + " " + y[1]);
}
i++;
}
//Console.ReadKey();
}
}
}
Glad to see you've joined Stack Overflow!
A 403 error usually has nothing to do with a syntax error in your code; it relates to the response received from Google's servers.
Google in particular is very restrictive about how many API calls you can make per day (Google makes a lot of money from developers who pay for high volumes of API calls). This page contains the limits. If you've exceeded those numbers, that's why you're getting the error, and you'll have to wait until tomorrow. Also take care not to send too many HTTP requests and accidentally DoS them, as they'll blacklist you for that.
Make sure you are not caching their page or storing the JS script locally, as this can also get you blacklisted.
Make sure you use https: and not http: here.
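On those last points, here is a sketch of how the URL in the question could be rebuilt: https, a properly escaped address, and an API key (the key value is a placeholder; current Google Geocoding endpoints require one, obtained from the Google Cloud console):
// Hypothetical rewrite of the URL construction from the question.
string apiKey = "YOUR_API_KEY"; // placeholder: your own Geocoding API key
string url = "https://maps.googleapis.com/maps/api/geocode/xml?address="
           + Uri.EscapeDataString(address)   // escape spaces and special characters
           + "&key=" + apiKey;
WebRequest request = WebRequest.Create(url);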
In Mono 4.6.2 / Linux, I'm noticing huge differences between the speed at which wget can download files and WebClient.DownloadString, so I made a little test to investigate. Why is wget significantly faster than C#? From my own experiments, it's faster to swallow the overhead of shelling out to wget, reading the downloaded files manually, and finally deleting them, than to simply use DownloadString. Am I using HttpWebRequest incorrectly?
Update: On Mono/Linux, using AutomaticDecompression seems to make no difference at all. I can't find any reported Mono issue about it, though.
Update 2: Due to a typo, I hadn't noticed that the plain WebClient class is far faster than a simple derived class. Why does Mono have a huge performance gap between a simple derived class and its parent?
class WC1 : System.Net.WebClient
{
protected override WebRequest GetWebRequest(Uri address)
{
var r = (HttpWebRequest) base.GetWebRequest(address);
r.Pipelined = true;
r.KeepAlive = true;
r.AutomaticDecompression = DecompressionMethods.Deflate | DecompressionMethods.GZip;
return r;
}
}
class WC2 : System.Net.WebClient
{
protected override WebRequest GetWebRequest(Uri address)
{
var r = (HttpWebRequest)base.GetWebRequest(address);
r.Pipelined = true;
r.KeepAlive = false;
r.AutomaticDecompression = DecompressionMethods.Deflate | DecompressionMethods.GZip;
return r;
}
}
class WC3 : System.Net.WebClient
{
protected override WebRequest GetWebRequest(Uri address)
{
var r = (HttpWebRequest)base.GetWebRequest(address);
r.Pipelined = false;
r.KeepAlive = true;
r.AutomaticDecompression = DecompressionMethods.Deflate | DecompressionMethods.GZip;
return r;
}
}
class WC4 : System.Net.WebClient
{
protected override WebRequest GetWebRequest(Uri address)
{
var r = (HttpWebRequest)base.GetWebRequest(address);
r.Pipelined = false;
r.KeepAlive = false;
r.AutomaticDecompression = DecompressionMethods.Deflate | DecompressionMethods.GZip;
return r;
}
}
class Program
{
static List<string> CreateUrls(int c)
{
var urls = new List<string>();
for (var i = 0; i < c; i++)
{
urls.Add("http://foo.com/?" + i);
}
return urls;
}
static TimeSpan Test(WebClient wc, IEnumerable<string> urls)
{
var sw = new System.Diagnostics.Stopwatch();
sw.Start();
foreach (var u in urls)
{
wc.DownloadString(u);
Console.Write(".");
}
sw.Stop();
return sw.Elapsed;
}
static void Main(string[] args)
{
var urlsPerTest = 200;
var urls = CreateUrls(urlsPerTest * 6);
var wc1 = new WC1();
var urls1 = urls.Take(urlsPerTest);
var elapsed1 = Test(wc1, urls1);
Console.WriteLine("WC1:" + elapsed1);
var wc2 = new WC2();
var urls2 = urls.Skip(urlsPerTest * 1).Take(urlsPerTest);
var elapsed2 = Test(wc2, urls2);
Console.WriteLine("WC2:" + elapsed2);
var wc3 = new WC3();
var urls3 = urls.Skip(urlsPerTest * 2).Take(urlsPerTest);
var elapsed3 = Test(wc3, urls3);
Console.WriteLine("WC3:" + elapsed3);
var wc4 = new WC4();
var urls4 = urls.Skip(urlsPerTest * 3).Take(urlsPerTest);
var elapsed4 = Test(wc4, urls4);
Console.WriteLine("WC4:" + elapsed4);
var wc5 = new WebClient();
var urls5 = urls.Skip(urlsPerTest * 4).Take(urlsPerTest);
var elapsed5 = Test(wc5, urls5);
Console.WriteLine("Webclient:" + elapsed5);
var urls6 = urls.Skip(urlsPerTest * 5).Take(urlsPerTest);
File.WriteAllLines("/tmp/foo.txt", urls6);
var sw = new Stopwatch();
sw.Start();
var p = new Process();
p.StartInfo = new ProcessStartInfo();
p.StartInfo.Arguments = "--silent -i /tmp/foo.txt";
p.StartInfo.CreateNoWindow = true;
p.StartInfo.FileName = "wget";
p.StartInfo.WorkingDirectory = "/tmp";
p.StartInfo.UseShellExecute = false;
p.Start();
p.WaitForExit();
sw.Stop();
File.Delete("/tmp/foo.txt");
Console.WriteLine("Wget:" + sw.Elapsed);
Console.ReadLine();
}
}
output
WC1:00:01:20.6518416
WC2:00:01:16.3561090
WC3:00:01:18.4278756
WC4:00:01:25.5372973
Webclient:00:01:04.6749124
Wget:00:01:03.4862053
I have a program which sends a couple of thousand POST requests to a server. Everything works fine until the end of the transmission: after all but one request have been sent, the program hangs.
There is always exactly one request left to make. I am using HttpClient and everything is done async.
Here I create the httpclient:
ServicePointManager.ServerCertificateValidationCallback +=
(sender, cert, chain, sslPolicyErrors) => true;
ServicePointManager.DefaultConnectionLimit = 45;
ServicePointManager.Expect100Continue = false;
var CurrentCredentials = new System.Net.NetworkCredential(Repo.Username, Repo.Password);
var Handler = new HttpClientHandler
{
Credentials = CurrentCredentials,
ClientCertificateOptions = ClientCertificateOption.Automatic,
UseProxy = false,
};
var Client = new HttpClient(Handler);
Client.Timeout = TimeSpan.FromSeconds(2500);
Client.DefaultRequestHeaders.Add("Connection", "Keep-alive");
WebServ = new WebService(Client, ref Repo);
Here is where I send the requests:
private async Task<string> SendData(string Url, HttpContent[] content, string[] name)
{
using (var FormData = new MultipartFormDataContent())
{
for (int i = 0; i < content.Length; ++i)
{
if (name[i] == "file")
FormData.Add(content[i], name[i], name[i] + i.ToString() + ".jpg");
else
FormData.Add(content[i], name[i]);
}
try
{
using (var Response = await Client
.PostAsync(Url, FormData)
.ConfigureAwait(continueOnCapturedContext:false))
{
await App.Current.Dispatcher.BeginInvoke(
(Action)delegate
{
Repo.CurrentItem++;
});
using (var StreamResume = new StreamWriter("resume.bkp"))
{
await StreamResume.WriteLineAsync(Repo.CurrentItem.ToString());
}
if (Response.IsSuccessStatusCode)
{
await App.Current.Dispatcher.BeginInvoke(
(Action)delegate
{
Repo.Log.Add(Url + " OK" + Repo.CurrentItem);
});
}
else
{
await App.Current.Dispatcher.BeginInvoke(
(Action)delegate
{
Repo.Log.Add(Url + " Error "
+ Response.Content.ToString());
});
}
if (Repo.CurrentItem == Repo.NumberOfItems)
{
await App.Current.Dispatcher.BeginInvoke(
(Action)delegate
{
Repo.Log.Add("Data has been imported");
Repo.CurrentState = "Finished!";
Repo.CurrentItem = 0;
Repo.Uploading = false;
Repo.NotUploading = true;
Repo.Resumable = false;
File.Delete("resume.bkp");
});
}
}
return null;
}
catch (OperationCanceledException)
{
}
catch (InvalidOperationException)
{
App.Current.Dispatcher.Invoke(
(Action)delegate
{
Repo.Log.Add("The url is not valid");
Repo.CurrentItem = 0;
});
Client.CancelPendingRequests();
}
return null;
}
}
I encounter no errors whatsoever, only that some threads never exit and the program never terminates. With a smaller data set, where only about 180 requests are made, the program does the job without ever hanging. Thanks in advance for your help.