Context
I am importing IMDB database files into an SQLite database with the help of EntityFrameworkCore: two files, title.basics and title.akas (which is linked to basics via its movie ID).
At first, I had a single thread reading lines from basics and looping through akas until the ID changed. There was an issue there and, above all, it was too slow. So I wrote multithreaded code that reads both files at the same time, with another thread combining each group of akas with the appropriate movie.
The import is currently running, so I do not yet know whether my issue is fixed (it probably is). However, it is still far too slow for me.
Issue
The combining part is still very slow but, more importantly, my process is only using around 12% of the CPU, which corresponds to 1/8 of the total, and I have 8 physical cores. So it really looks like the process is only using one core.
I am not reducing this to a minimal testable example, as it wouldn't mean anything here. You can see both versions below:
https://cints.net/public/Imdb-MultiThread.cs.txt
using com.cyberinternauts.all.MediaRecognizer.Database;
using com.cyberinternauts.all.MediaRecognizer.Models.Metas;
using Microsoft.EntityFrameworkCore;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
namespace com.cyberinternauts.all.MediaRecognizer.MetaSources
{
class Imdb : MediaSource
{
private const string TITLES_FILE = "title.basics.tsv.gz";
private const string AKAS_FILE = "title.akas.tsv.gz";
private readonly string temporaryFolder = @"c:\temp\";
private readonly string baseUrl = "https://datasets.imdbws.com/";
private readonly WebClient webClient = new();
MediaRecognizerContext db = new();
private IQueryable<MetaMovie> imdbMovies = null;
private async Task<bool> GatherFilesAsync()
{
var totalFilesGathered = 0;
var filesToDownload = new string[] { AKAS_FILE, TITLES_FILE };
foreach(var fileToDownload in filesToDownload)
{
var compressedFile = temporaryFolder + fileToDownload;
if (!File.Exists(compressedFile) || !File.GetLastWriteTime(compressedFile).Date.Equals(DateTime.Today))
{
await GatherFileAsync(fileToDownload);
totalFilesGathered++;
}
}
return totalFilesGathered != 0;
}
private async Task GatherFileAsync(string fileName)
{
var compressedFile = temporaryFolder + fileName;
var uncompressedFile = temporaryFolder + Path.GetFileNameWithoutExtension(compressedFile);
await webClient.DownloadFileTaskAsync(baseUrl + fileName, compressedFile);
using Stream fd = File.Create(uncompressedFile);
using Stream fs = File.OpenRead(compressedFile);
using Stream csStream = new GZipStream(fs, CompressionMode.Decompress);
var buffer = new byte[1024];
int nRead;
while ((nRead = await csStream.ReadAsync(buffer, 0, buffer.Length)) > 0)
{
await fd.WriteAsync(buffer, 0, nRead);
}
}
private async Task LoadMetaDataAsync()
{
//return; //TODO: Remove this line
//TODO: Reactivate this line
//if (!await GatherFilesAsync()) return;
var titlesFile = temporaryFolder + Path.GetFileNameWithoutExtension(TITLES_FILE);
var akasFile = temporaryFolder + Path.GetFileNameWithoutExtension(AKAS_FILE);
var dbLock = new SemaphoreSlim(1);
var akasLock = new SemaphoreSlim(1);
var currentTitlesAkasLock = new SemaphoreSlim(1);
var associateLock = new SemaphoreSlim(1);
using (var db = new MediaRecognizerContext())
{
db.ChangeTracker.AutoDetectChangesEnabled = false;
var titles = new ConcurrentDictionary<string, MetaMovie>();
var readTitles = Task.Factory.StartNew(() =>
{
Parallel.ForEach(File.ReadLines(titlesFile), (titleLine, _, readingIndex) =>
{
if (readingIndex == 0) return; // Skipping columns titles line
var movieInfos = titleLine.Split("\t", StringSplitOptions.None);
dbLock.Wait();
MetaMovie metaMovie = db.MetaMovies.Where(m => m.ExternalId == movieInfos[0]).Include(m => m.Titles).FirstOrDefault();
dbLock.Release();
if (metaMovie == null)
{
int totalMinutes = -1;
if (!int.TryParse(movieInfos[7], out totalMinutes))
{
totalMinutes = -1;
}
metaMovie = new MetaMovie
{
ExternalId = movieInfos[0],
MetaSource = nameof(Imdb),
MovieType = movieInfos[1],
Title = movieInfos[3],
TotalMinutes = totalMinutes,
Genres = movieInfos[8]
};
metaMovie.Titles = new List<MetaTitle>();
if (int.TryParse(movieInfos[5], out int startYear))
{
metaMovie.StartYear = new DateTime(startYear, 1, 1);
}
else
{
metaMovie.StartYear = new DateTime(9999, 1, 1);
}
if (int.TryParse(movieInfos[6], out int endYear))
{
metaMovie.EndYear = new DateTime(endYear, 1, 1);
}
else
{
metaMovie.EndYear = metaMovie.StartYear;
}
}
titles.TryAdd(metaMovie.ExternalId, metaMovie);
});
});
var akas = new Dictionary<string, List<MetaTitle>>();
var currentTitlesAkas = new ConcurrentDictionary<string, int>();
var readAkas = Task.Factory.StartNew(() =>
{
Parallel.ForEach(File.ReadLines(akasFile), (akaLine, _, readingIndex) =>
{
if (readingIndex == 0) return; // Skipping columns titles line
currentTitlesAkasLock.Wait();
var titleInfos = akaLine.Split("\t", StringSplitOptions.None);
var externalId = titleInfos[0];
if (!currentTitlesAkas.ContainsKey(externalId))
{
currentTitlesAkas.TryAdd(externalId, 1);
}
else
{
currentTitlesAkas[externalId]++;
}
currentTitlesAkasLock.Release();
var metaTitle = new MetaTitle
{
MetaMovie = null,
Text = titleInfos[2],
Region = titleInfos[3],
Language = titleInfos[4]
};
akasLock.Wait();
List<MetaTitle> titleAkas;
if (!akas.ContainsKey(externalId))
{
titleAkas = new List<MetaTitle>();
akas.Add(externalId, titleAkas);
}
else
{
titleAkas = akas[externalId];
}
titleAkas.Add(metaTitle);
akasLock.Release();
currentTitlesAkasLock.Wait();
currentTitlesAkas[externalId]--;
currentTitlesAkasLock.Release();
});
});
var savingCounter = 0;
var associate = Task.Factory.StartNew(() =>
{
Parallel.For(1, Environment.ProcessorCount * 10, async (_) =>
{
var isAssociating = true;
do
{
var externalId = string.Empty;
var currentTitleAkaRemoved = false;
currentTitlesAkasLock.Wait();
foreach (var curExternalId in currentTitlesAkas.Keys.OrderBy(t => t))
{
if (currentTitlesAkas[curExternalId] == 0)
{
externalId = curExternalId;
break;
}
}
if (externalId != String.Empty)
{
currentTitleAkaRemoved = currentTitlesAkas.TryRemove(externalId, out int useless0); // Removing so other threads won't take it
}
isAssociating = !readAkas.IsCompleted || !readTitles.IsCompleted || !currentTitlesAkas.IsEmpty;
currentTitlesAkasLock.Release();
if (String.IsNullOrEmpty(externalId) || !currentTitleAkaRemoved) continue;
if (titles.TryGetValue(externalId, out MetaMovie metaMovie))
{
akasLock.Wait();
var titleAkas = akas[externalId];
akas.Remove(externalId);
akasLock.Release();
var changedMovie = false;
var movieAkas = metaMovie.Titles.Select(t => t).ToList(); // Clone list
foreach (var metaTitle in titleAkas)
{
var existingTitle = movieAkas.Where(t => t.Text == metaTitle.Text && t.Region == metaTitle.Region && t.Language == metaTitle.Language).FirstOrDefault();
if (existingTitle == null)
{
changedMovie = true;
metaMovie.Titles.Add(metaTitle);
}
else
{
movieAkas.Remove(existingTitle);
}
}
foreach (var movieTitle in movieAkas)
{
changedMovie = true;
metaMovie.Titles.Remove(movieTitle);
}
dbLock.Wait();
if (metaMovie.Id == 0)
{
db.Add(metaMovie);
}
else if (changedMovie)
{
db.Update(metaMovie);
}
dbLock.Release();
currentTitlesAkasLock.Wait();
currentTitlesAkas.TryRemove(externalId, out int uselessOut); // Free memory
isAssociating = !readAkas.IsCompleted || !readTitles.IsCompleted || !currentTitlesAkas.IsEmpty;
currentTitlesAkasLock.Release();
titles.TryRemove(externalId, out MetaMovie uselessOut2); // Free memory
associateLock.Wait();
savingCounter++;
var localSavingCounter = savingCounter;
associateLock.Release();
if (localSavingCounter != 0 && localSavingCounter % 1000 == 0)
{
var ttt = currentTitlesAkas.Where(t => t.Value > 0);
dbLock.Wait();
await db.SaveChangesAsync();
dbLock.Release();
Console.WriteLine("Saved " + localSavingCounter);
}
}
else if (!readTitles.IsCompleted) // If reading titles is not ended, then maybe it was not read yet... otherwise, it doesn't exist
{
currentTitlesAkasLock.Wait();
currentTitlesAkas.TryAdd(externalId, 0); // Readd because still no movie associated
currentTitlesAkasLock.Release();
}
} while (isAssociating);
});
});
Task.WaitAll(readTitles, readAkas, associate);
await db.SaveChangesAsync();
}
}
public async override Task<IEnumerable<MetaMovie>> FindMediasAsync(DirectoryInfo directory)
{
await LoadMetaDataAsync();
var movie = await ExtractInfosAsync(directory);
if (movie == null) return null;
if (imdbMovies == null)
{
imdbMovies = db.MetaMovies.Where(m => m.MetaSource == nameof(Imdb) && m.MovieType == "movie");
}
return FindCorrespondances(imdbMovies, movie);
}
}
}
https://cints.net/public/Imdb-SingleThread.cs.txt
using com.cyberinternauts.all.MediaRecognizer.Database;
using com.cyberinternauts.all.MediaRecognizer.Models.Metas;
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
namespace com.cyberinternauts.all.MediaRecognizer.MetaSources
{
class Imdb : MediaSource
{
private const string TITLES_FILE = "title.basics.tsv.gz";
private const string AKAS_FILE = "title.akas.tsv.gz";
private readonly string temporaryFolder = @"c:\temp\";
private readonly string baseUrl = "https://datasets.imdbws.com/";
private readonly WebClient webClient = new();
MediaRecognizerContext db = new();
private IQueryable<MetaMovie> imdbMovies = null;
private async Task<bool> GatherFilesAsync()
{
var totalFilesGathered = 0;
var filesToDownload = new string[] { AKAS_FILE, TITLES_FILE };
foreach(var fileToDownload in filesToDownload)
{
var compressedFile = temporaryFolder + fileToDownload;
if (!File.Exists(compressedFile) || !File.GetLastWriteTime(compressedFile).Date.Equals(DateTime.Today))
{
await GatherFileAsync(fileToDownload);
totalFilesGathered++;
}
}
return totalFilesGathered != 0;
}
private async Task GatherFileAsync(string fileName)
{
var compressedFile = temporaryFolder + fileName;
var uncompressedFile = temporaryFolder + Path.GetFileNameWithoutExtension(compressedFile);
await webClient.DownloadFileTaskAsync(baseUrl + fileName, compressedFile);
using Stream fd = File.Create(uncompressedFile);
using Stream fs = File.OpenRead(compressedFile);
using Stream csStream = new GZipStream(fs, CompressionMode.Decompress);
var buffer = new byte[1024];
int nRead;
while ((nRead = await csStream.ReadAsync(buffer, 0, buffer.Length)) > 0)
{
await fd.WriteAsync(buffer, 0, nRead);
}
}
private async Task LoadMetaDataAsync()
{
//return; //TODO: Remove this line
//TODO: Reactivate this line
//if (!await GatherFilesAsync()) return;
var titlesFile = temporaryFolder + Path.GetFileNameWithoutExtension(TITLES_FILE);
var akasFile = temporaryFolder + Path.GetFileNameWithoutExtension(AKAS_FILE);
var titlesLines = File.ReadLines(titlesFile);
var akasLines = File.ReadLines(akasFile);
var titlesIterator = titlesLines.GetEnumerator();
titlesIterator.MoveNext(); // Skip columns headers
var akasIterator = akasLines.GetEnumerator();
akasIterator.MoveNext();
akasIterator.MoveNext(); // Done twice to skip columns headers
var currentAka = akasIterator.Current;
var savingCounter = 0;
using (var db = new MediaRecognizerContext())
{
db.ChangeTracker.AutoDetectChangesEnabled = false;
while (titlesIterator.MoveNext())
{
var titleLine = titlesIterator.Current;
var movieInfos = titleLine.Split("\t", StringSplitOptions.None);
MetaMovie metaMovie = db.MetaMovies.Where(m => m.ExternalId == movieInfos[0]).FirstOrDefault();
var isNewMovie = false;
if (metaMovie == null)
{
int totalMinutes = -1;
if (!int.TryParse(movieInfos[7], out totalMinutes))
{
totalMinutes = -1;
}
isNewMovie = true;
metaMovie = new MetaMovie
{
ExternalId = movieInfos[0],
MetaSource = nameof(Imdb),
MovieType = movieInfos[1],
Title = movieInfos[3],
TotalMinutes = totalMinutes,
Genres = movieInfos[8]
};
metaMovie.Titles = new List<MetaTitle>();
if (int.TryParse(movieInfos[5], out int startYear))
{
metaMovie.StartYear = new DateTime(startYear, 1, 1);
}
else
{
metaMovie.StartYear = new DateTime(9999, 1, 1);
}
if (int.TryParse(movieInfos[6], out int endYear))
{
metaMovie.EndYear = new DateTime(endYear, 1, 1);
}
else
{
metaMovie.EndYear = metaMovie.StartYear;
}
}
var movieAkasIds = metaMovie.Titles.Select(t => t.Id).ToList();
var titleInfos = currentAka?.Split("\t", StringSplitOptions.None);
while (currentAka != null && int.Parse(titleInfos[0][2..]) <= int.Parse(metaMovie.ExternalId[2..]))
{
if (titleInfos[0] == metaMovie.ExternalId)
{
var metaTitle = new MetaTitle
{
MetaMovie = metaMovie,
Text = titleInfos[2],
Region = titleInfos[3],
Language = titleInfos[4]
};
var existingTitle = metaMovie.Titles.Where(t => t.Text == metaTitle.Text && t.Region == metaTitle.Region && t.Language == metaTitle.Language).FirstOrDefault();
if (existingTitle == null)
{
metaMovie.Titles.Add(metaTitle);
}
else
{
movieAkasIds.Remove(existingTitle.Id);
}
}
else
{
var a = 1;
}
akasIterator.MoveNext();
currentAka = akasIterator.Current;
titleInfos = currentAka.Split("\t", StringSplitOptions.None);
}
foreach(var movieTitleId in movieAkasIds)
{
metaMovie.Titles.Remove(metaMovie.Titles.Where(t => t.Id == movieTitleId).FirstOrDefault());
}
if (isNewMovie)
{
db.Add(metaMovie);
}
else
{
db.Update(metaMovie);
}
savingCounter++;
if (savingCounter % 10000 == 0)
{
await db.SaveChangesAsync();
Console.WriteLine("Saved " + savingCounter);
}
}
await db.SaveChangesAsync();
}
}
public async override Task<IEnumerable<MetaMovie>> FindMediasAsync(DirectoryInfo directory)
{
await LoadMetaDataAsync();
var movie = await ExtractInfosAsync(directory);
if (movie == null) return null;
if (imdbMovies == null)
{
imdbMovies = db.MetaMovies.Where(m => m.MetaSource == nameof(Imdb) && m.MovieType == "movie");
}
return FindCorrespondances(imdbMovies, movie);
}
}
}
In the multithreaded version, the slow part is in the LoadMetaDataAsync method, more precisely in the var associate = Task.Factory.StartNew(() => ...) part.
This is code in development; cleaning and splitting will be done once I have the appropriate result/speed.
Case closed. I returned to the single-threaded version and found my initial issue (my code assumed the files were fully ordered by ID, when they were only partially ordered).
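For anyone who hits the same ordering assumption, here is a minimal sketch that checks whether the akas file really is ordered by its numeric ID before relying on a merge-style single pass. The path and column layout are taken from the code above; everything else is assumed:
using System;
using System.IO;
using System.Linq;
class CheckAkasOrder
{
    static void Main()
    {
        var akasFile = @"c:\temp\title.akas.tsv"; // same uncompressed path convention as above
        long previousId = long.MinValue;
        // Skip the header line, then compare the numeric part of each "ttNNNNNNN" ID
        // in the first column against the previous one; any decrease means the file
        // is not fully ordered and a merge-style join cannot be trusted.
        foreach (var line in File.ReadLines(akasFile).Skip(1))
        {
            var id = long.Parse(line.Split('\t')[0][2..]);
            if (id < previousId)
            {
                Console.WriteLine($"Out of order at tt{id}");
                return;
            }
            previousId = id;
        }
        Console.WriteLine("File is ordered by ID.");
    }
}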
Thank you to everyone who participated.
I am trying to process frames from a webcam in a WPF application using the UWP API.
There is an article on how to work with MediaCapture and MediaFrameReader:
https://learn.microsoft.com/en-us/windows/uwp/audio-video-camera/process-media-frames-with-mediaframereader#handle-the-frame-arrived-event
If I set MemoryPreference to Cpu, the SoftwareBitmaps in the event are null. When I set it to Auto, I can see IDirect3DSurface objects in the event, but converting them to a SoftwareBitmap raises the exception "Specified cast is not valid."
How do I convert an IDirect3DSurface to a SoftwareBitmap?
private async void MediaCaptureExample()
{
var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();
MediaFrameSourceGroup selectedGroup = null;
MediaFrameSourceInfo colorSourceInfo = null;
foreach (var sourceGroup in frameSourceGroups)
{
foreach (var sourceInfo in sourceGroup.SourceInfos)
{
if (sourceInfo.MediaStreamType == MediaStreamType.VideoRecord && sourceInfo.SourceKind == MediaFrameSourceKind.Color)
{
colorSourceInfo = sourceInfo;
break;
}
}
if (colorSourceInfo != null)
{
selectedGroup = sourceGroup;
break;
}
}
capture = new MediaCapture();
var settings = new MediaCaptureInitializationSettings()
{
SourceGroup = selectedGroup,
SharingMode = MediaCaptureSharingMode.ExclusiveControl,
MemoryPreference = MediaCaptureMemoryPreference.Auto,
StreamingCaptureMode = StreamingCaptureMode.Video
};
await capture.InitializeAsync(settings);
var colorFrameSource = capture.FrameSources[colorSourceInfo.Id];
var preferredFormat = colorFrameSource.SupportedFormats.Where(format =>
{
return format.VideoFormat.Width >= 1080
&& String.Compare(format.Subtype, MediaEncodingSubtypes.Mjpg, true) == 0;
}).FirstOrDefault();
if (preferredFormat == null)
{
// Our desired format is not supported
return;
}
await colorFrameSource.SetFormatAsync(preferredFormat);
mediaFrameReader = await capture.CreateFrameReaderAsync(colorFrameSource);
mediaFrameReader.FrameArrived += MediaFrameReader_FrameArrived;
var result = await mediaFrameReader.StartAsync();
Console.WriteLine("Result = " + result.ToString());
}
private void MediaFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
try
{
var mediaFrameReference = sender.TryAcquireLatestFrame();
var videoMediaFrame = mediaFrameReference?.VideoMediaFrame;
var softwareBitmap = videoMediaFrame?.SoftwareBitmap;
var direct3DSurface = videoMediaFrame?.Direct3DSurface;
if (direct3DSurface != null)
{
var softwareBitmapTask = SoftwareBitmap.CreateCopyFromSurfaceAsync(mediaFrameReference.VideoMediaFrame.Direct3DSurface).AsTask();
softwareBitmap = softwareBitmapTask.Result;
}
if (softwareBitmap != null)
{
using (var stream = new Windows.Storage.Streams.InMemoryRandomAccessStream())
{
var encoderTask = BitmapEncoder.CreateAsync(BitmapEncoder.PngEncoderId, stream).AsTask();
encoderTask.Wait();
var encoder = encoderTask.Result;
encoder.SetSoftwareBitmap(softwareBitmap);
Task t = encoder.FlushAsync().AsTask();
t.Wait();
var image = new System.Windows.Media.Imaging.BitmapImage();
image.BeginInit();
image.StreamSource = stream.AsStream();
image.CacheOption = System.Windows.Media.Imaging.BitmapCacheOption.OnLoad;
image.EndInit();
imageElement.Source = image;
}
}
}
catch(Exception e)
{
Console.WriteLine(e.Message);
}
}
The issue was the format subtype. I changed the format from Mjpg to Nv12, and everything started working properly (even with MediaCaptureMemoryPreference.Auto):
var preferredFormat = colorFrameSource.SupportedFormats.Where(format =>
{
return format.VideoFormat.Width >= 1080 && String.Compare(format.Subtype, MediaEncodingSubtypes.Nv12, true) == 0;
}).FirstOrDefault();
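One extra note, not from the answer above but from the linked docs article: the article normalizes the acquired bitmap to BGRA8 with premultiplied alpha before handing it to display code, so if the encoder or image control later rejects the NV12 bitmap, a conversion step along these lines may still be needed:
// Normalize the pixel format before handing the bitmap to an encoder or image control
// (pattern taken from the linked docs article, not from the answer above).
if (softwareBitmap.BitmapPixelFormat != BitmapPixelFormat.Bgra8 ||
    softwareBitmap.BitmapAlphaMode != BitmapAlphaMode.Premultiplied)
{
    softwareBitmap = SoftwareBitmap.Convert(softwareBitmap,
        BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
}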
I have to upload a file to my Team Drive. The file has been created in the Team Drive, but I cannot upload the file chunks to it, so please help me solve this.
When writing a chunk, I get the error "The remote server returned an error: (404) Not Found."
I am getting the Team Drive ID and the ID of the file that has been created.
/*** Creation of a File to Team Drive ***/
f_ObjFile.TeamDriveId = "/*TeamDrive ID*/";
try
{
f_ObjNewFile.Parents = f_ObjFile.Parents; // f_ObjFile = <Team Drive ID>
f_ObjNewFile.Name = f_ObjFile.Name;
f_ObjNewFile.MimeType = f_ObjFile.MimeType;
f_ObjNewFile.TeamDriveId = f_ObjFile.TeamDriveId;
f_CreateRequest = GoogleHelper.InvokeApiCall(() => { return this.DriveServiceObj.Files.Create(f_ObjNewFile); }, this);
if (f_CreateRequest != null)
{
f_CreateRequest.SupportsTeamDrives = true;
f_CreateRequest.Fields = "*";
f_ObjNewFile = GoogleHelper.InvokeApiCall(() => { return f_CreateRequest.Execute(); }, this);
}
f_ObjDocumentItem = new DocumentItem(UserEmailID, f_ObjNewFile);
f_ObjDocumentItem.ItemID = f_ObjNewFile.Id;
string f_Url = GoogleHelper.CreateChunkURL("https://www.googleapis.com/upload/drive/v3/files/{0}?uploadType=resumable", f_ObjNewFile.Id);
f_ObjDocumentItem.ChunkUploadURL = InitiateResumeRequest(f_Url, f_ObjNewFile.Id);
}
catch(Exception ex) { }
finally
{
f_ObjNewFile = null;
f_CreateRequest = null;
}
/* Writing the chunks to the file in TeamDrive */
try
{
httpRequest = GoogleHelper.CreateHttpWebRequestObj(f_ObjChunkData.ChunkUploadURL,true);
httpRequest.Method = GoogleConstant.PATCH;
httpRequest.ContentLength = f_ObjChunkData.FileData.Length;
httpRequest.SendChunked = true;
httpRequest.Headers["Content-Range"] = "bytes " + f_ObjChunkData.StartOffset +
"-" +
f_ObjChunkData.EndOffset + "/" +
f_ObjChunkData.FileSize.ToString();
using (System.IO.Stream f_ObjHttpStream = GoogleHelper.InvokeApiCall(() => { return httpRequest.GetRequestStream(); }, this))
{
if (f_ObjHttpStream != null)
{
System.IO.MemoryStream f_ChunkStream = null;
f_ChunkStream = new System.IO.MemoryStream(f_ObjChunkData.FileData);
f_ChunkStream.CopyTo(f_ObjHttpStream);
f_ObjHttpStream.Flush();
f_ObjHttpStream.Close();
f_ChunkStream.Close();
f_ChunkStream = null;
}
}
using (HttpWebResponse httpResponse = GoogleHelper.InvokeApiCall(() => { return (HttpWebResponse)(httpRequest.GetResponse()); }, this))
{
if (httpResponse != null)
{
if (httpResponse.StatusCode == HttpStatusCode.OK)
{
httpResponse.Close();
}
}
}
}
catch (Exception ex) { }
In the following line:
string f_Url = GoogleHelper.CreateChunkURL("https://www.googleapis.com/upload/drive/v3/files/{0}?uploadType=resumable", f_ObjNewFile.Id);
instead of
"https://www.googleapis.com/upload/drive/v3/files/{0}?uploadType=resumable"
use the following URL:
https://www.googleapis.com/upload/drive/v3/files/{0}?uploadType=resumable&supportsTeamDrives=true
and it's done. Now you can upload the chunks.
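Applied to the code from the question, the corrected line becomes:
string f_Url = GoogleHelper.CreateChunkURL("https://www.googleapis.com/upload/drive/v3/files/{0}?uploadType=resumable&supportsTeamDrives=true", f_ObjNewFile.Id);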
I am calling the SharePoint REST API from a C# application multiple times because of pagination, since it cannot return more than 5000 records at a time. I am calling the API in a loop like this:
for (int i = 0; i < 10000; i = i + 5000)
{
SP_StrainCodes = "GetByTitle('S%20Codes')/items?$skiptoken=Paged=TRUE%26p_ID=" + i + "&$top=1";
core_URL = BaseURL_SP + SP_StrainCodes;
using (var client_sharePoint = new HttpClient(handler))
{
var response = client_sharePoint.GetAsync(core_URL).Result;
var responsedata = await response.Content.ReadAsStringAsync();
returnObj = JsonConvert.DeserializeObject<SharepointDTO.RootObject>(responsedata);
if (returnObj.d.Next == null)
continue;
}
}
return returnObj;
}
How do I combine the returnObj from the 1st call and the 2nd call and return them as one object?
Something like this:
List<SharepointDTO.RootObject> results = new List<SharepointDTO.RootObject>();
for (int i = 0; i < 10000; i = i + 5000)
{
SP_StrainCodes = "GetByTitle('S%20Codes')/items?$skiptoken=Paged=TRUE%26p_ID=" + i + "&$top=1";
core_URL = BaseURL_SP + SP_StrainCodes;
using (var client_sharePoint = new HttpClient(handler))
{
var response = client_sharePoint.GetAsync(core_URL).Result;
var responsedata = await response.Content.ReadAsStringAsync();
returnObj = JsonConvert.DeserializeObject<SharepointDTO.RootObject>(responsedata);
}
results.Add(returnObj);
if (returnObj.d.Next == null)
break; // last page reached; stop requesting further pages
}
return results;
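If a single flat collection of items is wanted rather than one object per page, the pages can be concatenated afterwards. This assumes the DTO exposes each page's items as d.results (the usual SharePoint REST shape); adjust the property name to your actual SharepointDTO:
// Hypothetical flattening step; d.results is assumed to be the item collection of each page.
var allItems = results.SelectMany(page => page.d.results).ToList();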
I am working on a Windows 10 UWP app, and my requirement is to upload 5 images to the server, each with a unique value. I have used System.Threading.Tasks.Task.Factory.StartNew(). While debugging, I found that sometimes two images are sent with the same unique key. Can someone suggest whether using System.Threading.Tasks.Task.Factory.StartNew() is the right approach here?
All the images are sent using a web service. My sample code is as follows:
WebServiceUtility serviceUtility = new WebServiceUtility();
List<System.Threading.Tasks.Task> tasks = new List<System.Threading.Tasks.Task>();
var cancelSource = new CancellationTokenSource();
cancellationToken = cancelSource.Token;
System.Threading.Tasks.Task currentTask = null;
List<System.Threading.Tasks.Task> uploadTasks = new List<System.Threading.Tasks.Task>();
List<string> uploadedImageIdList = new List<string>();
foreach (var image in _imageCollection)
{
if (!cancellationToken.IsCancellationRequested)
{
currentTask = await System.Threading.Tasks.Task.Factory.StartNew(async () =>
{
string imageName = string.Empty;
string imagePath = string.Empty;
if (image.IsEvidenceImage)
{
imageName = image.EvidencePath.Split('\\')[1];
imagePath = image.EvidencePath;
}
else
{
imageName = image.EvidencePath.Split('#')[1].Split('\\')[1];
imagePath = image.EvidencePath.Split('#')[1];
}
byte[] _imageAsByteArray = await GetEvidenceFromIsoStore(imagePath);
if (null != _imageAsByteArray && _imageAsByteArray.Length > 0)
{
IRestResponse response = await serviceUtility.UploadImage
(_imageAsByteArray, imageName,
new RequestDataGenerator().generateRequestDataForMediaUpload(
(null != _imageItem.I_IS_PRIMARY && "1".Equals(_imageItem.I_IS_PRIMARY) ? "1" : "0"),
evidenceName
));
if (response != null && response.RawBytes.Length > 0)
{
var successMessage = MCSExtensions.CheckWebserviceResponseCode(response.StatusCode);
if (successMessage.Equals(Constants.STATUS_CODE_SUCCESS))
{
byte[] decryptedevidenceresponse = WebserviceED.finaldecryptedresponse(response.RawBytes);
string responseString = Encoding.UTF8.GetString(decryptedevidenceresponse, 0, decryptedevidenceresponse.Length);
JObject reponseObject = JObject.Parse(responseString);
//Debug.WriteLine("Evidence Upload Response : " + Environment.NewLine);
uploadedImageIdList.Add(reponseObject["P_RET_ID"].ToString());
try
{
if (image.IsEvidenceImage)
{
if (await FileExists(image.EvidencePath))
{
StorageFile file = await localFolder.GetFileAsync(image.EvidencePath);
await file.DeleteAsync();
}
}
else
{
string[] evidenceMedia = image.EvidencePath.Split('#');
foreach (string evidenceItem in evidenceMedia)
{
if (await FileExists(evidenceItem))
{
StorageFile file = await localFolder.GetFileAsync(evidenceItem);
await file.DeleteAsync();
}
}
}
}
catch (Exception ex)
{
Debug.WriteLine(ex.Message);
}
}
else
{
UserMessageUtil.ShowMessage(successMessage);
}
}
}
}, cancellationToken);
uploadTasks.Add(currentTask);
}
}
await System.Threading.Tasks.Task.WhenAll(uploadTasks.ToArray());
Just make it a separate method:
...
foreach (var image in _imageCollection)
{
if (!cancellationToken.IsCancellationRequested)
{
currentTask = UploadAsync(...);
uploadTasks.Add(currentTask);
}
}
await Task.WhenAll(uploadTasks);
async Task UploadAsync(...)
{
string imageName = string.Empty;
string imagePath = string.Empty;
...
}
Or, a bit more simply at the call site:
...
var uploadTasks = _imageCollection.Select(x => UploadAsync(...));
await Task.WhenAll(uploadTasks);
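For completeness, a minimal self-contained sketch of that pattern (ImageItem and UploadOneAsync are placeholders here, not the original types or web-service calls): each image is passed to the method as a parameter, so every upload works on its own data, and the resulting plain Tasks can be awaited together without any Task.Factory.StartNew wrapper.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
class UploadPatternSketch
{
    // Placeholder for the real image model.
    record ImageItem(string EvidencePath);
    // One plain async method per image; the image is a parameter, not a captured loop variable.
    static async Task UploadOneAsync(ImageItem image)
    {
        await Task.Delay(100); // stands in for the real web-service upload
        Console.WriteLine($"Uploaded {image.EvidencePath}");
    }
    static async Task Main()
    {
        var images = new List<ImageItem> { new("a.jpg"), new("b.jpg") };
        // One task per image; WhenAll awaits them all.
        var uploadTasks = images.Select(UploadOneAsync);
        await Task.WhenAll(uploadTasks);
    }
}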