I have 3000 emails in my Gmail account. I want to create an aggregated list of all the senders so that I can more effectively clean up my inbox. I don't need to download the message bodies or the attachments.
I used this sample to get me started (https://developers.google.com/gmail/api/quickstart/dotnet), although now I can't figure out how to return more than 100 message ids when I execute this code:
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Google.Apis.Auth.OAuth2;
using Google.Apis.Gmail.v1;
using Google.Apis.Gmail.v1.Data;
using Google.Apis.Requests;
using Google.Apis.Services;
using Google.Apis.Util;
using Google.Apis.Util.Store;
namespace GmailQuickstart
{
class Program
{
static string[] Scopes = { GmailService.Scope.GmailReadonly };
static string ApplicationName = "Gmail API .NET Quickstart";
static void Main(string[] args)
{
UserCredential credential;
using (var stream = new FileStream("credentials.json", FileMode.Open, FileAccess.Read))
{
string credPath = "token.json";
credential = GoogleWebAuthorizationBroker.AuthorizeAsync(
GoogleClientSecrets.Load(stream).Secrets,
Scopes,
"user",
CancellationToken.None,
new FileDataStore(credPath, true)).Result;
Console.WriteLine("Credential file saved to: " + credPath);
}
// Create Gmail API service.
var service = new GmailService(new BaseClientService.Initializer()
{
HttpClientInitializer = credential,
ApplicationName = ApplicationName,
});
//get all of the message ids for the messages in the inbox
var messageRequest = service.Users.Messages.List("me");
messageRequest.LabelIds = "INBOX";
var messageList = new List<Message>();
ListMessagesResponse messageResponse1 = new ListMessagesResponse();
var k = 0;
do
{
messageResponse1 = messageRequest.Execute();
messageList.AddRange(messageResponse1.Messages);
var output = $"Request {k} - Message Count: {messageList.Count()} Page Token: {messageRequest.PageToken} - Next Page Token: {messageResponse1.NextPageToken}";
Console.WriteLine(output);
System.IO.File.AppendAllText(@"C:\000\log.txt", output);
messageRequest.PageToken = messageResponse1.NextPageToken;
k++;
//this switch allowed me to walk through getting multiple pages of emails without having to get them all
//if (k == 5)
//{
// break;
//}
} while (!String.IsNullOrEmpty(messageRequest.PageToken));
//once I created the list of all the message ids I serialized the list to JSON and wrote it to a file
//so I could test the next portions without having to make the calls against the above each time
var serializedMessageIdList = Newtonsoft.Json.JsonConvert.SerializeObject(messageList);
System.IO.File.WriteAllText(@"C:\000\MessageIds.json", serializedMessageIdList);
//read in the serialized list and rehydrate it to test the next portion
var mIdList = Newtonsoft.Json.JsonConvert.DeserializeObject<List<Message>>(System.IO.File.ReadAllText(@"C:\000\MessageIds.json"));
//this method takes those message ids and gets the message object from the api for each of them
//1000 is the maximum number of requests google allows in a batch request
var messages = BatchDownloadEmails(service, mIdList.Select(m => m.Id), 1000);
//again I'm serializing the message list and writing it to a file
var serializedMessageList = Newtonsoft.Json.JsonConvert.SerializeObject(messages);
System.IO.File.WriteAllText(@"C:\000\Messages.json", serializedMessageList);
//and then reading them in and rehydrating the list to test the next portion
var mList = Newtonsoft.Json.JsonConvert.DeserializeObject<IList<Message>>(System.IO.File.ReadAllText(@"C:\000\Messages.json"));
//then I loop through each message and pull the values I'm looking for out of the payload headers
var emailList = new List<EmailItem>();
foreach (var message in mList)
{
if (message != null)
{
var from = message.Payload.Headers.SingleOrDefault(h => h.Name == "From")?.Value;
var date = message.Payload.Headers.SingleOrDefault(h => h.Name == "Date")?.Value;
var subject = message.Payload.Headers.SingleOrDefault(h => h.Name == "Subject")?.Value;
emailList.Add(new EmailItem() { From = from, Subject = subject, Date = date });
}
}
//I serialized this list as well
var serializedEmailItemList = Newtonsoft.Json.JsonConvert.SerializeObject(emailList);
System.IO.File.WriteAllText(@"C:\000\EmailItems.json", serializedEmailItemList);
//rehydrate for testing
var eiList = Newtonsoft.Json.JsonConvert.DeserializeObject<List<EmailItem>>(System.IO.File.ReadAllText(@"C:\000\EmailItems.json"));
//here is where I do the actual aggregation to determine which senders I have the most email from
var senderSummary = eiList.GroupBy(g => g.From).Select(g => new { Sender = g.Key, Count = g.Count() }).OrderByDescending(g => g.Count);
//serialize and output the results
var serializedSummaryList = Newtonsoft.Json.JsonConvert.SerializeObject(senderSummary);
System.IO.File.WriteAllText(#"C:\000\SenderSummary.json", serializedSummaryList);
}
public static IList<Message> BatchDownloadEmails(GmailService service, IEnumerable<string> messageIds, int chunkSize)
{
// Create a batch request.
var messages = new List<Message>();
//because the google batch request will only allow 1000 requests per batch the list needs to be split
//based on chunk size
var lists = messageIds.ChunkBy(chunkSize);
//double batchRequests = (2500 + 999) / 1000;
//for each list create a request with the message id and add it to the batch request queue
for (int i = 0; i < lists.Count(); i++)
{
var list = lists.ElementAt(i);
Console.WriteLine($"list: {i}...");
var request = new BatchRequest(service);
foreach (var messageId in list)
{
//Console.WriteLine($"message id: {messageId}...");
var messageBodyRequest = service.Users.Messages.Get("me", messageId);
//messageBodyRequest.Format = UsersResource.MessagesResource.GetRequest.FormatEnum.Metadata;
request.Queue<Message>(messageBodyRequest,
(content, error, index, message) =>
{
messages.Add(content);
});
}
Console.WriteLine("");
Console.WriteLine("ExecuteAsync");
//execute all the requests in the queue
request.ExecuteAsync().Wait();
System.Threading.Thread.Sleep(5000);
}
return messages;
}
}
public class EmailItem
{
public string From { get; set; }
public string Subject { get; set; }
public string Date { get; set; }
}
public static class IEnumerableExtensions
{
public static IEnumerable<IEnumerable<T>> ChunkBy<T>(this IEnumerable<T> source, int chunkSize)
{
return source
.Select((x, i) => new { Index = i, Value = x })
.GroupBy(x => x.Index / chunkSize)
.Select(x => x.Select(v => v.Value));
}
}
}
The research I've done says I need to use a batch request, but based on the information I've found I'm not able to adapt it to what I'm trying to accomplish. My understanding is that I would use the batch request to get all of the message ids, and then 3000 individual calls to get the actual From, Subject, and Date received from each email in my inbox?
You can use paging to get a full list.
Pass the page token from the previous page into the next call to Users.Messages.List (don't pass one into the first call; that's how you get things started). Detect the end when the result contains no messages.
This allows you to get all the messages in the mailbox.
NB. I suggest you make the code async: if there are more than a few messages to read, it can take an appreciable time to get them all.
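For illustration, a minimal sketch of that loop (async, as suggested; MaxResults and the INBOX label mirror the question, and the server enforces its own per-page cap):
var request = service.Users.Messages.List("me");
request.LabelIds = "INBOX";
request.MaxResults = 500; //ask for large pages; the server may cap this lower
var allMessages = new List<Message>();
do
{
    var response = await request.ExecuteAsync();
    if (response.Messages == null || response.Messages.Count == 0)
    {
        break; //no messages left
    }
    allMessages.AddRange(response.Messages);
    request.PageToken = response.NextPageToken; //null or empty on the last page
} while (!string.IsNullOrEmpty(request.PageToken));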
You can also use PageStreamer to get the remainder of the results.
var pageStreamer = new PageStreamer<Google.Apis.Gmail.v1.Data.Message, UsersResource.MessagesResource.ListRequest, ListMessagesResponse, string>(
(request, token) => request.PageToken = token,
response => response.NextPageToken,
response => response.Messages);
var req = service.Users.Messages.List("me");
req.MaxResults = 1000;
foreach (var result in pageStreamer.Fetch(req))
{
Console.WriteLine(result.Id);
}
This code will continue to run as long as there are additional results to request. Batching isn't really going to help you here, as there is no way to know what the next page token will be.
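As a side note on the original question: since only the From, Date, and Subject headers are needed, each Get request can ask for metadata only, so no bodies or attachments are downloaded. A sketch along the lines of the commented-out Format line in the question (the MetadataHeaders filter is optional):
var messageBodyRequest = service.Users.Messages.Get("me", messageId);
messageBodyRequest.Format = UsersResource.MessagesResource.GetRequest.FormatEnum.Metadata;
//only return the named headers in the payload
messageBodyRequest.MetadataHeaders = new Google.Apis.Util.Repeatable<string>(new[] { "From", "Date", "Subject" });
var message = await messageBodyRequest.ExecuteAsync();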
I am trying to connect to the new Google Analytics Data API using C# to request data from the new Google Analytics GA4. The only sample I can find is
Quickstart client libraries .NET. This does work, but it uses a service account. The cloud .NET client library google-cloud-dotnet only has examples for using a service account.
When I try to pass it desktop app credentials for using OAuth authorization I get
Error creating credential from JSON. Unrecognized credential type.
using System;
using System.Threading;
using System.Threading.Tasks;
using Google.Analytics.Data.V1Beta;
namespace GoogleAnalyticsExamplesData
{
class Program
{
private const string PropertyId = "250796939";
private const string PathToCreds = @"C:\dev\ServiceAccountCred.json";
static async Task Main(string[] args)
{
Console.WriteLine("Hello World!");
// Check whether the environment variable exists.
var environmentVariable = Environment.GetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS");
// If necessary, create it.
if (environmentVariable == null)
Environment.SetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS", PathToCreds);
await SampleRunReport(PropertyId);
}
static async Task SampleRunReport(string propertyId = "YOUR-GA4-PROPERTY-ID")
{
// Using a default constructor instructs the client to use the credentials
// specified in GOOGLE_APPLICATION_CREDENTIALS environment variable.
var client = await BetaAnalyticsDataClient.CreateAsync(CancellationToken.None);
var request = new RunReportRequest
{
Property = "properties/" + PropertyId,
Dimensions = {new Dimension {Name = "date"},},
Metrics = {new Metric {Name = "totalUsers"}, new Metric {Name = "newUsers"}},
DateRanges = {new DateRange {StartDate = "2021-04-01", EndDate = "today"},},
};
var response = await client.RunReportAsync(request);
Console.WriteLine("Report result:");
foreach (var row in response.Rows)
{
Console.WriteLine(
$"{row.DimensionValues[0].Value}, {row.MetricValues[0].Value}, {row.MetricValues[1].Value}");
}
}
}
}
Links to Google.Analytics.Data.V1Beta Web client credentials, desktop credentials
After several hours of digging around I found that you can supply an ICredential using a builder. This works with Desktop app credentials, for installed applications.
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Google.Analytics.Data.V1Beta;
using Google.Apis.Auth.OAuth2;
using Google.Apis.Util.Store;
namespace GoogleAnalyticsExamplesData
{
class Program
{
private const string PropertyId = "250796939";
private const string PathToCreds = @"C:\dev\credentials.json";
static async Task Main(string[] args)
{
Console.WriteLine("Hello World!");
await SampleRunReport(PropertyId);
}
static async Task SampleRunReport(string propertyId = "YOUR-GA4-PROPERTY-ID")
{
// Using a default constructor instructs the client to use the credentials
// specified in GOOGLE_APPLICATION_CREDENTIALS environment variable.
//var client = await BetaAnalyticsDataClient.CreateAsync(CancellationToken.None);
BetaAnalyticsDataClient client;
await using (var stream = new FileStream(PathToCreds, FileMode.Open, FileAccess.Read))
{
// Requesting Authentication or loading previously stored authentication for userName
var credential = await GoogleWebAuthorizationBroker.AuthorizeAsync(GoogleClientSecrets.Load(stream).Secrets,
new[] { "https://www.googleapis.com/auth/analytics.readonly" },
"userName",
CancellationToken.None,
new FileDataStore("credPath", true));
client = await new BetaAnalyticsDataClientBuilder
{
TokenAccessMethod = credential.GetAccessTokenForRequestAsync
}.BuildAsync();
}
var request = new RunReportRequest
{
Property = "properties/" + PropertyId,
Dimensions = {new Dimension {Name = "date"},},
Metrics = {new Metric {Name = "totalUsers"}, new Metric {Name = "newUsers"}},
DateRanges = {new DateRange {StartDate = "2021-04-01", EndDate = "today"},},
};
var response = await client.RunReportAsync(request);
Console.WriteLine("Report result:");
foreach (var row in response.Rows)
{
Console.WriteLine(
$"{row.DimensionValues[0].Value}, {row.MetricValues[0].Value}, {row.MetricValues[1].Value}");
}
}
}
}
I'm using the Azure Management Libraries (specifically fluent) to create web requests against their API to get a list of my databases under my subscription. I'm able to get an instance of the SQL server using fluent but am unable to get a list of all databases under a specific server.
Define and delete work fine; it is just the List() function.
I've tried it with sqlserver.firewallrules as well, and the list function doesn't work there either.
Here is some code:
The log at some point just pauses then writes "has exited with code 0"
public async Task<List<String>> getSqlDatabaseList()
{
System.Diagnostics.Debug.WriteLine("Starting to get database list");
List<string> dbNameList = new List<string>();
//the var azure is defined earlier in the project and is authenticated.
var sqlServer = await azure.SqlServers.GetByResourceGroupAsync("<resource group name>", "<server Name>");
//The code below successfully writes the server name
System.Diagnostics.Debug.WriteLine(sqlServer.Name);
//The code below here is where everything stops and "has exited with code 0" happens after a few seconds of delay
var dbList = sqlServer.Databases.List();
//Never reaches this line
System.Diagnostics.Debug.WriteLine("This line never is written");
foreach (ISqlDatabase db in dbList)
{
dbNameList.Add(db.Name);
}
return dbNameList;
}
Clarification:
I'm using ASP.NET MVC
Here is how my controller method accesses the class method. ResourceManager is the name of the class that implements getSqlDatabaseList():
// GET: Home
public async Task<ActionResult> Index()
{
ResourceManager rm = new ResourceManager();
List<string> test = await rm.getSqlDatabaseList();
//Never Gets to this line of code and never calls the for each or anything after
foreach (var item in test)
{
System.Diagnostics.Debug.WriteLine(item);
}
System.Diagnostics.Debug.WriteLine("Is past for each");
//AzureManager azm = await AzureManager.createAzureManager();
//await azm.getResourceGroupList();
return View(new UserLogin());
}
According to your code and description, I guess the reason your code never gets past the List() call is how your async getSqlDatabaseList method is invoked.
I guess you call this method from a console Main method or something similar.
If your Main method runs to completion, your async method getSqlDatabaseList hasn't finished executing and returned the list of strings; when Main ends, all pending async methods end with it.
I suggest you add the await or .Result keyword when calling the getSqlDatabaseList method, to wait for the method to execute completely.
For more details, you could refer to the test demo below.
static void Main(string[] args)
{
//use Result to wait for the method to execute completely
List<String> test = getSqlDatabaseList().Result;
foreach (var item in test)
{
Console.WriteLine(item);
}
Console.Read();
}
public static async Task<List<String>> getSqlDatabaseList()
{
//System.Diagnostics.Debug.WriteLine("Starting to get database list");
List<string> dbNameList = new List<string>();
var credentials = SdkContext.AzureCredentialsFactory.FromFile(@"D:\Auth.txt");
var azure = Azure
.Configure()
.WithLogLevel(HttpLoggingDelegatingHandler.Level.Basic)
.Authenticate(credentials)
.WithDefaultSubscription();
var sqlServer = await azure.SqlServers.GetByResourceGroupAsync("groupname", "brandotest");
var dbList = sqlServer.Databases.List();
foreach (ISqlDatabase db in dbList)
{
dbNameList.Add(db.Name);
}
return dbNameList;
}
Update:
According to your description, I have created a test MVC application, and as you say I have reproduced your issue.
I think there is something wrong with the Azure management fluent SDK.
Here is a workaround: I suggest you directly call the REST API to get the databases.
For more details, you could refer to the code below.
Send the request to this URL:
https://management.azure.com/subscriptions/{subscriptionsid}/resourceGroups/{resourceGroupsname}/providers/Microsoft.Sql/servers/{servername}/databases?api-version={apiversion}
public static List<String> getSqlDatabaseList()
{
//System.Diagnostics.Debug.WriteLine("Starting to get database list");
List<string> dbNameList = new List<string>();
string tenantId = "yourtenantid";
string clientId = "yourclientId";
string clientSecret = "clientSecret";
string subscriptionid = "subscriptionid";
string resourcegroup = "resourcegroupname";
string sqlservername = "brandotest";
string version = "2014-04-01";
string authContextURL = "https://login.windows.net/" + tenantId;
var authenticationContext = new AuthenticationContext(authContextURL);
var credential = new ClientCredential(clientId, clientSecret);
var result = authenticationContext.AcquireToken(resource: "https://management.azure.com/", clientCredential: credential);
if (result == null)
{
throw new InvalidOperationException("Failed to obtain the JWT token");
}
string token = result.AccessToken;
HttpWebRequest request = (HttpWebRequest)HttpWebRequest.Create(string.Format("https://management.azure.com/subscriptions/{0}/resourceGroups/{1}/providers/Microsoft.Sql/servers/{2}/databases?api-version={3}", subscriptionid, resourcegroup, sqlservername, version));
request.Method = "GET";
request.Headers["Authorization"] = "Bearer " + token;
request.ContentType = "application/json";
var httpResponse = (HttpWebResponse)request.GetResponse();
using (var streamReader = new StreamReader(httpResponse.GetResponseStream()))
{
string jsonResponse = streamReader.ReadToEnd();
dynamic json = JsonConvert.DeserializeObject(jsonResponse);
dynamic resultList = json.value.Children();
foreach (var item in resultList)
{
dbNameList.Add(((Newtonsoft.Json.Linq.JValue)item.name).Value.ToString());
}
}
return dbNameList;
}
Another workaround.
I suggest you use Thread.Join to wait for the List() method to execute completely.
Code:
public static async Task<List<String>> getSqlDatabaseList()
{
//System.Diagnostics.Debug.WriteLine("Starting to get database list");
List<string> dbNameList = new List<string>();
var credentials = SdkContext.AzureCredentialsFactory.FromFile(@"D:\Auth.txt");
var azure = Azure
.Configure()
.WithLogLevel(HttpLoggingDelegatingHandler.Level.Basic)
.Authenticate(credentials)
.WithDefaultSubscription();
var sqlServer = await azure.SqlServers.GetByResourceGroupAsync("brandosecondtest", "brandotest");
IReadOnlyList<ISqlDatabase> dbList = null;
Thread thread = new Thread(() => { dbList = sqlServer.Databases.List(); });
thread.Start();
//wait the thread
thread.Join();
foreach (ISqlDatabase db in dbList)
{
dbNameList.Add(db.Name);
}
return dbNameList;
}
I have the following code using CSOM to download a file from a SharePoint site:
public IEnumerable<FileUpload> DownloadFiles(string client, Guid userGuid, IEnumerable<Guid> fileUploadGuids)
{
using (var context = new ClientContext(documentStore))
{
client = client.ToLower();
var result = IntialSetUp(client, userGuid, fileUploadGuids, context);
context.Load(result, items => items.Include(
item => item.File.Name,
item => item[FileTitle],
item => item[FileRef]
));
context.ExecuteQuery();
List<FileUpload> fileUploads = new List<FileUpload>();
foreach (var item in result)
{
using (var fileInfo = Microsoft.SharePoint.Client.File.OpenBinaryDirect(context, item[FileRef].ToString()))
{
using (var memory = new MemoryStream())
{
fileInfo.Stream.CopyTo(memory);
fileUploads.Add(new FileUpload()
{
Name = item[FileTitle].ToString(),
FileUploadGuid = new Guid(item.File.Name),
FileBytes = memory.ToArray()
});
}
}
}
return fileUploads;
}
}
public void AddNetworkCredentials(ClientContext context)
{
string login = ConfigurationManager.AppSettings["UserName"];
string password = ConfigurationManager.AppSettings["Password"];
context.Credentials = new NetworkCredential(login, password, ConfigurationManager.AppSettings["Domain"]);
context.ExecuteQuery();
}
public ListItemCollection IntialSetUp(string client, Guid userGuid, IEnumerable<Guid> fileUploadGuids, ClientContext context)
{
AddNetworkCredentials(context);
var list = context.Web.Lists.GetByTitle(client);
context.ExecuteQuery();
return GetItems(FileLeafRef, Array.ConvertAll(fileUploadGuids.ToArray(), x => x.ToString("N")), list, FieldType.File, documentStore + "/" + client + "/" + userGuid.ToString("N"));
}
This code will run fine the first time round, but every consecutive request will throw the following error:
The underlying connection was closed: A connection that was expected to be kept alive was closed by the server.
This only happens for this service request and doesn't affect other requests that do not download the actual file, which leads me to believe it has something to do with the OpenBinaryDirect method, but I haven't found anything describing this issue.
Any ideas as to what might be causing this and how it could be remedied?
notes:
Left in a bunch of ExecuteQuery calls for testing purposes.
The title of the file is actually a guid so we could have multiple
files with the same name (the file name is a separate column)
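(For reference, a possible alternative read path is File.OpenBinaryStream, which goes through the normal CSOM request pipeline instead of the direct WebDAV call that OpenBinaryDirect uses; a minimal, untested sketch:)
var file = context.Web.GetFileByServerRelativeUrl(item[FileRef].ToString());
var data = file.OpenBinaryStream();
context.ExecuteQuery(); //populates data.Value with the file stream
using (var memory = new MemoryStream())
{
    data.Value.CopyTo(memory);
    var fileBytes = memory.ToArray();
}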
The task I want to accomplish is to create a Web API service in order to upload a file to Azure storage. At the same time, I would like to have a progress indicator that reflects the actual upload progress. After some research and studying I found out two important things:
First, I have to split the file manually into chunks and upload them asynchronously using the PutBlockAsync method from Microsoft.WindowsAzure.Storage.dll.
Second, I have to receive the file in my Web API service in Streamed mode and not in Buffered mode.
So far I have the following implementation:
UploadController.cs
using System.Configuration;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using System.Web.Http;
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.Blob;
using WebApiFileUploadToAzureStorage.Infrastructure;
using WebApiFileUploadToAzureStorage.Models;
namespace WebApiFileUploadToAzureStorage.Controllers
{
public class UploadController : ApiController
{
[HttpPost]
public async Task<HttpResponseMessage> UploadFile()
{
if (!Request.Content.IsMimeMultipartContent("form-data"))
{
return Request.CreateResponse(HttpStatusCode.UnsupportedMediaType,
new UploadStatus(null, false, "No form data found on request.", string.Empty, string.Empty));
}
var streamProvider = new MultipartAzureBlobStorageProvider(GetAzureStorageContainer());
var result = await Request.Content.ReadAsMultipartAsync(streamProvider);
if (result.FileData.Count < 1)
{
return Request.CreateResponse(HttpStatusCode.BadRequest,
new UploadStatus(null, false, "No files were uploaded.", string.Empty, string.Empty));
}
return Request.CreateResponse(HttpStatusCode.OK);
}
private static CloudBlobContainer GetAzureStorageContainer()
{
var storageConnectionString = ConfigurationManager.AppSettings["AzureBlobStorageConnectionString"];
var storageAccount = CloudStorageAccount.Parse(storageConnectionString);
var blobClient = storageAccount.CreateCloudBlobClient();
blobClient.DefaultRequestOptions.SingleBlobUploadThresholdInBytes = 1024 * 1024;
var container = blobClient.GetContainerReference("photos");
if (container.Exists())
{
return container;
}
container.Create();
container.SetPermissions(new BlobContainerPermissions
{
PublicAccess = BlobContainerPublicAccessType.Container
});
return container;
}
}
}
MultipartAzureBlobStorageProvider.cs
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.WindowsAzure.Storage.Blob;
namespace WebApiFileUploadToAzureStorage.Infrastructure
{
public class MultipartAzureBlobStorageProvider : MultipartFormDataStreamProvider
{
private readonly CloudBlobContainer _blobContainer;
public MultipartAzureBlobStorageProvider(CloudBlobContainer blobContainer) : base(Path.GetTempPath())
{
_blobContainer = blobContainer;
}
public override Task ExecutePostProcessingAsync()
{
const int blockSize = 256 * 1024;
var fileData = FileData.First();
var fileName = Path.GetFileName(fileData.Headers.ContentDisposition.FileName.Trim('"'));
var blob = _blobContainer.GetBlockBlobReference(fileName);
var bytesToUpload = (new FileInfo(fileData.LocalFileName)).Length;
var fileSize = bytesToUpload;
blob.Properties.ContentType = fileData.Headers.ContentType.MediaType;
blob.StreamWriteSizeInBytes = blockSize;
if (bytesToUpload < blockSize)
{
var cancellationToken = new CancellationToken();
using (var fileStream = new FileStream(fileData.LocalFileName, FileMode.Open, FileAccess.ReadWrite))
{
var upload = blob.UploadFromStreamAsync(fileStream, cancellationToken);
Debug.WriteLine($"Status {upload.Status}.");
upload.ContinueWith(task =>
{
Debug.WriteLine($"Status {task.Status}.");
Debug.WriteLine("Upload is over successfully.");
}, TaskContinuationOptions.OnlyOnRanToCompletion);
upload.ContinueWith(task =>
{
Debug.WriteLine($"Status {task.Status}.");
if (task.Exception != null)
{
Debug.WriteLine("Task could not be completed." + task.Exception.InnerException);
}
}, TaskContinuationOptions.OnlyOnFaulted);
upload.Wait(cancellationToken);
}
}
else
{
var blockIds = new List<string>();
var index = 1;
long startPosition = 0;
long bytesUploaded = 0;
do
{
var bytesToRead = Math.Min(blockSize, bytesToUpload);
var blobContents = new byte[bytesToRead];
using (var fileStream = new FileStream(fileData.LocalFileName, FileMode.Open))
{
fileStream.Position = startPosition;
fileStream.Read(blobContents, 0, (int)bytesToRead);
}
var manualResetEvent = new ManualResetEvent(false);
var blockId = Convert.ToBase64String(Encoding.UTF8.GetBytes(index.ToString("d6")));
Debug.WriteLine($"Now uploading block # {index.ToString("d6")}");
blockIds.Add(blockId);
var upload = blob.PutBlockAsync(blockId, new MemoryStream(blobContents), null);
upload.ContinueWith(task =>
{
bytesUploaded += bytesToRead;
bytesToUpload -= bytesToRead;
startPosition += bytesToRead;
index++;
var percentComplete = (double)bytesUploaded / fileSize;
Debug.WriteLine($"Percent complete: {percentComplete.ToString("P")}");
manualResetEvent.Set();
});
manualResetEvent.WaitOne();
} while (bytesToUpload > 0);
Debug.WriteLine("Now committing block list.");
var putBlockList = blob.PutBlockListAsync(blockIds);
putBlockList.ContinueWith(task =>
{
Debug.WriteLine("Blob uploaded completely.");
});
putBlockList.Wait();
}
File.Delete(fileData.LocalFileName);
return base.ExecutePostProcessingAsync();
}
}
}
I also enabled Streamed mode as this blog post suggests. This approach works great, in the sense that the file is uploaded successfully to Azure storage. However, when I make a call to this service using XMLHttpRequest (and subscribe to the progress event), I see the indicator move to 100% very quickly. If a 5MB file needs around 1 minute to upload, my indicator moves to the end in just 1 second. So probably the problem resides in the way the server informs the client about the upload progress. Any thoughts about this? Thank you.
================================ Update 1 ===================================
This is the JavaScript code I use to call the service:
function uploadFile(file, index, uploadCompleted) {
var authData = localStorageService.get("authorizationData");
var xhr = new XMLHttpRequest();
xhr.upload.addEventListener("progress", function (event) {
fileUploadPercent = Math.floor((event.loaded / event.total) * 100);
console.log(fileUploadPercent + " %");
});
xhr.onreadystatechange = function (event) {
if (event.target.readyState === event.target.DONE) {
if (event.target.status !== 200) {
} else {
var parsedResponse = JSON.parse(event.target.response);
uploadCompleted(parsedResponse);
}
}
};
xhr.open("post", uploadFileServiceUrl, true);
xhr.setRequestHeader("Authorization", "Bearer " + authData.token);
var data = new FormData();
data.append("file-" + index, file);
xhr.send(data);
}
Your progress indicator might be moving so fast because of
public async Task<HttpResponseMessage> UploadFile()
I have encountered this before when creating an async API. I'm not even sure it can be awaited; the call will just finish in the background (fire and forget), which is why your progress indicator finishes instantly. The API will immediately give you a response but will actually finish on the server in the background (if not awaited).
Please kindly try making it just
public HttpResponseMessage UploadFile()
and also try these:
var result = Request.Content.ReadAsMultipartAsync(streamProvider).Result;
var upload = blob.UploadFromStreamAsync(fileStream, cancellationToken).Result;
OR
var upload = await blob.UploadFromStreamAsync(fileStream, cancellationToken);
hope it helps.
Another way to accomplish what you want (I don't understand how the XMLHttpRequest progress event works) is to use the ProgressMessageHandler to get the upload progress in the request. Then, in order to notify the client, you could use some cache to store the progress and have the client request the current state from another endpoint, or use SignalR to push the progress from the server to the client.
Something like:
//WebApiConfigRegister
var progress = new ProgressMessageHandler();
progress.HttpSendProgress += HttpSendProgress;
config.MessageHandlers.Add(progress);
//End WebApiConfig Register
private static void HttpSendProgress(object sender, HttpProgressEventArgs e)
{
var request = sender as HttpRequestMessage;
//todo: check if request is not null
//Get an Id from the client or something like this to identify the request
var id = request.RequestUri.Query[0];
var perc = e.ProgressPercentage;
var b = e.TotalBytes;
var bt = e.BytesTransferred;
Cache.InsertOrUpdate(id, perc);
}
You can check more documentation on this MSDN blog post (Scroll down to "Progress Notifications" section)
Also, you could calculate the progress based on the data chunks, store the progress in a cache, and notify in the same way as above. Something like this solution
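A rough sketch of that chunk-based idea, for illustration only (ProgressCache and the uploadId key are made-up names, not an existing API):
//hypothetical progress store that another endpoint (or SignalR) can read from
public static class ProgressCache
{
    private static readonly System.Collections.Concurrent.ConcurrentDictionary<string, double> Progress =
        new System.Collections.Concurrent.ConcurrentDictionary<string, double>();
    public static void Report(string uploadId, double percent) { Progress[uploadId] = percent; }
    public static double Get(string uploadId)
    {
        double percent;
        return Progress.TryGetValue(uploadId, out percent) ? percent : 0;
    }
}
//inside the PutBlockAsync loop from the question, after each block completes:
//ProgressCache.Report(uploadId, (double)bytesUploaded / fileSize * 100);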
I need some help. Probably it is easy, but I don't get it.
In our fire station there is a screen where the alerts are shown (address, keyword, ...). When there is no alert I'd like to show upcoming events like birthdays, etc. The software running the screen needs ".txt" files for input.
So what I want to do is: I create a Google calendar and put all the events in it. Every night the task scheduler runs a little piece of software that downloads the events from Google, writes everything into one file, and the alert software reads this file.
This is how far I've got:
I created a Google Calendar API project.
I downloaded a code sample from Google and got it working with two calendars.
And that's it. Now I've been stuck all day long, and I guess I'm a better firefighter than programmer. So I need help. Please give me some hints on how to do it.
using Google.Apis.Auth.OAuth2;
using Google.Apis.Calendar.v3;
using Google.Apis.Calendar.v3.Data;
using Google.Apis.Services;
using Google.Apis.Util.Store;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace CalendarQuickstart
{
class Program
{
static string[] Scopes = { CalendarService.Scope.CalendarReadonly };
static string ApplicationName = "Google Calendar API Quickstart";
static void Main(string[] args)
{
UserCredential credential;
using (var stream =
new FileStream("client_secret.json", FileMode.Open, FileAccess.Read))
{
string credPath = System.Environment.GetFolderPath(
System.Environment.SpecialFolder.Personal);
credPath = Path.Combine(credPath, ".credentials");
credential = GoogleWebAuthorizationBroker.AuthorizeAsync(
GoogleClientSecrets.Load(stream).Secrets,
Scopes,
"user",
CancellationToken.None,
new FileDataStore(credPath, true)).Result;
Console.WriteLine("Credential file saved to: " + credPath);
}
// Create Google Calendar API service.
var service = new CalendarService(new BaseClientService.Initializer()
{
HttpClientInitializer = credential,
ApplicationName = ApplicationName,
});
// Define parameters of request. CALENDAR
EventsResource.ListRequest request = service.Events.List("primary");
request.TimeMin = DateTime.Now;
request.ShowDeleted = false;
request.SingleEvents = true;
request.MaxResults = 10;
request.OrderBy = EventsResource.ListRequest.OrderByEnum.StartTime;
// END REQUEST
// Define parameters of request. BIRTHDAY
EventsResource.ListRequest request1 = service.Events.List("#contacts@group.v.calendar.google.com");
request1.TimeMin = DateTime.Now;
request1.ShowDeleted = false;
request1.SingleEvents = true;
request1.MaxResults = 10;
request1.OrderBy = EventsResource.ListRequest.OrderByEnum.StartTime;
// END REQUEST
// List events. CALENDAR
Events events = request.Execute();
Console.WriteLine("Upcoming events:");
if (events.Items != null && events.Items.Count > 0)
{
foreach (var eventItem in events.Items)
{
string when = eventItem.Start.DateTime.ToString();
if (String.IsNullOrEmpty(when))
{
when = eventItem.Start.Date;
}
Console.WriteLine("{0} ({1})", eventItem.Summary, when);
}
} // END LIST
// List events. BIRTHDAY
Events events1 = request1.Execute();
Console.WriteLine("Upcoming events:");
if (events1.Items != null && events1.Items.Count > 0)
{
foreach (var eventItem in events1.Items)
{
string when = eventItem.Start.DateTime.ToString();
if (String.IsNullOrEmpty(when))
{
when = eventItem.Start.Date;
}
Console.WriteLine("{0} ({1})", eventItem.Summary, when);
}
} // END LIST
else
{
Console.WriteLine("No upcoming events found.");
}
Console.Read();
}
}
}
I would suggest you create a List<String> object (possibly named ListOfLines), initially empty, and then go on adding one string for each line you'd like your file to have. Then, when you have all your lines in the list (in the appropriate order), you just call File.WriteAllLines(filename, ListOfLines), and a file will be created with the given filename whose lines are the strings in the list.
From what I have seen, you'd just replace Console.WriteLine(somestring) with ListOfLines.Add(somestring) in your current code.
Also, mind that you don't need to explicitly add a newline to your strings; File.WriteAllLines does that for you.
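For example, applied to the first event loop in the question (the output path is just a placeholder):
var listOfLines = new List<string>();
foreach (var eventItem in events.Items)
{
    string when = eventItem.Start.DateTime.ToString();
    if (String.IsNullOrEmpty(when))
    {
        when = eventItem.Start.Date;
    }
    //the same text Console.WriteLine produced, collected instead of printed
    listOfLines.Add(string.Format("{0} ({1})", eventItem.Summary, when));
}
File.WriteAllLines("events.txt", listOfLines); //one line per event, newlines added automatically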