Display a local PDF file with CefSharp - C#

I'm trying to display a local PDF file using a custom LocalSchemeHandler which reads a memory stream from the file.
The file exists and the memory stream is read properly, but nothing is displayed in the browser window. Displaying the same file via the file scheme works.
ResourceHandler:
public class LocalSchemeHandler : ResourceHandler
{
    public override bool ProcessRequestAsync(IRequest request, ICallback callback)
    {
        var uri = new Uri(request.Url);
        var file = uri.AbsolutePath;

        Task.Run(() =>
        {
            using (callback)
            {
                if (!File.Exists(file))
                {
                    callback.Cancel();
                    return;
                }

                byte[] bytes = File.ReadAllBytes(file);
                var stream = new MemoryStream(bytes);
                if (stream == null)
                {
                    callback.Cancel();
                }
                else
                {
                    stream.Position = 0;
                    ResponseLength = stream.Length;
                    var fileExtension = Path.GetExtension(file);
                    MimeType = GetMimeType(fileExtension);
                    StatusCode = (int)HttpStatusCode.OK;
                    Stream = stream;
                    callback.Continue();
                }
            }
        });

        return true;
    }
}
ISchemeHandlerFactory:
public class CustomProtocolSchemeHandlerFactory : ISchemeHandlerFactory
{
    public const string SchemeName = "local";

    public IResourceHandler Create(IBrowser browser, IFrame frame, string schemeName, IRequest request)
    {
        return new LocalSchemeHandler();
    }
}
Settings:
var settings = new CefSettings();
settings.RegisterScheme(new CefCustomScheme
{
    SchemeName = CustomProtocolSchemeHandlerFactory.SchemeName,
    SchemeHandlerFactory = new CustomProtocolSchemeHandlerFactory()
});
// Increase the log severity so CEF outputs detailed information, useful for debugging
settings.LogSeverity = LogSeverity.Default;
Cef.Initialize(settings);
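For context, the browser is then navigated to a URL on the registered scheme. A minimal sketch (assuming a ChromiumWebBrowser instance named browser; the path matches the log output in EDIT2 below):

// hypothetical usage: load a local file through the custom "local" scheme
browser.Load("local://c/Users/abidh/Desktop/pdf.pdf");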
EDIT
Trying to display the PDF file via ResourceHandler.FromFilePath also doesn't work (nothing is displayed).
public class CustomProtocolSchemeHandlerFactory : ISchemeHandlerFactory
{
    public const string SchemeName = "local";

    public IResourceHandler Create(IBrowser browser, IFrame frame, string schemeName, IRequest request)
    {
        var uri = new Uri(request.Url);
        var file = uri.AbsolutePath;
        var fileExtension = Path.GetExtension(file);
        var mimeType = ResourceHandler.GetMimeType(fileExtension);
        return ResourceHandler.FromFilePath(file, mimeType);
    }
}
EDIT2
After setting LogSeverity to Default the log says: [0524/150955.108:INFO:CONSOLE(20)] "Refused to load plugin data from 'local://c/Users/abidh/Desktop/pdf.pdf' because it violates the following Content Security Policy directive: "object-src * blob: externalfile: file: filesystem: data:".

I didn't find a solution using Google. Thanks to amaitland, using the IsCSPBypassing property solved the problem:
var settings = new CefSettings();
settings.RegisterScheme(new CefCustomScheme
{
    SchemeName = CustomProtocolSchemeHandlerFactory.SchemeName,
    SchemeHandlerFactory = new CustomProtocolSchemeHandlerFactory(),
    IsCSPBypassing = true
});
settings.LogSeverity = LogSeverity.Error;
Cef.Initialize(settings);

Set the PDF file path as ChromiumWebBrowser.Address in the format file:///C:/Users/xxx/yyy.pdf, and CefSharp will render the PDF just like the Chrome browser.
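For completeness, a minimal sketch of that (assuming a control instance named browser; the placeholder path is from the answer above):

// WPF: Address is a settable dependency property
browser.Address = "file:///C:/Users/xxx/yyy.pdf";
// WinForms: use Load instead
// browser.Load("file:///C:/Users/xxx/yyy.pdf");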

Related

CefSharp doesn't display local PDF file after upgrade to 105.3.390

After upgrading CefSharp to version 105.3.390, the browser doesn't display local PDF files anymore (displaying a local HTML file works).
The browser window is black, and Windows opens a dialog: "You'll need a new app to open this chrome-extension link".
As I am using a custom resource handler, all URLs look like "local://C:/path/to/file.pdf".
Settings:
var cefSettings = new CefSettings()
{
    LogSeverity = LogSeverity.Verbose,
    CachePath = SystemService.CefCacheDir,
    LogFile = SystemService.CefSharpLogFile
};
CefCustomScheme localScheme = new CefCustomScheme
{
    SchemeName = LocalSchemeHandlerFactory.SchemeName,
    SchemeHandlerFactory = new LocalSchemeHandlerFactory(),
    IsCSPBypassing = true
};
cefSettings.RegisterScheme(localScheme);
CefSharp.Cef.Initialize(cefSettings);
LocalSchemeHandlerFactory:
public class LocalSchemeHandlerFactory : ISchemeHandlerFactory
{
    public const string SchemeName = "local";

    public IResourceHandler Create(IBrowser browser, IFrame frame, string schemeName, IRequest request)
    {
        return new CustomSchemeHandler();
    }
}
CustomSchemeHandler:
public class CustomSchemeHandler : ResourceHandler
{
    public override CefReturnValue ProcessRequestAsync(IRequest request, ICallback callback)
    {
        string file = null;
        var uri = new Uri(request.Url);
        if (uri.Scheme == LocalSchemeHandlerFactory.SchemeName || uri.Scheme == SmbSchemeHandlerFactory.SchemeName)
        {
            if (uri.Scheme == LocalSchemeHandlerFactory.SchemeName)
            {
                var driveInfo = new DriveInfo(uri.Authority);
                var path = uri.LocalPath.Substring(1);
                file = Path.Combine(driveInfo.Name, path);
            }
        }
        else
        {
            // handle invalid scheme
        }

        if (file == null)
            return CefReturnValue.Cancel;

        Task.Run(() =>
        {
            using (callback)
            {
                if (!File.Exists(file))
                {
                    callback.Cancel();
                    return;
                }

                byte[] bytes = File.ReadAllBytes(file);
                var stream = bytes != null ? new MemoryStream(bytes) : null;
                if (stream == null)
                {
                    callback.Cancel();
                }
                else
                {
                    stream.Position = 0;
                    ResponseLength = stream.Length;
                    var fileExtension = Path.GetExtension(file);
                    MimeType = GetMimeType(fileExtension); // application/pdf
                    StatusCode = (int)HttpStatusCode.OK;
                    Stream = stream;
                    callback.Continue();
                }
            }
        });

        return CefReturnValue.ContinueAsync;
    }
}
You can find the CEF log here.
What am I missing?
UPDATE: Similar to this, when trying to open dev tools in CefSharp, a window opens: "You'll need a new app to open this dev-tools link".
I found it myself. The cause was my implementation of RequestHandler:
internal class BrowserRequestHandler : RequestHandler
{
    protected override bool OnBeforeBrowse(IWebBrowser chromiumWebBrowser, IBrowser browser, IFrame frame, IRequest request, bool userGesture, bool isRedirect)
    {
        // delegate all non-local URLs to the OS default browser
        if (!request.Url.StartsWith("file:") &&
            !request.Url.StartsWith(LocalSchemeHandlerFactory.SchemeName))
        {
            Process.Start(request.Url);
            return true;
        }
        return false;
    }
}
But when opening a PDF file, an additional URL is called: chrome-extension://mhjfbmdgcfjbbpaeojofohoefgiehjai/index.html (this is Chromium's built-in PDF viewer extension, which CefSharp loads to render the PDF).
Following the logic above, this URL was handed to the OS, which caused the "You'll need a new app to open this chrome-extension link" window.
After extending the if statement, everything works fine:
if (!request.Url.StartsWith("chrome-extension") &&
    !request.Url.StartsWith("file:") &&
    !request.Url.StartsWith(LocalSchemeHandlerFactory.SchemeName))
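Putting it together, a sketch of the complete handler with the extended check (the same logic as above, only restructured so the allowed schemes come first):

internal class BrowserRequestHandler : RequestHandler
{
    protected override bool OnBeforeBrowse(IWebBrowser chromiumWebBrowser, IBrowser browser, IFrame frame, IRequest request, bool userGesture, bool isRedirect)
    {
        // let chrome-extension: (the internal PDF viewer), file: and local: URLs through
        if (request.Url.StartsWith("chrome-extension") ||
            request.Url.StartsWith("file:") ||
            request.Url.StartsWith(LocalSchemeHandlerFactory.SchemeName))
        {
            return false; // false = let CefSharp continue the navigation
        }

        Process.Start(request.Url); // everything else goes to the OS default browser
        return true; // true = cancel the navigation inside CefSharp
    }
}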

Calling Web API to download an Excel file

Below is the C# Web API code that generates the Excel file:
public class FileExportController : ApiController
{
    [HttpGet]
    public HttpResponseMessage Get()
    {
        var callerContext = CallerContext.DefaultCallerContext;
        ReportingInput userInput = new ReportingInput();
        userInput.ClientOneCode = "AVON";
        string handle = Guid.NewGuid().ToString();
        var @event = new GetJobReprotDataBlEvent(callerContext, userInput); // '#event' is not a valid C# identifier; '@event' is
        WebApiApplication.ApplicationInitializerObj.EventBus.Publish(@event);
        XLWorkbook wb = new FileExportEngine().ExportExcel(@event.ReportData); // this returns an XLWorkbook
        // note: ':' is not a valid character in Windows file names, so use '-' in the timestamp
        string fileName = "JobReport_" + DateTime.Now.ToString("yyyy-MM-dd HH'-'mm'-'ss") + ".xlsx";
        using (MemoryStream memoryStream = new MemoryStream())
        {
            wb.SaveAs(memoryStream);
            var result = new HttpResponseMessage(HttpStatusCode.OK)
            {
                Content = new ByteArrayContent(memoryStream.ToArray())
            };
            result.Content.Headers.ContentDisposition =
                new System.Net.Http.Headers.ContentDispositionHeaderValue("attachment")
                {
                    FileName = fileName
                };
            result.Content.Headers.ContentType =
                new MediaTypeHeaderValue("application/octet-stream");
            return result;
        }
    }
}
When I call this API from the browser, I am able to generate the Excel file:
http://localhost/ETLScheduler/api/FileExport -- this works when hit directly in the browser.
Now I want to consume this API in an Angular 5 application. I have a button; on click, I call the component method downloadReport() to download the file.
Below is the code:
downloadReport() {
  this._service.downloadJobReport('AVON');
}
where downloadJobReport() is in my service file as below:
downloadJobReport(clientCode: string) {
  return this._http.get(APIs.downloadJobReport);
}
When I run the application and click the Download button, nothing happens; the file is not downloaded. Does anyone have an idea how I should update my Angular code to consume the API?
Thanks in advance.
As you mentioned above in the comments, you are using the Angular code below to download the file:
downloadFile(data: Blob) {
  const contentType = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet';
  const blob = new Blob([data], { type: contentType });
  const url = window.URL.createObjectURL(blob);
  window.open(url);
}
I have also tried this code: it works in Chrome, but not in IE and Edge.
You may update your method to something like the below:
var downloadFile = function (file_name, content) {
  var csvData = new Blob([content], { type: 'text/csv' });
  if (window.navigator && window.navigator.msSaveOrOpenBlob) { // for IE
    window.navigator.msSaveOrOpenBlob(csvData, file_name);
  } else { // for non-IE (Chrome, Firefox etc.)
    var a = document.createElement("a");
    document.body.appendChild(a);
    a.style = "display: none";
    var csvUrl = URL.createObjectURL(csvData);
    a.href = csvUrl;
    a.download = file_name;
    a.click();
    URL.revokeObjectURL(a.href);
    a.remove();
  }
};
You can refer to the link below for more information:
Open links made by createObjectURL in IE11
The problem is that Angular expects JSON as the result. You need to configure your GET request so that it expects something different:
public downloadJobReport(clientCode: string): Observable<Blob> {
  return this._http.get(APIs.downloadJobReport, { responseType: 'blob' });
}
Just a tiny question: you pass an argument clientCode to downloadJobReport but never use it. Maybe it would be wise to leave that out?
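Alternatively, the API could accept the code instead of hardcoding "AVON". A hedged sketch of the server side (assuming Web API binds clientCode from the query string, e.g. GET /ETLScheduler/api/FileExport?clientCode=AVON):

[HttpGet]
public HttpResponseMessage Get(string clientCode)
{
    // use the caller-supplied code instead of the hardcoded "AVON"
    ReportingInput userInput = new ReportingInput { ClientOneCode = clientCode };
    // ... build the workbook and HttpResponseMessage exactly as in the Get() above
}

The Angular service would then append clientCode as a query parameter instead of ignoring it.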

Web API Upload Files

I have some data to save into a database.
I have created a Web API POST method to save the data. The following is my POST method:
[Route("PostRequirementTypeProcessing")]
public IEnumerable<NPAAddRequirementTypeProcessing> PostRequirementTypeProcessing(mdlAddAddRequirementTypeProcessing requTypeProcess)
{
mdlAddAddRequirementTypeProcessing rTyeProcessing = new mdlAddAddRequirementTypeProcessing();
rTyeProcessing.szDescription = requTypeProcess.szDescription;
rTyeProcessing.iRequirementTypeId = requTypeProcess.iRequirementTypeId;
rTyeProcessing.szRequirementNumber = requTypeProcess.szRequirementNumber;
rTyeProcessing.szRequirementIssuer = requTypeProcess.szRequirementIssuer;
rTyeProcessing.szOrganization = requTypeProcess.szOrganization;
rTyeProcessing.dIssuedate = requTypeProcess.dIssuedate;
rTyeProcessing.dExpirydate = requTypeProcess.dExpirydate;
rTyeProcessing.szSignedBy = requTypeProcess.szSignedBy;
rTyeProcessing.szAttachedDocumentNo = requTypeProcess.szAttachedDocumentNo;
if (String.IsNullOrEmpty(rTyeProcessing.szAttachedDocumentNo))
{
}
else
{
UploadFile();
}
rTyeProcessing.szSubject = requTypeProcess.szSubject;
rTyeProcessing.iApplicationDetailsId = requTypeProcess.iApplicationDetailsId;
rTyeProcessing.iEmpId = requTypeProcess.iEmpId;
NPAEntities context = new NPAEntities();
Log.Debug("PostRequirementTypeProcessing Request traced");
var newRTP = context.NPAAddRequirementTypeProcessing(requTypeProcess.szDescription, requTypeProcess.iRequirementTypeId,
requTypeProcess.szRequirementNumber, requTypeProcess.szRequirementIssuer, requTypeProcess.szOrganization,
requTypeProcess.dIssuedate, requTypeProcess.dExpirydate, requTypeProcess.szSignedBy,
requTypeProcess.szAttachedDocumentNo, requTypeProcess.szSubject, requTypeProcess.iApplicationDetailsId,
requTypeProcess.iEmpId);
return newRTP.ToList();
}
There is a field called 'szAttachedDocumentNo', which is a document that's being saved in the database as well.
After saving all the data, I want the physical file for 'szAttachedDocumentNo' to be saved on the server, so I created a method called "UploadFile" as follows:
[HttpPost]
public void UploadFile()
{
    if (HttpContext.Current.Request.Files.AllKeys.Any())
    {
        // Get the uploaded file from the Files collection
        var httpPostedFile = HttpContext.Current.Request.Files["UploadedFile"];
        if (httpPostedFile != null)
        {
            // Validate the uploaded image (optional)
            string folderPath = HttpContext.Current.Server.MapPath("~/UploadedFiles");
            //string folderPath1 = Convert.ToString(ConfigurationManager.AppSettings["DocPath"]);
            // If the directory does not exist, create it
            if (!Directory.Exists(folderPath))
            {
                Directory.CreateDirectory(folderPath);
            }
            // Get the complete file path
            var fileSavePath = Path.Combine(folderPath, httpPostedFile.FileName);
            // Save the uploaded file to the "UploadedFiles" folder
            httpPostedFile.SaveAs(fileSavePath);
        }
    }
}
Before running the project, I debugged the POST method; when it came to the "UploadFile" line, it stepped into that method.
At the Files line, it skipped the remaining lines and went to the last line, which means it didn't see any file.
I am able to save everything to the database, except that I don't see the physical file in the specified location.
Any help would be much appreciated.
Regards,
Somad
Make sure the request's Content-Type is set to multipart/form-data:
[HttpPost()]
public async Task<IHttpActionResult> UploadFile()
{
    if (!Request.Content.IsMimeMultipartContent())
        throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
    try
    {
        MultipartMemoryStreamProvider provider = new MultipartMemoryStreamProvider();
        await Request.Content.ReadAsMultipartAsync(provider);
        if (provider.Contents != null && provider.Contents.Count == 0)
        {
            return BadRequest("No files provided.");
        }
        foreach (HttpContent file in provider.Contents)
        {
            string filename = file.Headers.ContentDisposition.FileName.Trim('\"');
            byte[] buffer = await file.ReadAsByteArrayAsync();
            using (MemoryStream stream = new MemoryStream(buffer))
            {
                // save the file wherever you want
            }
        }
        return Ok("Files uploaded");
    }
    catch (Exception ex)
    {
        return InternalServerError(ex);
    }
}
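To verify the endpoint independently of the client, here is a minimal console sketch using HttpClient (the URL and the form field name "UploadedFile" are assumptions matching the handlers above):

using System;
using System.IO;
using System.Net.Http;
using System.Threading.Tasks;

class UploadTest
{
    static async Task Main()
    {
        string path = @"C:\temp\test.pdf"; // hypothetical test file
        using (var client = new HttpClient())
        using (var content = new MultipartFormDataContent())
        {
            // HttpClient sends this as multipart/form-data, which both handlers above expect
            content.Add(new ByteArrayContent(File.ReadAllBytes(path)), "UploadedFile", Path.GetFileName(path));
            HttpResponseMessage response = await client.PostAsync("http://localhost/api/UploadFile", content); // hypothetical route
            Console.WriteLine(response.StatusCode);
        }
    }
}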

EntityTooSmall in CompleteMultipartUploadResponse

Using .NET SDK v.1.5.21.0.
I'm trying to upload a large file (63 MB) and I'm following the example at:
http://docs.aws.amazon.com/AmazonS3/latest/dev/LLuploadFileDotNet.html
But I'm using a helper instead of the whole code, together with jQuery File Upload:
https://github.com/blueimp/jQuery-File-Upload/blob/master/basic-plus.html
What I have is:
string bucket = "mybucket";

long totalSize = long.Parse(context.Request.Headers["X-File-Size"]),
     maxChunkSize = long.Parse(context.Request.Headers["X-File-MaxChunkSize"]),
     uploadedBytes = long.Parse(context.Request.Headers["X-File-UloadedBytes"]),
     partNumber = uploadedBytes / maxChunkSize + 1,
     fileSize = partNumber * inputStream.Length;
bool lastPart = inputStream.Length < maxChunkSize;

// http://docs.aws.amazon.com/AmazonS3/latest/dev/LLuploadFileDotNet.html
if (partNumber == 1) // initialize upload
{
    iView.Utilities.Amazon_S3.S3MultipartUpload.InitializePartToCloud(fileName, bucket);
}

try
{
    // upload part
    iView.Utilities.Amazon_S3.S3MultipartUpload.UploadPartToCloud(fs, fileName, bucket, (int)partNumber, uploadedBytes, maxChunkSize);
    if (lastPart)
        // wrap it up and go home
        iView.Utilities.Amazon_S3.S3MultipartUpload.CompletePartToCloud(fileName, bucket);
}
catch (System.Exception ex)
{
    // Houston, we have a problem!
    //Console.WriteLine("Exception occurred: {0}", exception.Message);
    iView.Utilities.Amazon_S3.S3MultipartUpload.AbortPartToCloud(fileName, bucket);
}
and
public static class S3MultipartUpload
{
    private static string accessKey = System.Configuration.ConfigurationManager.AppSettings["AWSAccessKey"];
    private static string secretAccessKey = System.Configuration.ConfigurationManager.AppSettings["AWSSecretKey"];
    private static AmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(accessKey, secretAccessKey);

    public static InitiateMultipartUploadResponse initResponse;
    public static List<UploadPartResponse> uploadResponses;

    public static void InitializePartToCloud(string destinationFilename, string destinationBucket)
    {
        // 1. Initialize.
        uploadResponses = new List<UploadPartResponse>();
        InitiateMultipartUploadRequest initRequest =
            new InitiateMultipartUploadRequest()
                .WithBucketName(destinationBucket)
                .WithKey(destinationFilename.TrimStart('/'));
        initResponse = client.InitiateMultipartUpload(initRequest);
    }

    public static void UploadPartToCloud(Stream fileStream, string destinationFilename, string destinationBucket, int partNumber, long uploadedBytes, long maxChunkedBytes)
    {
        // 2. Upload Parts.
        UploadPartRequest request = new UploadPartRequest()
            .WithBucketName(destinationBucket)
            .WithKey(destinationFilename.TrimStart('/'))
            .WithUploadId(initResponse.UploadId)
            .WithPartNumber(partNumber)
            .WithPartSize(maxChunkedBytes)
            .WithFilePosition(uploadedBytes)
            .WithInputStream(fileStream) as UploadPartRequest;
        uploadResponses.Add(client.UploadPart(request));
    }

    public static void CompletePartToCloud(string destinationFilename, string destinationBucket)
    {
        // Step 3: complete.
        CompleteMultipartUploadRequest compRequest =
            new CompleteMultipartUploadRequest()
                .WithBucketName(destinationBucket)
                .WithKey(destinationFilename.TrimStart('/'))
                .WithUploadId(initResponse.UploadId)
                .WithPartETags(uploadResponses);
        CompleteMultipartUploadResponse completeUploadResponse =
            client.CompleteMultipartUpload(compRequest);
    }

    public static void AbortPartToCloud(string destinationFilename, string destinationBucket)
    {
        // abort.
        client.AbortMultipartUpload(new AbortMultipartUploadRequest()
            .WithBucketName(destinationBucket)
            .WithKey(destinationFilename.TrimStart('/'))
            .WithUploadId(initResponse.UploadId));
    }
}
My maxChunkSize is 6 MB (6 * 1024 * 1024), as I have read that the minimum is 5 MB...
Why am I getting the "Your proposed upload is smaller than the minimum allowed size" exception? What am I doing wrong?
The error is:
<Error>
  <Code>EntityTooSmall</Code>
  <Message>Your proposed upload is smaller than the minimum allowed size</Message>
  <ETag>d41d8cd98f00b204e9800998ecf8427e</ETag>
  <MinSizeAllowed>5242880</MinSizeAllowed>
  <ProposedSize>0</ProposedSize>
  <RequestId>C70E7A23C87CE5FC</RequestId>
  <HostId>pmhuMXdRBSaCDxsQTHzucV5eUNcDORvKY0L4ZLMRBz7Ch1DeMh7BtQ6mmfBCLPM2</HostId>
  <PartNumber>1</PartNumber>
</Error>
How can ProposedSize be 0 if I'm passing the stream and the stream length?
Here is a working solution for the latest Amazon SDK (as of today: v.1.5.37.0).
Amazon S3 multipart upload works like this:
1. Initialize the request using client.InitiateMultipartUpload(initRequest)
2. Send chunks of the file (loop until the end) using client.UploadPart(request)
3. Complete the request using client.CompleteMultipartUpload(compRequest)
If anything goes wrong, remember to dispose the client and request, and also fire the abort command using client.AbortMultipartUpload(abortMultipartUploadRequest).
I keep the client in Session, as we need it for each chunk upload, and keep hold of the ETags that are later used to complete the process.
You can see an example and a simple way of doing this in the Amazon docs themselves; I ended up writing a class to do everything, and I have integrated it with the lovely jQuery File Upload plugin (handler code below as well).
The S3MultipartUpload class is as follows:
public class S3MultipartUpload : IDisposable
{
    string accessKey = System.Configuration.ConfigurationManager.AppSettings.Get("AWSAccessKey");
    string secretAccessKey = System.Configuration.ConfigurationManager.AppSettings.Get("AWSSecretKey");
    AmazonS3 client;

    public string OriginalFilename { get; set; }
    public string DestinationFilename { get; set; }
    public string DestinationBucket { get; set; }

    public InitiateMultipartUploadResponse initResponse;
    public List<PartETag> uploadPartETags;
    public string UploadId { get; private set; }

    public S3MultipartUpload(string destinationFilename, string destinationBucket)
    {
        if (client == null)
        {
            System.Net.WebRequest.DefaultWebProxy = null; // disable proxy to make upload quicker
            client = Amazon.AWSClientFactory.CreateAmazonS3Client(accessKey, secretAccessKey, new AmazonS3Config()
            {
                RegionEndpoint = Amazon.RegionEndpoint.EUWest1,
                CommunicationProtocol = Protocol.HTTP
            });
            this.OriginalFilename = destinationFilename.TrimStart('/');
            this.DestinationFilename = string.Format("{0:yyyy}{0:MM}{0:dd}{0:HH}{0:mm}{0:ss}{0:fffff}_{1}", DateTime.UtcNow, this.OriginalFilename);
            this.DestinationBucket = destinationBucket;
            this.InitializePartToCloud();
        }
    }

    private void InitializePartToCloud()
    {
        // 1. Initialize.
        uploadPartETags = new List<PartETag>();
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest();
        initRequest.BucketName = this.DestinationBucket;
        initRequest.Key = this.DestinationFilename;
        // make it public
        initRequest.AddHeader("x-amz-acl", "public-read");
        initResponse = client.InitiateMultipartUpload(initRequest);
    }

    public void UploadPartToCloud(Stream fileStream, long uploadedBytes, long maxChunkedBytes)
    {
        int partNumber = uploadPartETags.Count() + 1; // current part
        // 2. Upload Parts.
        UploadPartRequest request = new UploadPartRequest();
        request.BucketName = this.DestinationBucket;
        request.Key = this.DestinationFilename;
        request.UploadId = initResponse.UploadId;
        request.PartNumber = partNumber;
        request.PartSize = fileStream.Length;
        //request.FilePosition = uploadedBytes // remove this line?
        request.InputStream = fileStream; // as UploadPartRequest;
        var up = client.UploadPart(request);
        uploadPartETags.Add(new PartETag() { ETag = up.ETag, PartNumber = partNumber });
    }

    public string CompletePartToCloud()
    {
        // Step 3: complete.
        CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest();
        compRequest.BucketName = this.DestinationBucket;
        compRequest.Key = this.DestinationFilename;
        compRequest.UploadId = initResponse.UploadId;
        compRequest.PartETags = uploadPartETags;
        string r = "Something went badly wrong";
        using (CompleteMultipartUploadResponse completeUploadResponse = client.CompleteMultipartUpload(compRequest))
            r = completeUploadResponse.ResponseXml;
        return r;
    }

    public void AbortPartToCloud()
    {
        // abort.
        client.AbortMultipartUpload(new AbortMultipartUploadRequest()
        {
            BucketName = this.DestinationBucket,
            Key = this.DestinationFilename,
            UploadId = initResponse.UploadId
        });
    }

    public void Dispose()
    {
        if (client != null) client.Dispose();
        if (initResponse != null) initResponse.Dispose();
    }
}
I use DestinationFilename for the destination file so I can avoid name collisions, but I keep the OriginalFilename as I need it later.
Using the jQuery File Upload plugin, everything runs inside a generic handler, and the process goes something like this:
// Upload partial file
private void UploadPartialFile(string fileName, HttpContext context, List<FilesStatus> statuses)
{
    if (context.Request.Files.Count != 1)
        throw new HttpRequestValidationException("Attempt to upload chunked file containing more than one fragment per request");

    var inputStream = context.Request.Files[0].InputStream;
    string contentRange = context.Request.Headers["Content-Range"]; // "bytes 0-6291455/14130271"

    int fileSize = int.Parse(contentRange.Split('/')[1]),
        maxChunkSize = int.Parse(context.Request.Headers["X-Max-Chunk-Size"]),
        uploadedBytes = int.Parse(contentRange.Replace("bytes ", "").Split('-')[0]);

    iView.Utilities.AWS.S3MultipartUpload s3Upload = null;
    try
    {
        // ######################################################################################
        // 1. Initialize Amazon S3 Client
        if (uploadedBytes == 0)
        {
            HttpContext.Current.Session["s3-upload"] = new iView.Utilities.AWS.S3MultipartUpload(fileName, awsBucket);
            s3Upload = (iView.Utilities.AWS.S3MultipartUpload)HttpContext.Current.Session["s3-upload"];
            string msg = System.String.Format("Upload started: {0} ({1:N0}Mb)", s3Upload.DestinationFilename, (fileSize / 1024 / 1024));
            this.Log(msg);
        }

        // cast current session object
        if (s3Upload == null)
            s3Upload = (iView.Utilities.AWS.S3MultipartUpload)HttpContext.Current.Session["s3-upload"];

        // ######################################################################################
        // 2. Send Chunks
        s3Upload.UploadPartToCloud(inputStream, uploadedBytes, maxChunkSize);

        // ######################################################################################
        // 3. Complete Upload
        if (uploadedBytes + maxChunkSize > fileSize)
        {
            string completeRequest = s3Upload.CompletePartToCloud();
            this.Log(completeRequest); // log S3 response
            s3Upload.Dispose(); // dispose all objects
            HttpContext.Current.Session["s3-upload"] = null; // we don't need this anymore
        }
    }
    catch (System.Exception ex)
    {
        if (ex.InnerException != null)
            while (ex.InnerException != null)
                ex = ex.InnerException;
        this.Log(string.Format("{0}\n\n{1}", ex.Message, ex.StackTrace)); // log error
        s3Upload.AbortPartToCloud(); // abort current upload
        s3Upload.Dispose(); // dispose all objects
        statuses.Add(new FilesStatus(ex.Message));
        return;
    }

    statuses.Add(new FilesStatus(s3Upload.DestinationFilename, fileSize, ""));
}
Keep in mind that to have a Session object inside a Generic Handler, you need to implement IRequiresSessionState so your handler will look like:
public class UploadHandlerSimple : IHttpHandler, IRequiresSessionState
Inside fileupload.js (under _initXHRData) I have added an extra header called X-Max-Chunk-Size so I can pass this to Amazon and calculate whether it's the last part of the uploaded file.
Feel free to comment and make smart edits for everyone to use.
I guess you didn't set the content-length of the part inside the UploadPartToCloud() function.
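In the request-chaining API from the question, that means each part's size has to match the bytes actually in the stream; here is a sketch of the corrected part request (names as in the question's UploadPartToCloud):

// set the part size from the chunk's stream, not from the configured max chunk size,
// and drop WithFilePosition: the stream already contains just this chunk
UploadPartRequest request = new UploadPartRequest()
    .WithBucketName(destinationBucket)
    .WithKey(destinationFilename.TrimStart('/'))
    .WithUploadId(initResponse.UploadId)
    .WithPartNumber(partNumber)
    .WithPartSize(fileStream.Length)
    .WithInputStream(fileStream) as UploadPartRequest;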

Imagestream to file resulting in partially gray image (sometimes)

I'm having an issue uploading / sending images from a stream to a generic handler in ASP.NET. I've built a Windows Phone app that can take an image and send it to the generic handler on my website.
Unfortunately, on some occasions the image turns out to be (partially) gray. I think it might have something to do with a lost/bad internet connection on the mobile device. This happens every 50 images or so.
When a faulty image is sent, I do not get an error of any kind. I'm looking for two possible solutions:
1. How do I prevent the Windows Phone app from uploading a partially gray image to the generic handler?
2. How do I check on the server whether an image is partially gray, so I can send an error message back to the phone? (See the sketch after the handler code below.)
To make this question more complete, I have included the code of the generic handler and an example image. Second, I'm very curious why this occurs: TCP/IP has a handshake, so the issue above should not be possible?
public class UploadImages : IHttpHandler
{
    private IWorkOrderRepository _workOrderRepository;
    private IDigitalFileRepository _digitalFileRepository;
    private IUserRepository _userRepository;

    public UploadImages()
    {
        _workOrderRepository = ((Global)HttpContext.Current.ApplicationInstance).Kernel.Get<IWorkOrderRepository>();
        _digitalFileRepository = ((Global)HttpContext.Current.ApplicationInstance).Kernel.Get<IDigitalFileRepository>();
        _userRepository = ((Global)HttpContext.Current.ApplicationInstance).Kernel.Get<IUserRepository>();
    }

    public void ProcessRequest(HttpContext context)
    {
        var cookie = HttpContext.Current.Request.Cookies[FormsAuthentication.FormsCookieName];
        var user = (Domain.Users.User)HttpContext.Current.User;
        string WorkOrderId = context.Request.QueryString["workOrderId"];
        string latitude = context.Request.QueryString["LATITUDE"];
        string Longitude = context.Request.QueryString["LONGITUDE"];

        if (latitude != "0" && Longitude != "0")
        {
            string file = "Filename.jpg";
            string uploadPath = context.Server.MapPath("~/Temp/");
            using (var stream = new MemoryStream())
            {
                var image = ImageResizer.Resize(Image.FromStream(context.Request.InputStream), 700);
                image.Save(stream, ImageFormat.Jpeg);
                stream.Position = 0;
                var workOrder = _workOrderRepository.GetAll(x => x.Id == Convert.ToInt32(WorkOrderId)).FirstOrDefault();
                workOrder.AddPhoto(_workOrderRepository, _digitalFileRepository, new AuditInfo((Domain.Users.User)user), stream, file, "image/jpeg", Convert.ToDouble(latitude), Convert.ToDouble(Longitude));
            }
        }
        else
        {
            string file = "Filename.jpg";
            string uploadPath = context.Server.MapPath("~/Temp/");
            using (var stream = new MemoryStream())
            {
                var image = ImageResizer.Resize(Image.FromStream(context.Request.InputStream), 700);
                image.Save(stream, ImageFormat.Jpeg);
                stream.Position = 0;
                var workOrder = _workOrderRepository.GetAll(x => x.Id == Convert.ToInt32(WorkOrderId)).FirstOrDefault();
                workOrder.AddPhoto(_workOrderRepository, _digitalFileRepository, new AuditInfo((Domain.Users.User)user), stream, file, "image/jpeg");
            }
        }
    }

    public bool IsReusable
    {
        get
        {
            return false;
        }
    }
}
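Regarding question 2 (detecting the bad image on the server), here is a minimal sketch of a completeness check that could run before Image.FromStream. It assumes the phone posts a plain JPEG body: a complete JPEG ends with the end-of-image marker 0xFF 0xD9, and a dropped connection usually delivers fewer bytes than the declared Content-Length:

// hedged sketch: reject uploads that look truncated before decoding them
private static bool LooksLikeCompleteJpeg(HttpContext context, byte[] body)
{
    // fewer bytes than announced usually means the connection dropped mid-upload
    if (context.Request.ContentLength > 0 && body.Length < context.Request.ContentLength)
        return false;

    // a well-formed JPEG ends with the EOI marker 0xFF 0xD9; gray areas typically
    // come from the decoder filling in scanlines that never arrived
    return body.Length >= 2 &&
           body[body.Length - 2] == 0xFF &&
           body[body.Length - 1] == 0xD9;
}

The handler would read context.Request.InputStream into a byte array first, pass it to this check, and only then wrap it in a MemoryStream for Image.FromStream; if the check fails, return an error status so the phone can retry instead of storing the half-received image.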
