Dumping an IObservable&lt;byte[]&gt; to a stream - C#

I currently have an IObservable<byte[]> that is, in fact, a sequence of chunks of a source file, produced by the method below, which "chunkifies" a stream into a sequence of byte[].
The problem is that, given this sequence, I would like to write it to a destination stream. In other words, I have to dump each byte[] to a file stream until the sequence is finished, and I also need to await until the sequence finishes.
So far, the code I created works, but I'm afraid it's not the correct way to do it. The relevant part, where the IObservable<byte[]> is handled, is the Download method.
async Task Main()
{
    using (var httpClient = new HttpClient())
    {
        var downloader = new HttpDownloader(httpClient);
        var destinationPath = Path.Combine(Path.GetTempPath(), "test.zip");
        await downloader.Download("https://github.com/gus33000/MSM8994-8992-NT-ARM64-Drivers/archive/master.zip", destinationPath);
        Console.WriteLine("File downloaded to " + destinationPath);
    }
}
public class HttpDownloader
{
    private readonly HttpClient client;

    public HttpDownloader(HttpClient client)
    {
        this.client = client;
    }

    public async Task Download(string url, string path, IDownloadProgress progressObserver = null, int timeout = 30)
    {
        using (var fileStream = File.OpenWrite(path))
        {
            await Download(url, fileStream, progressObserver, timeout);
        }
    }

    private async Task Download(string url, Stream destination, IDownloadProgress progressObserver = null,
        int timeout = 30)
    {
        long? totalBytes = 0;
        long bytesWritten = 0;
        await ObservableMixin.Using(() => client.GetAsync(url, HttpCompletionOption.ResponseHeadersRead),
                s =>
                {
                    totalBytes = s.Content.Headers.ContentLength;
                    if (!totalBytes.HasValue)
                    {
                        progressObserver?.Percentage.OnNext(double.PositiveInfinity);
                    }
                    return ObservableMixin.Using(() => s.Content.ReadAsStreamAsync(),
                        contentStream => contentStream.ReadToEndObservable());
                })
            .Do(bytes =>
            {
                bytesWritten += bytes.Length;
                if (totalBytes.HasValue)
                {
                    progressObserver?.Percentage.OnNext((double)bytesWritten / totalBytes.Value);
                }
                progressObserver?.BytesDownloaded?.OnNext(bytesWritten);
            })
            .Timeout(TimeSpan.FromSeconds(timeout))
            .Select(bytes => Observable.FromAsync(async () =>
            {
                await destination.WriteAsync(bytes, 0, bytes.Length);
                return Unit.Default;
            }))
            .Merge(1);
    }
    private static readonly int BufferSize = 8192;

    public async Task<Stream> GetStream(string url, IDownloadProgress progress = null, int timeout = 30)
    {
        // Path.GetTempFileName() already returns a full path inside the temp folder
        var tmpFile = Path.GetTempFileName();
        var stream = File.Create(tmpFile, BufferSize, FileOptions.DeleteOnClose);
        await Download(url, stream, progress, timeout);
        return stream;
    }
}
public interface IDownloadProgress
{
    ISubject<double> Percentage { get; set; }
    ISubject<long> BytesDownloaded { get; set; }
}

public static class ObservableMixin
{
    public static IObservable<TSource> Using<TSource, TResource>(
        Func<Task<TResource>> resourceFactoryAsync,
        Func<TResource, IObservable<TSource>> observableFactory)
        where TResource : IDisposable =>
        Observable.FromAsync(resourceFactoryAsync).SelectMany(
            resource => Observable.Using(() => resource, observableFactory));
}
public static class StreamExtensions
{
    internal const int defaultBufferSize = 4096;

    public static IObservable<byte[]> ReadToEndObservable(this Stream stream)
    {
        return stream.ReadToEndObservable(new byte[defaultBufferSize]);
    }

    public static IObservable<byte[]> ReadToEndObservable(this Stream stream, int bufferSize)
    {
        return stream.ReadToEndObservable(new byte[bufferSize]);
    }

    internal static IObservable<byte[]> ReadToEndObservable(this Stream stream, byte[] buffer)
    {
        return Observable.Create<byte[]>(
            observer =>
            {
                var subscription = new SerialDisposable();
                return new CompositeDisposable(
                    subscription,
                    Scheduler.Immediate.Schedule(
                        self =>
                        {
                            bool continueReading = true;
                            subscription.SetDisposableIndirectly(() =>
                                stream.ReadObservable(buffer).SubscribeSafe(
                                    data =>
                                    {
                                        if (data.Length > 0)
                                        {
                                            observer.OnNext(data);
                                        }
                                        else
                                        {
                                            continueReading = false;
                                        }
                                    },
                                    observer.OnError,
                                    () =>
                                    {
                                        if (continueReading)
                                        {
                                            self();
                                        }
                                        else
                                        {
                                            observer.OnCompleted();
                                        }
                                    }));
                        }));
            });
    }

    internal static IObservable<byte[]> ReadObservable(this Stream stream, byte[] buffer)
    {
        return stream.ReadObservable(buffer, 0, buffer.Length).Select(
            read =>
            {
                byte[] data;
                if (read <= 0)
                {
                    data = new byte[0];
                }
                else if (read == buffer.Length)
                {
                    data = (byte[])buffer.Clone();
                }
                else
                {
                    data = new byte[read];
                    Array.Copy(buffer, data, read);
                }
                return data;
            });
    }

    public static IObservable<int> ReadObservable(this Stream stream, byte[] buffer, int offset, int count)
    {
        return Observable.StartAsync(cancel => stream.ReadAsync(buffer, offset, count, cancel));
    }
}
public static class SerialDisposableExtensions
{
    public static void SetDisposableIndirectly(this SerialDisposable disposable, Func<IDisposable> factory)
    {
        var indirection = new SingleAssignmentDisposable();
        disposable.Disposable = indirection;
        indirection.Disposable = factory();
    }
}

public static class SafeObservableExtensions
{
    public static IDisposable SubscribeSafe<T>(this IObservable<T> source, Action<T> onNext,
        Action<Exception> onError, Action onCompleted)
    {
        return source.SubscribeSafe(Observer.Create<T>(onNext, onError, onCompleted));
    }
}
Does it look OK?

I initially thought your ReadToEndObservable must have had a bug, so I wrote this instead:
public static IObservable<byte[]> ReadToEndObservable(this Stream stream, int bufferSize) =>
    Observable.Defer<byte[]>(() =>
    {
        var bytesRead = -1;
        var bytes = new byte[bufferSize];
        return
            Observable.While<byte[]>(
                () => bytesRead != 0,
                Observable
                    .FromAsync(() => stream.ReadAsync(bytes, 0, bufferSize))
                    .Do(x =>
                    {
                        bytesRead = x;
                    })
                    .Select(x => bytes.Take(x).ToArray()));
    });
It still didn't seem to work.
I then tried it with this simple code:
IObservable<byte[]> test1 =
    Observable
        .Using(
            () => File.Open($@"{path}\HttpDownloader-master\HttpDownloader-master\HttpDownloader.sln", FileMode.Open),
            s => s.ReadToEndObservable(24));
That worked with my code, and it also worked with yours.
I then thought that there might be something wrong with the stream you're trying to download. There wasn't a problem as such - just that the file is 555 MB in size.
I think your code is fine; the file was simply too big and the download was timing out.
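For what it's worth, the dump-and-await part can be expressed quite compactly on its own. A minimal sketch, assuming System.Reactive, where source stands for the IObservable<byte[]> and destination for the target Stream (Concat plays the same role as your Merge(1): one ordered write at a time):
// write each chunk as it arrives, strictly in order, then await completion;
// LastOrDefaultAsync makes the await safe even for an empty sequence
await source
    .Select(bytes => Observable.FromAsync(ct => destination.WriteAsync(bytes, 0, bytes.Length, ct)))
    .Concat()
    .LastOrDefaultAsync();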

Related

Why is it not possible to delete files quickly (<60s) across threads in ASP.NET?

I get the error
System.IO.IOException: 'The process cannot access the file 'xxx' because it is being used by another process.'
when I try to delete a temp file in a background worker service in ASP.NET Core.
I am eventually allowed to delete the file, but only after about a minute (52s, 73s).
If I change garbage collection to workstation mode, I can instead delete after ~1s (but still, a delay).
I have tried a combination of FileOptions to no avail, including FileOptions.WriteThrough.
When the controller writes the file, I use FlushAsync(), Close(), Dispose() and 'using' (I know it's overkill).
I also tried using just File.WriteAllBytesAsync, with the same result.
In the background reader, I likewise use Close() and Dispose().
(Hint: the background reader will not allow me to use DeleteOnClose, which would have been ideal.)
As I search Stack Overflow for similar 'used by another process' issues, all the ones I have found eventually resolve to 'argh, it turns out I still had an extra open instance/reference I forgot about', but I have not been able to spot myself doing that.
Another hint: in the writing controller, I am able to delete the file immediately after writing it, I presume because I am still on the same thread?
Is there some secret knowledge I should read somewhere about being able to delete recently opened files across threads?
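A general note before the code snippets: on Windows, deleting a file that still has an open handle only succeeds if every open handle to it was created with FileShare.Delete. A minimal sketch of a writer opened that way (path and bytes are hypothetical):
// a handle opened with FileShare.Delete allows another thread to delete the file
// even while this handle (or any other FileShare.Delete handle) is still open
using (var fs = new FileStream(path, FileMode.Create, FileAccess.Write,
                               FileShare.Read | FileShare.Delete))
{
    await fs.WriteAsync(bytes, 0, bytes.Length);
}
File.Delete(path);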
UPDATE: here are the relevant(?) code snippets:
// (AspNet Controller)
[RequestSizeLimit(9999999999)]
[DisableFormValueModelBinding]
[RequestFormLimits(MultipartBodyLengthLimit = MaxFileSize)]
[HttpPost("{sessionId}")]
public async Task<IActionResult> UploadRevisionChunk(Guid sessionId) {
log.LogWarning($"UploadRevisionChunk: {sessionId}");
string uploadFolder = UploadFolder.sessionFolderPath(sessionId);
if (!Directory.Exists(uploadFolder)) { throw new Exception($"chunk-upload failed"); }
var cr = parseContentRange(Request);
if (cr == null) { return this.BadRequest("no content range header specified"); }
string chunkName = $"{cr.From}-{cr.To}";
string saveChunkPath = Path.Combine(uploadFolder,chunkName);
await streamToChunkFile_WAB(saveChunkPath); // write-all-bytes.
//await streamToChunkFile_MAN(saveChunkPath); // Manual.
long crTo = cr.To ?? 0;
long crFrom = cr.From ?? 0;
long expected = (crTo - crFrom) + 1;
var fi = new FileInfo(saveChunkPath);
var dto = new ChunkResponse { wrote = fi.Length, expected = expected, where = "?" };
string msg = $"at {crFrom}, wrote {dto.wrote} bytes (expected {dto.expected}) to {dto.where}";
log.LogWarning(msg);
return Ok(dto);
}
private async Task streamToChunkFile_WAB(string saveChunkPath) {
using (MemoryStream ms = new MemoryStream()) {
await Request.Body.CopyToAsync(ms); // synchronous CopyTo on the request body throws by default in ASP.NET Core (AllowSynchronousIO is off)
byte[] allBytes = ms.ToArray();
await System.IO.File.WriteAllBytesAsync(saveChunkPath, allBytes);
}
}
// stream reader in the backgroundService:
public class MyMultiStream : Stream {
string[] filePaths;
FileStream curStream = null;
IEnumerator<string> i;
ILogger log;
QueueItem qItem;
public MyMultiStream(string[] filePaths_, Stream[] streams_, ILogger log_, QueueItem qItem_) {
qItem = qItem_;
log = log_;
filePaths = filePaths_;
log.LogWarning($"filepaths has #items: {filePaths.Length}");
IEnumerable<string> enumerable = filePaths;
i = enumerable.GetEnumerator();
i.MoveNext();// necessary to prime the iterator.
}
public override bool CanRead { get { return true; } }
public override bool CanWrite { get { return false; } }
public override bool CanSeek { get { return false; } }
public override long Length { get { throw new Exception("dont get length"); } }
public override long Position {
get { throw new Exception("dont get Position"); }
set { throw new Exception("dont set Position"); }
}
public override void SetLength(long value) { throw new Exception("dont set length"); }
public override long Seek(long offset, SeekOrigin origin) { throw new Exception("dont seek"); }
public override void Write(byte[] buffer, int offset, int count) { throw new Exception("dont write"); }
public override void Flush() { throw new Exception("dont flush"); }
public static int openStreamCounter = 0;
public static int closedStreamCounter = 0;
string curFileName = "?";
private FileStream getNextStream() {
string nextFileName = i.Current;
if (nextFileName == null) { throw new Exception("getNextStream should not be called past file list"); }
//tryDelete(nextFileName,log);
FileStream nextStream = new FileStream(
path:nextFileName,
mode: FileMode.Open,
access: FileAccess.Read,
share: FileShare.ReadWrite| FileShare.Delete,
bufferSize:4096, // apparently default.
options: 0
| FileOptions.Asynchronous
| FileOptions.SequentialScan
// | FileOptions.DeleteOnClose // (1) this ought to be possible, (2) we should fix this approach (3) if we can fix this, our issue is solved, and our code much simpler.
); // None); // ReadWrite); // None); // ReadWrite); //| FileShare.Read);
log.LogWarning($"TELLUS making new stream [{nextFileName}] opened:[{++openStreamCounter}] closed:[{closedStreamCounter}]");
curFileName = nextFileName;
++qItem.chunkCount;
return nextStream;
}
public override int Read(byte[] buffer, int offset, int count) {
int bytesRead = 0;
while (true) {
bytesRead = 0;
if (curStream == null) { curStream = getNextStream(); }
try {
bytesRead = curStream.Read(buffer, offset, count);
log.LogWarning($"..bytesRead:{bytesRead} [{Path.GetFileName(curFileName)}]"); // (only show a short name.)
} catch (Exception e) {
log.LogError($"failed reading [{curFileName}] [{e.Message}]",e);
}
if (bytesRead > 0) { break; }
curStream.Close();
curStream.Dispose();
curStream = null;
log.LogWarning($"TELLUS closing stream [{curFileName}] opened:[{openStreamCounter}] closed:[{++closedStreamCounter}]");
//tryDelete(curFileName); Presumably we can't delete so soon.
bool moreFileNames = i.MoveNext();
log.LogWarning($"moreFileNames?{moreFileNames}");
if (!moreFileNames) {
break;
}
}
return bytesRead;
}
..
// Background worker operating multistream:
public class BackgroundChunkWorker: BackgroundService {
ILogger L;
ChunkUploadQueue q;
public readonly IServiceScopeFactory scopeFactory;
public BackgroundChunkWorker(ILogger<int> log_, ChunkUploadQueue q_, IServiceScopeFactory scopeFactory_) {
q = q_; L = log_;
scopeFactory = scopeFactory_;
}
override protected async Task ExecuteAsync(CancellationToken cancel) { await BackgroundProcessing(cancel); }
private async Task BackgroundProcessing(CancellationToken cancel) {
while (!cancel.IsCancellationRequested) {
try {
await Task.Delay(1000,cancel);
bool ok = q.q.TryDequeue(out var item);
if (!ok) { continue; }
L.LogInformation($"item found! {item}");
await treatItemScope(item);
} catch (Exception ex) {
L.LogCritical("An error occurred when processing. Exception: {#Exception}", ex);
}
}
}
private async Task<bool> treatItemScope(QueueItem Qitem) {
using (var scope = scopeFactory.CreateScope()) {
var ris = scope.ServiceProvider.GetRequiredService<IRevisionIntegrationService>();
return await treatItem(Qitem, ris);
}
}
private async Task<bool> treatItem(QueueItem Qitem, IRevisionIntegrationService ris) {
await Task.Delay(0);
L.LogWarning($"TryAddValue from P {Qitem.sessionId}");
bool addOK = q.p.TryAdd(Qitem.sessionId, Qitem);
if (!addOK) {
L.LogError($"why couldnt we add session {Qitem.sessionId} to processing-queue?");
return false;
}
var startTime = DateTime.UtcNow;
Guid revisionId = Qitem.revisionId;
string[] filePaths = getFilePaths(Qitem.sessionId);
Stream[] streams = filePaths.Select(fileName => new FileStream(fileName, FileMode.Open)).ToArray();
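// note: these FileStreams are opened here but never used - MyMultiStream ignores its
// streams_ argument - and never disposed, so each handle stays open until the GC
// finalizes it, which would explain why deletes only start succeeding after a delay
// (or sooner under workstation GC)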
MyMultiStream multiStream = new MyMultiStream(filePaths, streams, this.L, Qitem);
BimRevisionStatus brs = await ris.UploadRevision(revisionId, multiStream, startTime);
// (launchDeletes is my current hack/workaround,
// it is not part of the problem)
// await multiStream.launchDeletes();
Qitem.status = brs;
return true;
}
..

GraphServiceClient: LargeFileUploadTask with a Stream

I'm using a library to encrypt files.
The library takes an input Stream and an output Stream.
I'd like to use the library to encrypt data and upload it to OneDrive.
I should use LargeFileUploadTask to upload the file to OneDrive, but I don't know if it's possible to avoid writing the encrypted data to a temporary file before doing the actual upload.
Is there a way I can get a Stream to just write to, while the data is uploaded to OneDrive?
var encrypter = new StreamEncrypter("password");
using (var sourceStream = await OpenStream(input)) {
using (var destStream = await OpenStreamForWrite(output)) {
await encrypter.Encrypt(sourceStream, destStream);
}
}
I found a way: as suggested, I implemented my own Stream. The size of the destination file must be known before the upload (in my scenario I can compute the final size because I'm using AES encryption).
For now I didn't bother to implement the Seek method (it probably only gets called when an upload is resumed, because I see it called just once, with 0 as the requested position).
The test program:
var appOptions = new PublicClientApplicationOptions()
{
ClientName = appName,
ClientId = appId,
TenantId = tenantId,
RedirectUri = redirectUri
};
var app = PublicClientApplicationBuilder.CreateWithApplicationOptions(appOptions).Build();
var storageProperties = new StorageCreationPropertiesBuilder("UserTokenCache.txt", "./cache")
.WithLinuxKeyring(
"com.contoso.devtools.tokencache",
MsalCacheHelper.LinuxKeyRingDefaultCollection,
"MSAL token cache for all Contoso dev tool apps.",
new KeyValuePair<string, string>("Version", "1"),
new KeyValuePair<string, string>("ProductGroup", "MyApps"))
.WithMacKeyChain(
"myapp_msal_service",
"myapp_msal_account")
.Build();
var cacheHelper = await MsalCacheHelper.CreateAsync(storageProperties);
cacheHelper.VerifyPersistence();
cacheHelper.RegisterCache(app.UserTokenCache);
var accounts = await app.GetAccountsAsync();
AuthenticationResult result;
try {
result = await app.AcquireTokenSilent(scopes, accounts.FirstOrDefault())
.ExecuteAsync();
} catch (MsalUiRequiredException) {
result = await app.AcquireTokenInteractive(scopes)
.ExecuteAsync();
}
var graphClient = new GraphServiceClient(new AuthenticationProvider(result.AccessToken));
using (var fileStream = System.IO.File.OpenRead(@"c:\test\test.zip")) {
UploadSession uploadSession = await graphClient.Me.Drive.Root.ItemWithPath("/test/test.zip").CreateUploadSession().Request().PostAsync();
using (var oneDriveStream = new OneDriveStream(uploadSession, fileStream.Length)) {
byte[] buffer = new byte[4096];
int read;
while ((read = await fileStream.ReadAsync(buffer, 0, buffer.Length)) > 0) {
await oneDriveStream.WriteAsync(buffer, 0, read);
}
await oneDriveStream.WaitTask();
}
}
The OneDriveStream class:
internal class OneDriveStream : Stream
{
private readonly Task? _writeTask;
private long _pos;
private readonly long _cacheSize = 1310720;
private readonly List<byte> _buffer = new();
private readonly SemaphoreSlim _bufferSema = new(1,1);
public OneDriveStream(UploadSession session, long length)
{
Length = length;
_writeTask = new LargeFileUploadTask<DriveItem>(session, this, (int)_cacheSize).UploadAsync();
}
public override bool CanRead => true;
public override bool CanSeek => true;
public override bool CanWrite => true;
public override long Length { get; }
public override long Position {
get => _pos;
set => _pos = value;
}
protected override void Dispose(bool disposing)
{
_writeTask?.Dispose();
}
public override void Flush()
{
}
public override int Read(byte[] buffer, int offset, int count)
{
if (_pos >= Length)
return 0;
_bufferSema.Wait();
int readCount = 0;
for (int i = 0; i < count; i++) {
if (_buffer.Count > 0) {
buffer[offset + i] = _buffer[0];
_buffer.RemoveAt(0);
_pos++;
readCount++;
if (_pos >= Length)
break;
} else {
_bufferSema.Release();
Thread.Sleep(20);
_bufferSema.Wait();
i--;
}
}
_bufferSema.Release();
return readCount;
}
public override long Seek(long offset, SeekOrigin origin)
{
return offset;
}
public override void SetLength(long value)
{
throw new NotImplementedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
while(_buffer.Count > 0)
Thread.Sleep(10);
_bufferSema.Wait();
for (int i = 0; i < count; i++) {
_buffer.Add(buffer[offset + i]);
}
_bufferSema.Release();
}
public async Task WaitTask()
{
if (_writeTask != null)
await _writeTask;
}
}
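A possible refinement, sketched below under the assumption that the rest of OneDriveStream stays exactly as above: the List<byte> plus Thread.Sleep polling can be replaced with a bounded BlockingCollection<byte[]> (System.Collections.Concurrent), so Write blocks while the uploader is behind and Read blocks until data arrives:
// same producer/consumer handoff, without sleep-polling; these members would
// replace _buffer and _bufferSema in the class above
private readonly BlockingCollection<byte[]> _chunks = new(boundedCapacity: 4);
private byte[] _current;
private int _currentOffset;

public override void Write(byte[] buffer, int offset, int count)
{
    var copy = new byte[count];
    Buffer.BlockCopy(buffer, offset, copy, 0, count);
    _chunks.Add(copy); // blocks while the upload side is behind
}

public override int Read(byte[] buffer, int offset, int count)
{
    if (_pos >= Length)
        return 0;
    if (_current == null || _currentOffset == _current.Length)
    {
        _current = _chunks.Take(); // blocks until the writer adds a chunk
        _currentOffset = 0;
    }
    int n = Math.Min(count, _current.Length - _currentOffset);
    Buffer.BlockCopy(_current, _currentOffset, buffer, offset, n);
    _currentOffset += n;
    _pos += n;
    return n;
}
Since Length is known up front, Read can still rely on _pos >= Length to report end-of-stream, so no extra completion signal is needed.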

I use XLABS to pick images from a gallery. How to get the filepath of the image?

I'm using XLabs in my Xamarin.Forms project, and I'm trying to reach the underlying data of the image, but I'm not sure how to get it with my current code. I have a viewmodel and a page where I use the function, and the function itself works fine: I can pick an image and display it. But I also want to get the path/file data.
My viewmodel:
public ImageSource ImageSource
{
get { return _ImageSource; }
set { SetProperty (ref _ImageSource, value); }
}
private byte[] imageData;
public byte[] ImageData { get { return imageData; } }
private byte[] ReadStream(Stream input)
{
byte[] buffer = new byte[16*1024];
using (MemoryStream ms = new MemoryStream())
{
int read;
while ((read = input.Read(buffer, 0, buffer.Length)) > 0)
{
ms.Write(buffer, 0, read);
}
return ms.ToArray();
}
}
public async Task SelectPicture()
{
Setup ();
ImageSource = null;
try
{
var mediaFile = await _Mediapicker.SelectPhotoAsync(new CameraMediaStorageOptions
{
DefaultCamera = CameraDevice.Front,
MaxPixelDimension = 400
});
VideoInfo = mediaFile.Path;
ImageSource = ImageSource.FromStream(() => mediaFile.Source);
}
catch (System.Exception ex)
{
Status = ex.Message;
}
}
private static double ConvertBytesToMegabytes(long bytes)
{
double rtn_value = (bytes / 1024f) / 1024f;
return rtn_value;
}
My page where I use it:
MyViewModel photoGallery = null;
photoGallery = new MyViewModel ();
private async void btnPickPicture_Clicked (object sender, EventArgs e)
{
await photoGallery.SelectPicture ();
imgPicked.Source = photoGallery.ImageSource; //imgPicked is my image x:name from XAML.
}
MediaFile has a Path property. You even refer to it in your ViewModel:
VideoInfo = mediaFile.Path;
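And, hedging that the file is still readable at that point, the raw bytes can be pulled through the ReadStream helper you already have, for example:
// fill ImageData using the existing helper, reading from the picked file's path
using (var fs = File.OpenRead(mediaFile.Path))
{
    imageData = ReadStream(fs);
}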

Streaming video from an external service

I am working on a project (server side) where I need to stream data (videos, large files) to clients.
This worked perfectly using ByteRangeStreamContent, as I was serving files from disk and could create a seekable stream (FileStream).
if (Request.Headers.Range != null)
{
try
{
HttpResponseMessage partialResponse = Request.CreateResponse(HttpStatusCode.PartialContent);
partialResponse.Content = new ByteRangeStreamContent(fs, Request.Headers.Range, mediaType);
return partialResponse;
}
catch (InvalidByteRangeException invalidByteRangeException)
{
return Request.CreateErrorResponse(invalidByteRangeException);
}
}
else
{
response.Content = new StreamContent(fs);
response.Content.Headers.ContentType = mediaType;
return response;
}
But I moved the file provider from disk to an external service. The service allows me to get chunks of data (Range {0}-{1}).
Of course, it's not possible to download the whole file into memory and then use a MemoryStream for the ByteRangeStreamContent, for the obvious reason that too many concurrent downloads would consume all the available memory at some point.
I found this article https://vikingerik.wordpress.com/2014/09/28/progressive-download-support-in-asp-net-web-api/ where the author says:
A change request I got for my library was to support reading only the
necessary data and sending that out rather than opening a stream for
the full data. I wasn’t sure what this would buy until the user
pointed out they are reading their resource data from a WCF stream
which does not support seeking and would need to read the whole stream
into a MemoryStream in order to allow the library to generate the
output.
That limitation still exists in this specific object but there is a
workaround. Instead of using a ByteRangeStreamContent, you could
instead use a ByteArrayContent object instead. Since the majority of
RANGE requests will be for a single start and end byte, you could pull
the range from the HttpRequestMessage, retrieve only the bytes you
need and send it back out as a byte stream. You’ll also need to add
the CONTENT-RANGE header and set the response code to 206
(PartialContent) but this could be a viable alternative (though I
haven’t tested it) for users who do not want or can’t easily get a
compliant stream object.
So, my question basically is: how can I do that?
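For reference, the workaround the quoted author describes would look roughly like this, where FetchRange and totalLength stand in for whatever the external service actually exposes:
// hedged sketch of the ByteArrayContent approach: serve a single requested range,
// set Content-Range, and return 206 Partial Content
var range = Request.Headers.Range.Ranges.First();
long from = range.From ?? 0;
long to = range.To ?? totalLength - 1;
byte[] data = FetchRange(from, to); // hypothetical call into the external service
var response = Request.CreateResponse(HttpStatusCode.PartialContent);
response.Content = new ByteArrayContent(data);
response.Content.Headers.ContentRange = new ContentRangeHeaderValue(from, to, totalLength);
response.Content.Headers.ContentType = mediaType;
return response;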
I finally managed to do it.
Here's how:
Custom implementation of a stream:
public class BufferedHTTPStream : Stream
{
private readonly Int64 cacheLength = 4000000;
private const Int32 noDataAvaiable = 0;
private MemoryStream stream = null;
private Int64 currentChunkNumber = -1;
private Int64? length;
private Boolean isDisposed = false;
private Func<long, long, Stream> _getStream;
private Func<long> _getContentLength;
public BufferedHTTPStream(Func<long, long, Stream> streamFunc, Func<long> lengthFunc)
{
_getStream = streamFunc;
_getContentLength = lengthFunc;
}
public override Boolean CanRead
{
get
{
EnsureNotDisposed();
return true;
}
}
public override Boolean CanWrite
{
get
{
EnsureNotDisposed();
return false;
}
}
public override Boolean CanSeek
{
get
{
EnsureNotDisposed();
return true;
}
}
public override Int64 Length
{
get
{
EnsureNotDisposed();
if (length == null)
{
length = _getContentLength();
}
return length.Value;
}
}
public override Int64 Position
{
get
{
EnsureNotDisposed();
Int64 streamPosition = (stream != null) ? stream.Position : 0;
Int64 position = (currentChunkNumber != -1) ? currentChunkNumber * cacheLength : 0;
return position + streamPosition;
}
set
{
EnsureNotDisposed();
EnsurePositiv(value, "Position");
Seek(value);
}
}
public override Int64 Seek(Int64 offset, SeekOrigin origin)
{
EnsureNotDisposed();
switch (origin)
{
case SeekOrigin.Begin:
break;
case SeekOrigin.Current:
offset = Position + offset;
break;
default:
offset = Length + offset;
break;
}
return Seek(offset);
}
private Int64 Seek(Int64 offset)
{
Int64 chunkNumber = offset / cacheLength;
if (currentChunkNumber != chunkNumber)
{
ReadChunk(chunkNumber);
currentChunkNumber = chunkNumber;
}
offset = offset - currentChunkNumber * cacheLength;
stream.Seek(offset, SeekOrigin.Begin);
return Position;
}
private void ReadNextChunk()
{
currentChunkNumber += 1;
ReadChunk(currentChunkNumber);
}
private void ReadChunk(Int64 chunkNumberToRead)
{
Int64 rangeStart = chunkNumberToRead * cacheLength;
if (rangeStart >= Length) { return; }
Int64 rangeEnd = rangeStart + cacheLength - 1;
if (rangeStart + cacheLength > Length)
{
rangeEnd = Length - 1;
}
if (stream != null) { stream.Close(); }
stream = new MemoryStream((int)cacheLength);
var responseStream = _getStream(rangeStart, rangeEnd);
responseStream.Position = 0;
responseStream.CopyTo(stream);
responseStream.Close();
stream.Position = 0;
}
public override void Close()
{
EnsureNotDisposed();
base.Close();
if (stream != null) { stream.Close(); }
isDisposed = true;
}
public override Int32 Read(Byte[] buffer, Int32 offset, Int32 count)
{
EnsureNotDisposed();
EnsureNotNull(buffer, "buffer");
EnsurePositiv(offset, "offset");
EnsurePositiv(count, "count");
if (buffer.Length - offset < count) { throw new ArgumentException("count"); }
if (stream == null) { ReadNextChunk(); }
if (Position >= Length) { return noDataAvaiable; }
if (Position + count > Length)
{
count = (Int32)(Length - Position);
}
Int32 bytesRead = stream.Read(buffer, offset, count);
Int32 totalBytesRead = bytesRead;
count -= bytesRead;
while (count > noDataAvaiable)
{
ReadNextChunk();
offset = offset + bytesRead;
bytesRead = stream.Read(buffer, offset, count);
count -= bytesRead;
totalBytesRead = totalBytesRead + bytesRead;
}
return totalBytesRead;
}
public override void SetLength(Int64 value)
{
EnsureNotDisposed();
throw new NotImplementedException();
}
public override void Write(Byte[] buffer, Int32 offset, Int32 count)
{
EnsureNotDisposed();
throw new NotImplementedException();
}
public override void Flush()
{
EnsureNotDisposed();
}
private void EnsureNotNull(Object obj, String name)
{
if (obj != null) { return; }
throw new ArgumentNullException(name);
}
private void EnsureNotDisposed()
{
if (!isDisposed) { return; }
throw new ObjectDisposedException("BufferedHTTPStream");
}
private void EnsurePositiv(Int32 value, String name)
{
if (value > -1) { return; }
throw new ArgumentOutOfRangeException(name);
}
private void EnsurePositiv(Int64 value, String name)
{
if (value > -1) { return; }
throw new ArgumentOutOfRangeException(name);
}
private void EnsureNegativ(Int64 value, String name)
{
if (value < 0) { return; }
throw new ArgumentOutOfRangeException(name);
}
}
Usage:
var fs = new BufferedHTTPStream((start, end) =>
{
// return stream from external service
}, () =>
{
// return stream length from external service
});
HttpResponseMessage partialResponse = Request.CreateResponse(HttpStatusCode.PartialContent);
partialResponse.Content = new ByteRangeStreamContent(fs, Request.Headers.Range, mediaType);
partialResponse.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment")
{
FileName = fileName
};
return partialResponse;

how to upload a large file with ASP.NET MVC4 Web Api with progressbar

How can I upload a large file with ASP.NET MVC4 Web API,
and also get progress?
I saw this post and I understand how to handle the uploaded file, but how can I get the progress data?
How To Accept a File POST
Please don't send me links to upload products;
I want to understand how to handle this the MVC4 Web API way...
Here is example code for handling a file upload in MVC4 Web API:
public async Task<HttpResponseMessage> Post()
{
if (Request.Content.IsMimeMultipartContent())
{
var path = HttpContext.Current.Server.MapPath("~/App_Data");
var provider = new MultipartFormDataStreamProvider(path);
await Request.Content.ReadAsMultipartAsync(provider).ContinueWith(t =>
{
if (t.IsFaulted || t.IsCanceled)
throw new HttpResponseException(HttpStatusCode.InternalServerError);
});
return Request.CreateResponse(HttpStatusCode.OK);
}
else
{
throw new HttpResponseException(Request.CreateResponse(HttpStatusCode.NotAcceptable, "This request is not properly formatted"));
}
}
Now, when I call
await Request.Content.ReadAsMultipartAsync(provider)
how can I tell how many bytes have been loaded?
By default there is a limit on the size of uploaded files in two places: one at the request level, and, if you are hosting on IIS, one at the web-server level. I added a couple of configs as mentioned in this blog, and I was able to upload a 36 MB file without any issues. I have posted the snippets below. Note that maxRequestLength is specified in kilobytes while maxAllowedContentLength is in bytes; both values below allow roughly 2 GB.
1.
<system.web>
<httpRuntime maxRequestLength="2097152"/>
</system.web>
2.
<system.webServer>
<security>
<requestFiltering>
<requestLimits maxAllowedContentLength="2147483648" />
</requestFiltering>
</security>
</system.webServer>
It's easy to find the size of the file loaded onto the server if you wish. In your code,
while reading through the file data in the stream, you can read each item's local file name as shown below.
string savedFile = fileData.LocalFileName;
// use the FileInfo class to derive properties of the uploaded file
FileInfo file = new FileInfo(savedFile);
// this will give the size of the uploaded file, in KB
long size = file.Length / 1024;
Hope this helps. I wonder why this was marked down?
I use this solution:
public class UploadController : ApiController
{
private static ConcurrentDictionary<string, State> _state = new ConcurrentDictionary<string, State>();
public State Get(string id)
{
State state;
if (_state.TryGetValue(id, out state))
{
return state;
}
return null;
}
public async Task<HttpResponseMessage> Post([FromUri] string id)
{
if (Request.Content.IsMimeMultipartContent())
{
var state = new State(Request.Content.Headers.ContentLength);
if (!_state.TryAdd(id, state))
throw new HttpResponseException(Request.CreateResponse(HttpStatusCode.Conflict));
var path = System.Web.Hosting.HostingEnvironment.MapPath("~/App_Data");
var provider = new FileMultipartStreamProvider(path, state.Start, state.AddBytes);
await Request.Content.ReadAsMultipartAsync(provider).ContinueWith(t =>
{
_state.TryRemove(id, out state);
if (t.IsFaulted || t.IsCanceled)
throw new HttpResponseException(HttpStatusCode.InternalServerError);
});
return Request.CreateResponse(HttpStatusCode.OK);
}
else
{
throw new HttpResponseException(Request.CreateResponse(HttpStatusCode.NotAcceptable, "This request is not properly formatted"));
}
}
}
public class State
{
public long? Total { get; set; }
public long Received { get; set; }
public string Name { get; set; }
public State(long? total = null)
{
Total = total;
}
public void Start(string name)
{
Received = 0;
Name = name;
}
public void AddBytes(long size)
{
Received = size;
}
}
public class FileMultipartStreamProvider : MultipartStreamProvider
{
private string _rootPath;
private Action<string> _startUpload;
private Action<long> _uploadProgress;
public FileMultipartStreamProvider(string root_path, Action<string> start_upload, Action<long> upload_progress)
: base()
{
_rootPath = root_path;
_startUpload = start_upload;
_uploadProgress = upload_progress;
}
public override System.IO.Stream GetStream(HttpContent parent, System.Net.Http.Headers.HttpContentHeaders headers)
{
var name = (headers.ContentDisposition.Name ?? "undefined").Replace("\"", "").Replace("\\", "_").Replace("/", "_").Replace("..", "_");
_startUpload(name);
return new WriteFileStreamProxy(Path.Combine(_rootPath, name), _uploadProgress);
}
}
public class WriteFileStreamProxy : FileStream
{
private Action<long> _writeBytes;
public WriteFileStreamProxy(string file_path, Action<long> write_bytes)
: base(file_path, FileMode.Create, FileAccess.Write)
{
_writeBytes = write_bytes;
}
public override void EndWrite(IAsyncResult asyncResult)
{
base.EndWrite(asyncResult);
#if DEBUG
System.Threading.Thread.Sleep(100);
#endif
if (_writeBytes != null)
_writeBytes(base.Position);
}
public override void Write(byte[] array, int offset, int count)
{
base.Write(array, offset, count);
#if DEBUG
System.Threading.Thread.Sleep(100);
#endif
if (_writeBytes != null)
_writeBytes(base.Position);
}
}
and small configure for non-buffered input stream:
config.Services.Replace(typeof(IHostBufferPolicySelector), new CustomPolicy());
implemented this:
public class CustomPolicy : System.Web.Http.WebHost.WebHostBufferPolicySelector
{
public override bool UseBufferedInputStream(object hostContext)
{
return false;
}
}
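On the client side, polling the controller above might look roughly like this (the URLs and the multipartContent variable are hypothetical):
// start the upload with an id, then poll GET api/upload/{id} for the State object
var id = Guid.NewGuid().ToString();
using (var http = new HttpClient())
{
    Task<HttpResponseMessage> upload = http.PostAsync("http://localhost/api/upload?id=" + id, multipartContent);
    while (!upload.IsCompleted)
    {
        var json = await http.GetStringAsync("http://localhost/api/upload/" + id);
        Console.WriteLine(json); // serialized State: Total, Received, Name
        await Task.Delay(500);
    }
}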
I ended up using an HttpModule, but even the HttpModule wouldn't show the progress bar.
I found out something very interesting: when I upload the file over a secure protocol (https://), the progress works, but over plain http:// it does not, and the file is fully buffered. I don't know why it is like that; I believe it's a bug somewhere between IIS and the ASP.NET framework in how the request gets processed.
Since I did get it working over https with an HttpModule, I believe it's possible to make it work with MVC Web API as well, but I currently don't have the time to check that.
For parsing the multipart form data I used the Nancy HttpMultipart parser, from here:
https://github.com/NancyFx/Nancy/tree/master/src/Nancy
I just grabbed these classes:
HttpMultipart.cs
HttpMultipartBoundary.cs
HttpMultipartBuffer.cs
HttpMultipartSubStream.cs
Here is the HttpModule source:
public class HttpUploadModule : IHttpModule
{
public static DateTime lastClean = DateTime.UtcNow;
public static TimeSpan cleanInterval = new TimeSpan(0,10,0);
public static readonly object cleanLocker = new object();
public static readonly Dictionary<Guid,UploadData> Uploads = new Dictionary<Guid,UploadData>();
public const int KB = 1024;
public const int MB = KB * 1024;
public static void CleanUnusedResources( HttpContext context)
{
if( lastClean.Add( cleanInterval ) < DateTime.UtcNow ) {
lock( cleanLocker )
{
if( lastClean.Add( cleanInterval ) < DateTime.UtcNow )
{
int maxAge = int.Parse(ConfigurationManager.AppSettings["HttpUploadModule.MaxAge"]);
Uploads.Where(u=> DateTime.UtcNow.AddSeconds(maxAge) > u.Value.createdDate ).ToList().ForEach(u=>{
Uploads.Remove(u.Key);
});
Directory.GetFiles(context.Server.MapPath(ConfigurationManager.AppSettings["HttpUploadModule.Folder"].TrimEnd('/'))).ToList().ForEach(f=>{
if( DateTime.UtcNow.AddSeconds(maxAge) > File.GetCreationTimeUtc(f)) File.Delete(f);
});
lastClean = DateTime.UtcNow;
}
}
}
}
public void Dispose()
{
}
public void Init(HttpApplication app)
{
app.BeginRequest += app_BeginRequest;
}
void app_BeginRequest(object sender, EventArgs e)
{
HttpContext context = ((HttpApplication)sender).Context;
Guid uploadId = Guid.Empty;
if (context.Request.HttpMethod == "POST" && context.Request.ContentType.ToLower().StartsWith("multipart/form-data"))
{
IServiceProvider provider = (IServiceProvider)context;
HttpWorkerRequest wr = (HttpWorkerRequest)provider.GetService(typeof(HttpWorkerRequest));
FileStream fs = null;
MemoryStream ms = null;
CleanUnusedResources(context);
string contentType = wr.GetKnownRequestHeader(HttpWorkerRequest.HeaderContentType);
NameValueCollection queryString = HttpUtility.ParseQueryString( wr.GetQueryString() );
UploadData upload = new UploadData { id = uploadId ,status = 0, createdDate = DateTime.UtcNow };
if(
!contentType.Contains("boundary=") ||
/*AT LEAST 1KB */ context.Request.ContentLength < KB ||
/*MAX 5MB */ context.Request.ContentLength > MB*5 ||
/*IS UPLOADID */ !Guid.TryParse(queryString["upload_id"], out uploadId) || Uploads.ContainsKey( uploadId )) {
upload.id = uploadId;
upload.status = 2;
Uploads.Add(upload.id, upload);
context.Response.StatusCode = 400;
context.Response.StatusDescription = "Bad Request";
context.Response.End();
}
string boundary = Nancy.HttpMultipart.ExtractBoundary( contentType );
upload.id = uploadId;
upload.status = 0;
Uploads.Add(upload.id, upload);
try {
if (wr.HasEntityBody())
{
upload.bytesRemaining =
upload.bytesTotal = wr.GetTotalEntityBodyLength();
upload.bytesLoaded =
upload.BytesReceived = wr.GetPreloadedEntityBodyLength();
if (!wr.IsEntireEntityBodyIsPreloaded())
{
byte[] buffer = new byte[KB * 8];
int readSize = buffer.Length;
ms = new MemoryStream();
//fs = new FileStream(context.Server.MapPath(ConfigurationManager.AppSettings["HttpUploadModule.Folder"].TrimEnd('/')+'/' + uploadId.ToString()), FileMode.CreateNew);
while (upload.bytesRemaining > 0)
{
upload.BytesReceived = wr.ReadEntityBody(buffer, 0, readSize);
if(upload.bytesRemaining == upload.bytesTotal) {
}
ms.Write(buffer, 0, upload.BytesReceived);
upload.bytesLoaded += upload.BytesReceived;
upload.bytesRemaining -= upload.BytesReceived;
if (readSize > upload.bytesRemaining)
{
readSize = upload.bytesRemaining;
}
}
//fs.Flush();
//fs.Close();
ms.Position = 0;
//the file is in our hands
Nancy.HttpMultipart multipart = new Nancy.HttpMultipart(ms, boundary);
foreach( Nancy.HttpMultipartBoundary b in multipart.GetBoundaries()) {
if(b.Name == "data") {
upload.filename = uploadId.ToString()+Path.GetExtension( b.Filename ).ToLower();
fs = new FileStream(context.Server.MapPath(ConfigurationManager.AppSettings["HttpUploadModule.Folder"].TrimEnd('/')+'/' + upload.filename ), FileMode.CreateNew);
b.Value.CopyTo(fs);
fs.Flush();
fs.Close();
upload.status = 1;
context.Response.StatusCode = 200;
context.Response.StatusDescription = "OK";
context.Response.Write( context.Request.ApplicationPath.TrimEnd('/') + "/images/temp/" + upload.filename );
}
}
}
}
}
catch(Exception ex) {
upload.ex = ex;
}
if(upload.status != 1)
{
upload.status = 2;
context.Response.StatusCode = 400;
context.Response.StatusDescription = "Bad Request";
}
context.Response.End();
}
}
}
public class UploadData {
public Guid id { get;set; }
public string filename {get;set;}
public int bytesLoaded { get; set; }
public int bytesTotal { get; set; }
public int BytesReceived {get; set;}
public int bytesRemaining { get;set; }
public int status { get;set; }
public Exception ex { get;set; }
public DateTime createdDate { get;set; }
}
