I am working on a legacy web service that is used to transfer zip files between servers. It worked fine until recently, when the files grew larger than 700 MB. This causes exceptions because my console application (the client) cannot receive the large byte array sent by the web service in one piece, so I tried to send and receive the byte array in chunks, but the resulting zip file ends up corrupt and cannot be unzipped. Below is what I have tried. I am not very familiar with web services, so I would appreciate it if you could point out my mistake or suggest a better solution:
console app(original) :
// --- Original single-shot download (client side) ---
// One SOAP call returns the ENTIRE file as a byte[]; for files over ~700 MB this is
// what overruns the client's message-size/memory limits and throws.
string saveto = MdlMain.FileName;
object file = (object)serviceSoapClient.GetFile( saveto);
// NOTE(review): 'str2' is not defined in this snippet — presumably the destination file
// name; confirm against the full program.
FileStream fileStream = new FileStream(GlobalVariable.FileDirectory + str2, FileMode.Create);
// The Conversions/NewLateBinding calls look like decompiler output from VB.NET; the
// late-bound "Length" lookup is just ((byte[])file).Length.
fileStream.Write((byte[])file, 0, Conversions.ToInteger(NewLateBinding.LateGet(file, (Type)null, "Length", new object[0], (string[])null, (Type[])null, (bool[])null)));
fileStream.Flush();
fileStream.Close();
webservice (original) :
/// <summary>
/// Returns the whole file <paramref name="strFilename"/> (relative to DJDownloadPath)
/// as a single byte array.
/// </summary>
/// <param name="strFilename">File name appended to the service's download directory.</param>
/// <returns>The exact file contents; length equals the file's length.</returns>
public byte[] GetFile( string strFilename)
{
    // BUG FIX: the original allocated checked((int)(length + 1L - 1L) + 1) == length + 1
    // bytes, so every download carried one trailing 0x00 byte — enough to corrupt a zip,
    // whose central directory sits at the end of the file. It also ignored Read's return
    // value (Read may return fewer bytes than requested) and never disposed the stream on
    // failure. File.ReadAllBytes allocates exactly Length bytes, loops until the file is
    // fully read, and disposes the stream.
    byte[] data = File.ReadAllBytes(this.DJDownloadPath + strFilename);

    // Disable ASP.NET response buffering so the payload streams to the client instead of
    // being held in server memory (preserved from the original).
    HttpContext.Current.Response.BufferOutput = false;
    HttpContext.Current.Response.Buffer = false;
    return data;
}
Console app (Modified):
// --- Modified chunked download (client side) ---
// NOTE(review): '#"D:\example.zip"' is not valid C# — a verbatim string literal is
// written @"D:\example.zip"; likely a paste/transcription error.
FileStream fileStream= new FileStream(#"D:\example.zip", FileMode.Create);
int totalchunkNo = targetfilesize / 2000000;
int remainder = targetfilesize % 2000000;
// NOTE(review): 'remainder != totalchunkNo' compares a byte remainder to a chunk count,
// which looks accidental — rounding up only needs 'remainder > 0'.
if (remainder > 0 && remainder != totalchunkNo)
totalchunkNo++;
for (int i = 1; i <= totalchunkNo; i++)
{
// ROOT CAUSE of the corrupt zip: GetFileByChunk (server side) always returns a full
// 2,000,000-byte buffer, so the LAST chunk arrives zero-padded and those pad bytes
// are written into the output file here.
byte[] bytetobeWritten = (byte[])serviceSoapClient.GetFileByChunk(MdlMain.FileName, i);
// Late-bound "Length" lookup (decompiled VB artifact) — equivalent to bytetobeWritten.Length.
fileStream.Write(bytetobeWritten, 0, Conversions.ToInteger(NewLateBinding.LateGet(bytetobeWritten, (Type)null, "Length", new object[0], (string[])null, (Type[])null, (bool[])null)));
}
fileStream.Flush();
fileStream.Close();
webservice (Modified):
/// <summary>
/// Returns one chunk (up to 2,000,000 bytes) of the requested file.
/// Chunk numbers are 1-based; the client requests chunk 1..ceil(length/chunkSize).
/// </summary>
/// <param name="strFilename">File name appended to the service's download directory.</param>
/// <param name="requestChunkNo">1-based chunk index.</param>
/// <returns>
/// The chunk's bytes — sized EXACTLY to the data it contains — or null when the
/// requested chunk is out of range (mirrors the original's behavior).
/// </returns>
[WebMethod]
public byte[] GetFileByChunk(string strFilename, int requestChunkNo)
{
    // Must match the chunk size the client uses to compute totalchunkNo.
    const int chunkSize = 2000000;

    using (FileStream fileStream = new FileStream(this.DJDownloadPath + strFilename, FileMode.Open, FileAccess.Read))
    {
        long offset = (long)(requestChunkNo - 1) * chunkSize;
        if (requestChunkNo < 1 || offset >= fileStream.Length)
        {
            return null; // out-of-range chunk, as in the original
        }

        // BUG FIX: the original always returned the full 2,000,000-byte outboundBuffer,
        // so the LAST (partial) chunk came back zero-padded and the reassembled zip was
        // corrupt. Size the result to the bytes that actually remain.
        int count = (int)Math.Min(chunkSize, fileStream.Length - offset);
        byte[] buffer = new byte[count];

        // Also fixed: the original read the ENTIRE file into memory on every request —
        // the very problem chunking was meant to solve. Seek straight to the chunk and
        // loop, because Read may return fewer bytes than asked for.
        fileStream.Seek(offset, SeekOrigin.Begin);
        int read = 0;
        while (read < count)
        {
            int n = fileStream.Read(buffer, read, count - read);
            if (n == 0)
            {
                break; // unexpected EOF; return what we have
            }
            read += n;
        }
        return buffer;
    }
}
Related
int n = 0;
string encodeString = string.Empty;
using (FileStream fsSource = new FileStream("test.pdf", FileMode.Open, FileAccess.Read))
{
byte[] bytes = new byte[count];
// NOTE(review): the 'offset' argument of Stream.Read is an index into 'bytes' (the
// destination buffer), NOT a position in the file. Calling this with offset = 1024 on a
// 'count'-byte buffer throws ArgumentException. To start reading at a file position,
// set fsSource.Position (or Seek) first and pass 0 as the buffer offset.
n = fsSource.Read(bytes, offset, count);
// Encodes the whole buffer, including any unread tail bytes when n < count.
encodeString = System.Convert.ToBase64String(bytes);
}
The above code works fine if I provide offset = 0 and length = 1024, but the second time, with offset = 1024 and length = 1024, it returns an error (an ArgumentException — the offset parameter indexes into the destination buffer, which is only 1024 bytes long).
My requirement is I want to get byte array data from offset to length.
1st chunk = 0-1024
2nd chunk = 1024-2048
..
Last chunk = SomeValue -Filesize.
Example in Node.js using readChunk.sync(file_path, Number(offset), Number(size)); - this code is able to get the byte array of data from offset to length.
/// <summary>
/// Reads "test.pdf" in fixed-size chunks and returns its contents Base64-encoded.
/// </summary>
/// <returns>Base64 string of the file's exact bytes.</returns>
public static string ReadFileStreamInChunks()
{
    // BUG FIX: the original pushed the PDF's raw bytes through a StreamReader/StringWriter
    // pair. StreamReader applies UTF-8 text decoding, which is lossy for arbitrary binary
    // data, so the produced string could never reproduce the original file. (The unused
    // 'encodeString' local in the original shows the intent was a Base64 encoding.)
    const int readChunkBufferLength = 1024;
    string filePath = "test.pdf";

    using (var ms = new MemoryStream())
    using (var fs = new FileStream(filePath, FileMode.Open, FileAccess.Read))
    {
        var buffer = new byte[readChunkBufferLength];
        int bytesRead;
        // Read may return fewer bytes than requested; loop until EOF (returns 0).
        while ((bytesRead = fs.Read(buffer, 0, buffer.Length)) > 0)
        {
            ms.Write(buffer, 0, bytesRead);
        }
        return Convert.ToBase64String(ms.ToArray());
    }
}
Actually, I think your problem is a misunderstanding of these parameters: count is your chunk size, and offset is where to start writing within the destination buffer. (1) If you want to read from a given position to the end of the file, just seek the stream forward by the number of bytes you want to skip. (2) If you want to read a part of the file from the middle, you shouldn't modify count (that is your chunk size); instead, control where you stop writing your byte array. It is usually a do-while loop like:
// Copy loop: pull chunks from the request stream until EOF or a target byte count.
long position = 0;
do
{
// read bytes from input stream
int bytesRead = request.FileByteStream.Read(buffer, 0, chunkSize);
if (bytesRead == 0)
{
break;
}
// write bytes to output stream
writeStream.Write(buffer, 0, bytesRead);
position += bytesRead;
// NOTE(review): pseudocode — comparing a long to a string will not compile; substitute
// the numeric byte count you want to stop after. Also note position may overshoot it by
// up to chunkSize - 1 bytes, since the check runs only after a whole chunk is written.
if(position == "the value you want")
break;
} while (true);
I want to read from a "start" to a "stop" offset in a raw image file that I created with FTK Imager.
I have code that works, but I don't know if it's the best way of doing it?
// Read file, byte at the time (example 00, 5A), printing bytes [start, stop) as hex.
int start = 512;
int stop = 3345332;
// BUG FIXES vs. the original:
//  * the loop ran 'stop' times after seeking to 'start', so it read [start, start+stop)
//    instead of [start, stop);
//  * ReadByte returns -1 at EOF, which "{0:X2}" formats as FFFFFFFF — now we stop;
//  * repeated 'data = data + hex' is O(n^2); StringBuilder is linear;
//  * the stream is now disposed via 'using' even if an exception is thrown;
//  * Console.Writeline was a typo (won't compile) — the method is Console.WriteLine.
var data = new StringBuilder((stop - start) * 2);
using (FileStream fs = new FileStream("file.001", FileMode.Open, FileAccess.Read))
{
    fs.Position = start;
    for (int i = start; i < stop; i++) // i = offset in bytes
    {
        int b = fs.ReadByte();
        if (b < 0)
        {
            break; // EOF before 'stop'
        }
        data.AppendFormat("{0:X2}", b);
    }
}
Console.WriteLine("data=" + data.ToString());
You want to read a range of bytes from within a file. Why not read all the bytes in one go into an array and then do the transformation?
/// <summary>
/// Reads up to <paramref name="length"/> bytes starting at <paramref name="offset"/>
/// and returns them as an upper-case hex string (two digits per byte).
/// </summary>
/// <param name="filename">Path of the file to read.</param>
/// <param name="offset">Byte position in the file at which to start.</param>
/// <param name="length">Maximum number of bytes to read.</param>
/// <returns>Hex string of the bytes actually read (shorter than 2*length at EOF).</returns>
private string ReadFile(string filename, int offset, int length)
{
    byte[] data = new byte[length];
    using (FileStream fs = new FileStream(filename, FileMode.Open, FileAccess.Read))
    {
        fs.Position = offset;
        // BUG FIX: the original ignored FileStream.Read's return value. Read is allowed
        // to return fewer bytes than requested, and a read past EOF returns 0, so the
        // tail of the buffer stayed zero and the hex string reported "00" bytes that
        // were never read. Loop until filled or EOF, then convert only what was read.
        int total = 0;
        while (total < length)
        {
            int n = fs.Read(data, total, length - total);
            if (n == 0)
            {
                break; // EOF
            }
            total += n;
        }
        return string.Join("", data.Take(total).Select(x => x.ToString("X2")));
    }
}
Because the maximum size of a byte array is 2 GB, let's say I have a larger file and I need to convert it to byte arrays. Since one array can't hold the whole file, how should I split it into two?
I tried:
// NOTE(review): '#"c:\a.mp4"' is not valid C# — a verbatim string is @"c:\a.mp4".
long length = new System.IO.FileInfo(#"c:\a.mp4").Length;
// Integer division: for odd lengths the two halves below lose the final byte.
int chunkSize = Convert.ToInt32(length / 2);
byte[] part2;
FileStream fileStream = new FileStream(filepath, FileMode.Open, FileAccess.Read);
try
{
part2 = new byte[chunkSize]; // create buffer
// NOTE(review): Read's return value is ignored; it may fill less than chunkSize.
fileStream.Read(part2, 0, chunkSize);
}
finally
{
fileStream.Close();
}
byte[] part3;
// BUG(review): the file is REOPENED, so this stream starts at position 0 again —
// part3 receives the same leading bytes as part2, not the second half. Either keep one
// stream open and continue reading, or Seek(chunkSize, SeekOrigin.Begin) first.
fileStream = new FileStream(filepath, FileMode.Open, FileAccess.Read);
try
{
part3 = new byte[chunkSize]; // create buffer
// BUG(review): the second argument (5) is an offset into part3, and the count is
// roughly chunkSize, so 5 + count exceeds part3.Length and throws ArgumentException.
// The buffer offset should be 0 and part3 should be sized (length - chunkSize).
fileStream.Read(part3, 5, (int)(length - (long)chunkSize));
}
finally
{
fileStream.Close();
}
You can use a FileStream to read, in chunks, a file too large to fit into a single byte array:
// Chunk size used for each read: 1 MiB.
const int max = 1024*1024;

/// <summary>
/// Streams <paramref name="file"/> in 1 MiB chunks, starting at byte offset
/// <paramref name="start"/>, handing each chunk to DoSomething.
/// </summary>
/// <param name="file">Path of the file to read.</param>
/// <param name="start">Byte offset in the FILE at which to begin (default 0).</param>
public void ReadALargeFile(string file, int start = 0)
{
    // 'using' disposes the stream even if DoSomething throws (the original's separate
    // construction outside the using block leaked it on constructor-to-using failures).
    using (FileStream fileStream = new FileStream(file, FileMode.Open, FileAccess.Read))
    {
        byte[] buffer = new byte[max];
        fileStream.Seek(start, SeekOrigin.Begin);

        // BUG FIX: the original called Read(buffer, start, max) — Read's second argument
        // is an offset into the BUFFER, not the file. For any start > 0 it either threw
        // ArgumentException (start + max > buffer.Length) or deposited data at the wrong
        // position in the buffer. Seek positions the file; the buffer offset must be 0.
        int bytesRead = fileStream.Read(buffer, 0, max);
        while (bytesRead > 0)
        {
            DoSomething(buffer, bytesRead);
            bytesRead = fileStream.Read(buffer, 0, max);
        }
    }
}
If you are working with extremely large files, you should use MemoryMappedFile, which maps a physical file to a memory space:
// Map the file into the process address space instead of reading it into an array.
// NOTE(review): '#"c:\path\to\big.file"' should be a verbatim string: @"c:\path\to\big.file".
using (var mmf = MemoryMappedFile.CreateFromFile(#"c:\path\to\big.file"))
{
using (var accessor = mmf.CreateViewAccessor())
{
byte myValue = accessor.ReadByte(someOffset);
// NOTE(review): MemoryMappedViewAccessor.Write takes a position plus a value; this call
// appears to be missing the position argument — confirm against the API.
accessor.Write((byte)someValue);
}
}
See also: MemoryMappedViewAccessor
You can also read/write chunks of the file with the different methods in MemoryMappedViewAccessor.
This was my solution:
// Split a file into two byte arrays, giving the extra byte of an odd-length file to
// part2. NOTE(review): 'length' must already hold the file's size (not shown here).
byte[] part1;
byte[] part2;
bool odd = false;
int chunkSize = Convert.ToInt32(length/2);
if (length % 2 == 0)
{
part1 = new byte[chunkSize];
part2 = new byte[chunkSize];
}
else
{
part1 = new byte[chunkSize];
part2 = new byte[chunkSize + 1];
odd = true;
}
FileStream fileStream = new FileStream(filepath, FileMode.Open, FileAccess.Read);
using (fileStream)
{
fileStream.Seek(0, SeekOrigin.Begin);
// The SAME stream is reused, so the second Read continues where the first stopped.
// NOTE(review): both Read return values are unchecked — Read may return fewer bytes
// than requested, leaving zero-filled tails; a fill loop would be safer.
int bytesRead = fileStream.Read(part1, 0, chunkSize);
if (odd)
{
bytesRead = fileStream.Read(part2, 0, chunkSize + 1);
}
else
{
bytesRead = fileStream.Read(part2, 0, chunkSize);
}
}
I am trying to send a FileStream of a file.
But I now want to add 40 byte Checksum to the start.
How can I do this? I've tried creating my own stream class to concatenate two streams, and I've looked at stream writers.
Surely there must be an easy way — or an alternative way. And I DON'T want to load the entire file into a byte array, append to that, and write that back to a stream.
/// <summary>Opens the file at basePath + path for read-only access.</summary>
/// <param name="basePath">Directory prefix (expected to end with a separator).</param>
/// <param name="path">File name or relative path appended to the prefix.</param>
/// <returns>A readable FileStream over the concatenated path; caller disposes it.</returns>
public Stream getFile(String basePath, String path) =>
    new FileStream(basePath + path, FileMode.Open, FileAccess.Read);
See MergeStream.cs. Here's how you can use it:
var mergeStream = new MergeStream(new MemoryStream(checksum), File.OpenRead(path));
return mergeStream;
// Prepend a 40-byte checksum via a temp file: write checksum, append the old file's
// bytes, then copy the temp file back over the original.
byte[] checksum = new byte[40];
//...
FileStream oldFileStream = new FileStream(oldFile, FileMode.Open, FileAccess.Read);
FileStream newFileStream = new FileStream(newFile, FileMode.Create, FileAccess.Write);
using(oldFileStream)
using(newFileStream)
{
// Checksum goes first, then the entire original file is streamed after it.
newFileStream.Write(checksum, 0, checksum.Length);
oldFileStream.CopyTo(newFileStream);
}
// Replace the original with the checksum-prefixed copy; newFile remains on disk.
File.Copy(newFile, oldFile, overwrite : true);
If you don't want to use a temporary file, the only solution is to open the file in ReadWrite mode and use two alternating buffers:
/// <summary>Exchanges the values of the two referenced variables.</summary>
private static void Swap<T>(ref T obj1, ref T obj2)
{
    T held = obj2;
    obj2 = obj1;
    obj1 = held;
}
/// <summary>
/// Prepends <paramref name="bytes"/> to the file at <paramref name="filename"/> in place.
/// </summary>
/// <param name="filename">Path of the file to modify.</param>
/// <param name="bytes">Bytes to insert at the start of the file.</param>
public static void PrependToFile(string filename, byte[] bytes)
{
    // Open read/write and delegate; PrependToStream takes ownership and disposes it.
    var target = new FileStream(filename, FileMode.Open, FileAccess.ReadWrite);
    PrependToStream(target, bytes);
}
// Prepends 'bytes' to 'stream' in place (no temp file) by shifting the existing content
// forward with two alternating buffers: on each pass, the NEXT stretch of original data
// is read into one buffer before the PREVIOUS buffer is written over it. Statement order
// is critical — each region is saved before it is overwritten. Disposes the stream.
public static void PrependToStream(Stream stream, byte[] bytes)
{
const int MAX_BUFFER_SIZE = 4096;
using(stream)
{
// Buffers must hold at least bytes.Length so one write can't outrun one read.
int bufferSize = Math.Max(MAX_BUFFER_SIZE, bytes.Length);
byte[] buffer1 = new byte[bufferSize];
byte[] buffer2 = new byte[bufferSize];
int readCount1;
int readCount2;
// Final stream length once the prefix is in place.
long totalLength = stream.Length + bytes.Length;
// Save the first bytes.Length bytes (the region the prefix will overwrite)...
readCount1 = stream.Read(buffer1, 0, bytes.Length);
stream.Position = 0;
// ...then write the prefix over them.
stream.Write(bytes, 0, bytes.Length);
int written = bytes.Length;
while (written < totalLength)
{
// Save the next region before it is clobbered by the pending write.
readCount2 = stream.Read(buffer2, 0, buffer2.Length);
// Rewind to where that region started and write the previously saved data there.
stream.Position -= readCount2;
stream.Write(buffer1, 0, readCount1);
written += readCount1;
// The just-read region becomes the data to write on the next pass.
Swap(ref buffer1, ref buffer2);
Swap(ref readCount1, ref readCount2);
}
}
}
I am trying to develop an app that will upload large files to a web server running PHP. Almost immediately, I stumbled upon a problem: the file is not split correctly.
Currently I have this piece of code
// Upload loop: read the file in 'garums'-byte chunks, Base64-encode each chunk, and
// send it via sutam(), updating a progress bar along the way.
string adrese = "c:\\directory\\file.jpg";
int garums = 16384;
String ext = Path.GetExtension(adrese);
FileStream file = /*/File.Open(adrese, FileMode.Open);/*/
new FileStream(adrese, FileMode.Open, System.IO.FileAccess.Read);
long fgar = file.Length; //100%
long counter = garums;
// NOTE(review): 'first' is not declared in this snippet — presumably a form field.
first = true;
byte[] chunk = new byte[garums];
while (true)
{
int index = 0;
//long Controll = counter+garums;
// Inner loop fills 'chunk' completely, since Read may return fewer bytes per call.
while (index < chunk.Length)
{
int bytesRead = file.Read(chunk, index, chunk.Length - index);
if (bytesRead == 0)
{
/*byte[] biti = new byte[index];
for (int i = 0; i < index; i++)
{
biti[i] = chunk[i];
}
chunk = new byte[index];
chunk = biti;*/
break;
}
index += bytesRead;
}
if (index != 0) // Our previous chunk may have been the last one
{
// Trim the final partial chunk to 'index' bytes before encoding.
byte[] biti = new byte[index];
for (int i = 0; i < index; i++)
{
biti[i] = chunk[i];
}
chunk = new byte[index];
chunk = biti;
// index is the number of bytes in the chunk
// NOTE(review): each chunk is Base64-encoded SEPARATELY. The server must decode each
// chunk and append the resulting bytes; concatenating the Base64 strings and decoding
// once only works when every chunk's byte count is a multiple of 3 — verify the PHP side.
sutam(Convert.ToBase64String(chunk),ext);
}
// NOTE(review): 'counter' assumes every chunk was full, so the percentage can
// overshoot on the final partial chunk (clamped to 100 below).
double procentuali = ((counter * 100) / fgar);
if (procentuali > 99)
{
procentuali = 100;
}
progressBar1.Value = (int)Math.Round(procentuali);
label1.Text = "" + procentuali;
counter = counter+garums;
if (index != garums) // We didn't read a full chunk: we're done
{
// NOTE(review): 'return' exits the method and SKIPS file.Close() below, leaking the
// handle; prefer 'break', or wrap the stream in a using block.
return;
}
}
file.Close();
Everything works if I set garums to 1, but who is going to wait a year or so to upload a file that is multiple GBs in size?
I would be pleased if you could tell me what is wrong and how to fix this.
Try this instead to upload in chunks:
/// <summary>
/// Splits ~/temp/1.xps into 1 KiB chunks and POSTs each one to the server via
/// ChunkRequest, under a fresh GUID-based file name.
/// </summary>
private void ConvertToChunks()
{
    //Open file
    string file = MapPath("~/temp/1.xps");

    // BUG FIX: the original never disposed the FileStream (handle leaked, and leaked
    // permanently on any exception) and ignored Read's return value even though Read may
    // deliver fewer bytes than requested.
    using (FileStream fileStream = new FileStream(file, FileMode.Open, FileAccess.Read))
    {
        //Chunk size that will be sent to Server
        int chunkSize = 1024;
        // Unique file name
        string fileName = Guid.NewGuid() + Path.GetExtension(file);
        int totalChunks = (int)Math.Ceiling((double)fileStream.Length / chunkSize);

        // Loop through the whole stream and send it chunk by chunk.
        for (int i = 0; i < totalChunks; i++)
        {
            // long math avoids int overflow for files larger than 2 GB.
            long startIndex = (long)i * chunkSize;
            int length = (int)Math.Min(chunkSize, fileStream.Length - startIndex);
            byte[] bytes = new byte[length];

            int read = 0;
            while (read < length)
            {
                int n = fileStream.Read(bytes, read, length - read);
                if (n == 0)
                {
                    break; // unexpected EOF
                }
                read += n;
            }
            ChunkRequest(fileName, bytes);
        }
    }
}
/// <summary>
/// POSTs one file chunk, Base64-encoded as a form field, to the upload handler.
/// </summary>
/// <param name="fileName">Server-side name the chunk belongs to.</param>
/// <param name="buffer">Raw chunk bytes to transmit.</param>
private void ChunkRequest(string fileName,byte[] buffer)
{
    //Request url, Method=post Length and data.
    string requestURL = "http://localhost:63654/hello.ashx";
    HttpWebRequest request = (HttpWebRequest)WebRequest.Create(requestURL);
    request.Method = "POST";
    request.ContentType = "application/x-www-form-urlencoded";

    // Chunk(buffer) is converted to a Base64 string that is decoded back to bytes in the
    // handler. UrlEncode is required because Base64 contains '+' and '/'.
    // NOTE(review): fileName itself is not URL-encoded; fine for GUID + extension names,
    // but encode it too if arbitrary names are ever allowed.
    string requestParameters = @"fileName=" + fileName + "&data=" + HttpUtility.UrlEncode( Convert.ToBase64String(buffer) );
    byte[] byteData = Encoding.UTF8.GetBytes(requestParameters);
    request.ContentLength = byteData.Length;

    // BUG FIX: the original never disposed the request stream on failure, and never
    // disposed the WebResponse or its response stream at all — each chunk leaked a
    // connection, which exhausts the connection pool on large uploads.
    using (Stream writer = request.GetRequestStream())
    {
        writer.Write(byteData, 0, byteData.Length);
    }

    // Receive (and discard) the handler's response; disposing releases the connection.
    using (WebResponse response = request.GetResponse())
    using (StreamReader stIn = new StreamReader(response.GetResponseStream()))
    {
        string strResponse = stIn.ReadToEnd();
    }
}