I have an issue when trying to upload a large file to a SharePoint subfolder.
The issue is related to the variable libraryName. I am not sure how I can change this so I can use a URL instead.
Example:
var site = ""https://sharepoint.com/sites/Test_Site1/"
var relative = "Documents/Folder1/folder2/
https://learn.microsoft.com/en-us/sharepoint/dev/solution-guidance/upload-large-files-sample-app-for-sharepoint
public Microsoft.SharePoint.Client.File UploadFileSlicePerSlice(ClientContext ctx, string libraryName, string fileName, int fileChunkSizeInMB = 3)
{
// Each sliced upload requires a unique ID.
Guid uploadId = Guid.NewGuid();
// Get the name of the file.
string uniqueFileName = Path.GetFileName(fileName);
// Ensure that target library exists, and create it if it is missing.
if (!LibraryExists(ctx, ctx.Web, libraryName))
{
CreateLibrary(ctx, ctx.Web, libraryName);
}
// Get the folder to upload into.
List docs = ctx.Web.Lists.GetByTitle(libraryName);
ctx.Load(docs, l => l.RootFolder);
// Get the information about the folder that will hold the file.
ctx.Load(docs.RootFolder, f => f.ServerRelativeUrl);
ctx.ExecuteQuery();
// File object.
Microsoft.SharePoint.Client.File uploadFile = null;
// Calculate block size in bytes.
int blockSize = fileChunkSizeInMB * 1024 * 1024;
// Get the size of the file.
long fileSize = new FileInfo(fileName).Length;
if (fileSize <= blockSize)
{
// Use regular approach.
using (FileStream fs = new FileStream(fileName, FileMode.Open))
{
FileCreationInformation fileInfo = new FileCreationInformation();
fileInfo.ContentStream = fs;
fileInfo.Url = uniqueFileName;
fileInfo.Overwrite = true;
uploadFile = docs.RootFolder.Files.Add(fileInfo);
ctx.Load(uploadFile);
ctx.ExecuteQuery();
// Return the file object for the uploaded file.
return uploadFile;
}
}
else
{
// Use large file upload approach.
ClientResult<long> bytesUploaded = null;
FileStream fs = null;
try
{
fs = System.IO.File.Open(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
using (BinaryReader br = new BinaryReader(fs))
{
byte[] buffer = new byte[blockSize];
Byte[] lastBuffer = null;
long fileoffset = 0;
long totalBytesRead = 0;
int bytesRead;
bool first = true;
bool last = false;
// Read data from file system in blocks.
while ((bytesRead = br.Read(buffer, 0, buffer.Length)) > 0)
{
totalBytesRead = totalBytesRead + bytesRead;
// You've reached the end of the file.
if (totalBytesRead == fileSize)
{
last = true;
// Copy to a new buffer that has the correct size.
lastBuffer = new byte[bytesRead];
Array.Copy(buffer, 0, lastBuffer, 0, bytesRead);
}
if (first)
{
using (MemoryStream contentStream = new MemoryStream())
{
// Add an empty file.
FileCreationInformation fileInfo = new FileCreationInformation();
fileInfo.ContentStream = contentStream;
fileInfo.Url = uniqueFileName;
fileInfo.Overwrite = true;
uploadFile = docs.RootFolder.Files.Add(fileInfo);
// Start upload by uploading the first slice.
using (MemoryStream s = new MemoryStream(buffer))
{
// Call the start upload method on the first slice.
bytesUploaded = uploadFile.StartUpload(uploadId, s);
ctx.ExecuteQuery();
// fileoffset is the pointer where the next slice will be added.
fileoffset = bytesUploaded.Value;
}
// You can only start the upload once.
first = false;
}
}
else
{
if (last)
{
// Is this the last slice of data?
using (MemoryStream s = new MemoryStream(lastBuffer))
{
// End sliced upload by calling FinishUpload.
uploadFile = uploadFile.FinishUpload(uploadId, fileoffset, s);
ctx.ExecuteQuery();
// Return the file object for the uploaded file.
return uploadFile;
}
}
else
{
using (MemoryStream s = new MemoryStream(buffer))
{
// Continue sliced upload.
bytesUploaded = uploadFile.ContinueUpload(uploadId, fileoffset, s);
ctx.ExecuteQuery();
// Update fileoffset for the next slice.
fileoffset = bytesUploaded.Value;
}
}
}
} // while ((bytesRead = br.Read(buffer, 0, buffer.Length)) > 0)
}
}
finally
{
if (fs != null)
{
fs.Dispose();
}
}
}
return null;
}
This is the program from which I run the method:
using Microsoft.SharePoint.Client;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security;
using System.Text;
using System.Threading.Tasks;
namespace Contoso.Core.LargeFileUpload
{
class Program
{
static void Main(string[] args)
{
// Request Office365 site from the user
string siteUrl = @"https://bundegruppen.sharepoint.com/sites/F24-2905/";
/* Prompt for Credentials */
//Console.WriteLine("Filer blir overført til site: {0}", siteUrl);
string userName = "xx.xx#bxxbygg.no";
SecureString pwd = new SecureString();
string password = "xxx";
foreach (char c in password.ToCharArray()) pwd.AppendChar(c);
/* End Program if no Credentials */
if (string.IsNullOrEmpty(userName) || (pwd == null))
return;
ClientContext ctx = new ClientContext(siteUrl);
ctx.AuthenticationMode = ClientAuthenticationMode.Default;
ctx.Credentials = new SharePointOnlineCredentials(userName, pwd);
// These should both work as expected.
try
{
// Alternative 3 for uploading large files: slice per slice, which allows you to stop and resume an upload
new FileUploadService().UploadFileSlicePerSliceToFolder(ctx, "Dokumenter/General", @"C:\Temp\F24_Sammenstillingsmodell.smc");
}
catch (Exception ex)
{
Console.WriteLine(string.Format("Exception while uploading files to the target site: {0}.", ex.ToString()));
Console.WriteLine("Press enter to continue.");
Console.Read();
}
// Just to see what we have in console
Console.ForegroundColor = ConsoleColor.White;
}
}
}
The code you have is written just to upload the specified file to the RootFolder of the named library. If you pass in a full path to a folder instead of just a library name, it will fail.
The following is a modified version of the function that should let you pass a full serverRelativeUrl to the desired folder:
public Microsoft.SharePoint.Client.File UploadFileSlicePerSliceToFolder(ClientContext ctx, string serverRelativeFolderUrl, string fileName, int fileChunkSizeInMB = 3)
{
// Each sliced upload requires a unique ID.
Guid uploadId = Guid.NewGuid();
// Get the name of the file.
string uniqueFileName = Path.GetFileName(fileName);
// Get the folder to upload into.
Folder uploadFolder = ctx.Web.GetFolderByServerRelativeUrl(serverRelativeFolderUrl);
// Get the information about the folder that will hold the file.
ctx.Load(uploadFolder);
ctx.ExecuteQuery();
// File object.
Microsoft.SharePoint.Client.File uploadFile = null;
// Calculate block size in bytes.
int blockSize = fileChunkSizeInMB * 1024 * 1024;
// Get the size of the file.
long fileSize = new FileInfo(fileName).Length;
if (fileSize <= blockSize)
{
// Use regular approach.
using (FileStream fs = new FileStream(fileName, FileMode.Open))
{
FileCreationInformation fileInfo = new FileCreationInformation();
fileInfo.ContentStream = fs;
fileInfo.Url = uniqueFileName;
fileInfo.Overwrite = true;
uploadFile = uploadFolder.Files.Add(fileInfo);
ctx.Load(uploadFile);
ctx.ExecuteQuery();
// Return the file object for the uploaded file.
return uploadFile;
}
}
else
{
// Use large file upload approach.
ClientResult<long> bytesUploaded = null;
FileStream fs = null;
try
{
fs = System.IO.File.Open(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
using (BinaryReader br = new BinaryReader(fs))
{
byte[] buffer = new byte[blockSize];
Byte[] lastBuffer = null;
long fileoffset = 0;
long totalBytesRead = 0;
int bytesRead;
bool first = true;
bool last = false;
// Read data from file system in blocks.
while ((bytesRead = br.Read(buffer, 0, buffer.Length)) > 0)
{
totalBytesRead = totalBytesRead + bytesRead;
// You've reached the end of the file.
if (totalBytesRead == fileSize)
{
last = true;
// Copy to a new buffer that has the correct size.
lastBuffer = new byte[bytesRead];
Array.Copy(buffer, 0, lastBuffer, 0, bytesRead);
}
if (first)
{
using (MemoryStream contentStream = new MemoryStream())
{
// Add an empty file.
FileCreationInformation fileInfo = new FileCreationInformation();
fileInfo.ContentStream = contentStream;
fileInfo.Url = uniqueFileName;
fileInfo.Overwrite = true;
uploadFile = uploadFolder.Files.Add(fileInfo);
// Start upload by uploading the first slice.
using (MemoryStream s = new MemoryStream(buffer))
{
// Call the start upload method on the first slice.
bytesUploaded = uploadFile.StartUpload(uploadId, s);
ctx.ExecuteQuery();
// fileoffset is the pointer where the next slice will be added.
fileoffset = bytesUploaded.Value;
}
// You can only start the upload once.
first = false;
}
}
else
{
if (last)
{
// Is this the last slice of data?
using (MemoryStream s = new MemoryStream(lastBuffer))
{
// End sliced upload by calling FinishUpload.
uploadFile = uploadFile.FinishUpload(uploadId, fileoffset, s);
ctx.ExecuteQuery();
// Return the file object for the uploaded file.
return uploadFile;
}
}
else
{
using (MemoryStream s = new MemoryStream(buffer))
{
// Continue sliced upload.
bytesUploaded = uploadFile.ContinueUpload(uploadId, fileoffset, s);
ctx.ExecuteQuery();
// Update fileoffset for the next slice.
fileoffset = bytesUploaded.Value;
}
}
}
} // while ((bytesRead = br.Read(buffer, 0, buffer.Length)) > 0)
}
}
finally
{
if (fs != null)
{
fs.Dispose();
}
}
}
return null;
}
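With this version, the second argument should be the folder's server-relative URL. The call in the Main method above would then look something like this (a sketch; the segment names are taken from the example and should be checked against the library's actual URL in the browser):
// A server-relative URL starts at the root of the web application,
// e.g. /sites/<site>/<library>/<folder>
new FileUploadService().UploadFileSlicePerSliceToFolder(
    ctx,
    "/sites/F24-2905/Dokumenter/General",
    @"C:\Temp\F24_Sammenstillingsmodell.smc");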
Related
I have to prepare a zip file and secure it with a password.
I'm using C# and the SharpZipLib library.
I have to return the created file using WebApi. The code below does not work: I can open the file without providing a password. What am I doing wrong?
[HttpGet]
[Route("zip")]
public async Task ZipFile()
{
Response.StatusCode = 200;
Response.Headers.Add("ContentDisposition", $"attachment; filename=\"{Path.GetFileName("z.zip")}\"");
Response.Headers.Add("ContentType", "application/octet-stream");
compressDirectory("C:\\temp\\files");
}
private void compressDirectory(string DirectoryPath, int CompressionLevel = 9)
{
string[] filenames = Directory.GetFiles(DirectoryPath);
using (ZipOutputStream OutputStream = new ZipOutputStream(Response.Body))
{
OutputStream.SetLevel(CompressionLevel);
byte[] buffer = new byte[4096];
for (int i=0; i<filenames.Length; i++)
{
ZipEntry entry = new ZipEntry($"{i}\\" +Path.GetFileName(filenames[i]));
entry.DateTime = DateTime.Now;
OutputStream.PutNextEntry(entry);
using (FileStream fs = System.IO.File.OpenRead(filenames[i]))
{
int sourceBytes;
do
{
sourceBytes = fs.Read(buffer, 0, buffer.Length);
OutputStream.Write(buffer, 0, sourceBytes);
} while (sourceBytes > 0);
}
}
OutputStream.Password = "123";
OutputStream.Finish();
OutputStream.Close();
}
}
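A likely cause: ZipOutputStream.Password only affects entries added after it is assigned, so setting it just before Finish() leaves every entry unencrypted. A minimal sketch of the fix (same SharpZipLib API; for stronger AES encryption each ZipEntry would also need its AESKeySize set):
using (ZipOutputStream OutputStream = new ZipOutputStream(Response.Body))
{
    OutputStream.Password = "123"; // must be set BEFORE PutNextEntry is called
    OutputStream.SetLevel(CompressionLevel);
    // ... add entries and write file contents exactly as above ...
    OutputStream.Finish();
}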
I'm trying to use a FileStream with a relative path but it is not working.
var pic = ReadFile("~/Images/money.png");
It is working when I use something like:
var p = GetFilePath();
var pic = ReadFile(p);
The rest of the code (from SO):
public static byte[] ReadFile(string filePath)
{
byte[] buffer;
FileStream fileStream = new FileStream(filePath, FileMode.Open, FileAccess.Read);
try
{
int length = (int)fileStream.Length; // get file length
buffer = new byte[length]; // create buffer
int count; // actual number of bytes read
int sum = 0; // total number of bytes read
// read until Read method returns 0 (end of the stream has been reached)
while ((count = fileStream.Read(buffer, sum, length - sum)) > 0)
sum += count; // sum is a buffer offset for next reading
}
finally
{
fileStream.Close();
}
return buffer;
}
public string GetFilePath()
{
return HttpContext.Current.Server.MapPath("~/Images/money.png");
}
I don't get why it is not working, because the FileStream constructor allows relative paths.
I'm assuming the folder containing your program has the subfolder Images, which contains your image file.
\folder\program.exe
\folder\Images\money.jpg
Try without the "~".
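For context: the "~" prefix is an ASP.NET virtual-path convention that only helpers like Server.MapPath understand; System.IO.FileStream treats it as a literal directory name. A sketch of resolving it first (this mirrors the GetFilePath method above and assumes the code runs inside an ASP.NET application):
// Resolve the app-relative path to a physical path, then read it.
string physicalPath = HttpContext.Current.Server.MapPath("~/Images/money.png");
var pic = ReadFile(physicalPath);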
I also had the same issue, but I solved it by using the code below.
Try one of these snippets; hopefully one solves your issue too.
#region GetImageStream
public static Stream GetImageStream(string Image64string)
{
Stream imageStream = new MemoryStream();
if (!string.IsNullOrEmpty(Image64string))
{
byte[] imageBytes = Convert.FromBase64String(Image64string.Substring(Image64string.IndexOf(',') + 1));
using (Image targetimage = BWS.AWS.S3.ResizeImage(System.Drawing.Image.FromStream(new MemoryStream(imageBytes, false)), new Size(1600, 1600), true))
{
targetimage.Save(imageStream, ImageFormat.Jpeg);
}
}
return imageStream;
}
#endregion
2nd one
#region GetImageStream
public static Stream GetImageStream(Stream stream)
{
Stream imageStream = new MemoryStream();
if (stream != null)
{
using (Image targetimage = BWS.AWS.S3.ResizeImage(System.Drawing.Image.FromStream(stream), new Size(1600, 1600), true))
{
targetimage.Save(imageStream, ImageFormat.Jpeg);
}
}
return imageStream;
}
#endregion
I handle big files (at least 500 MB) that I split and merge in C#.
I have to split a file into thousands of pieces, sort these pieces into groups, and merge the pieces in each group back together.
The minimum number of files is 10,000.
I implemented the merge function using Stream.CopyTo(). Here is the main part:
using (Stream writer = File.OpenWrite(outputFilePath))
{
int fileNum = filePaths.Count();
for (int i = 0; i < fileNum; i++)
{
using (Stream reader = File.OpenRead(filePaths.ElementAt(i)))
{ reader.CopyTo(writer); }
}
}
I've tested my program by splitting a 500 MB file into 17,000 files in 2 groups and merging each group of 8,500 files into one file.
The merging part takes about 80 seconds, which seems slow compared to splitting the same file, which takes about 15-20 seconds.
Is there any method that is faster than my code?
Your code looks fine, but ElementAt is a code smell. Convert the sequence to an array and use [i] instead. With 10K elements, I'm positive you're wasting a lot of time.
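For example (a sketch of the same loop, assuming filePaths is an IEnumerable<string> as in the question):
string[] paths = filePaths.ToArray(); // enumerate once instead of per element
using (Stream writer = File.OpenWrite(outputFilePath))
{
    for (int i = 0; i < paths.Length; i++)
    {
        using (Stream reader = File.OpenRead(paths[i]))
        { reader.CopyTo(writer); }
    }
}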
Why not just use the Stream.CopyTo() method?
private static void CombineMultipleFilesIntoSingleFile(string inputDirectoryPath, string inputFileNamePattern, string outputFilePath)
{
string[] inputFilePaths = Directory.GetFiles(inputDirectoryPath, inputFileNamePattern);
Console.WriteLine("Number of files: {0}.", inputFilePaths.Length);
using (var outputStream = File.Create(outputFilePath))
{
foreach (var inputFilePath in inputFilePaths)
{
using (var inputStream = File.OpenRead(inputFilePath))
{
// Buffer size can be passed as the second argument.
inputStream.CopyTo(outputStream);
}
Console.WriteLine("The file {0} has been processed.", inputFilePath);
}
}
}
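If the default CopyTo buffer (81920 bytes in current .NET) turns out to be too small for this workload, the overload with an explicit buffer size can be tried, e.g.:
inputStream.CopyTo(outputStream, 1024 * 1024); // 1 MB buffer; worth benchmarking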
OR
Do it in chunks:
const int chunkSize = 2 * 1024; // 2KB
var inputFiles = new[] { "file1.dat", "file2.dat" }; // illustrative paths; the original initializer was lost in formatting
using (var output = File.Create("output.dat"))
{
foreach (var file in inputFiles)
{
using (var input = File.OpenRead(file))
{
var buffer = new byte[chunkSize];
int bytesRead;
while ((bytesRead = input.Read(buffer, 0, buffer.Length)) > 0)
{
output.Write(buffer, 0, bytesRead);
}
}
}
}
Maybe try compressing the files?
using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
using System.IO.Compression;
class Program {
static void SaveCompressedFile(string filename, string data) {
using (FileStream fileStream = new FileStream(filename, FileMode.Create, FileAccess.Write))
using (GZipStream compressionStream = new GZipStream(fileStream, CompressionMode.Compress))
using (StreamWriter writer = new StreamWriter(compressionStream)) {
writer.Write(data);
}
}
static string LoadCompressedFile(string filename) {
using (FileStream fileStream = new FileStream(filename, FileMode.Open, FileAccess.Read))
using (GZipStream compressionStream = new GZipStream(fileStream, CompressionMode.Decompress))
using (StreamReader reader = new StreamReader(compressionStream)) {
return reader.ReadToEnd();
}
}
static void Main(string[] args) {
try {
string filename = "compressedFile.txt";
string sourceString = "Source String";
SaveCompressedFile(filename, sourceString);
FileInfo compressedFileData = new FileInfo(filename);
string recoveredString = LoadCompressedFile(filename);
} catch (IOException ex) {
Console.WriteLine(ex.ToString());
}
}
}
Source
Also check out the example of compressing a directory.
using System;
using System.Text;
using System.IO;
using System.IO.Compression;
namespace CmprDir
{
class Program
{
delegate void ProgressDelegate(string sMessage);
static void CompressFile(string sDir, string sRelativePath, GZipStream zipStream)
{
//Compress file name
char[] chars = sRelativePath.ToCharArray();
zipStream.Write(BitConverter.GetBytes(chars.Length), 0, sizeof(int));
foreach (char c in chars)
zipStream.Write(BitConverter.GetBytes(c), 0, sizeof(char));
//Compress file content
byte[] bytes = File.ReadAllBytes(Path.Combine(sDir, sRelativePath));
zipStream.Write(BitConverter.GetBytes(bytes.Length), 0, sizeof(int));
zipStream.Write(bytes, 0, bytes.Length);
}
static bool DecompressFile(string sDir, GZipStream zipStream, ProgressDelegate progress)
{
//Decompress file name
byte[] bytes = new byte[sizeof(int)];
int Readed = zipStream.Read(bytes, 0, sizeof(int));
if (Readed < sizeof(int))
return false;
int iNameLen = BitConverter.ToInt32(bytes, 0);
bytes = new byte[sizeof(char)];
StringBuilder sb = new StringBuilder();
for (int i = 0; i < iNameLen; i++)
{
zipStream.Read(bytes, 0, sizeof(char));
char c = BitConverter.ToChar(bytes, 0);
sb.Append(c);
}
string sFileName = sb.ToString();
if (progress != null)
progress(sFileName);
//Decompress file content
bytes = new byte[sizeof(int)];
zipStream.Read(bytes, 0, sizeof(int));
int iFileLen = BitConverter.ToInt32(bytes, 0);
bytes = new byte[iFileLen];
zipStream.Read(bytes, 0, bytes.Length);
string sFilePath = Path.Combine(sDir, sFileName);
string sFinalDir = Path.GetDirectoryName(sFilePath);
if (!Directory.Exists(sFinalDir))
Directory.CreateDirectory(sFinalDir);
using (FileStream outFile = new FileStream(sFilePath, FileMode.Create, FileAccess.Write, FileShare.None))
outFile.Write(bytes, 0, iFileLen);
return true;
}
static void CompressDirectory(string sInDir, string sOutFile, ProgressDelegate progress)
{
string[] sFiles = Directory.GetFiles(sInDir, "*.*", SearchOption.AllDirectories);
int iDirLen = sInDir[sInDir.Length - 1] == Path.DirectorySeparatorChar ? sInDir.Length : sInDir.Length + 1;
using (FileStream outFile = new FileStream(sOutFile, FileMode.Create, FileAccess.Write, FileShare.None))
using (GZipStream str = new GZipStream(outFile, CompressionMode.Compress))
foreach (string sFilePath in sFiles)
{
string sRelativePath = sFilePath.Substring(iDirLen);
if (progress != null)
progress(sRelativePath);
CompressFile(sInDir, sRelativePath, str);
}
}
static void DecompressToDirectory(string sCompressedFile, string sDir, ProgressDelegate progress)
{
using (FileStream inFile = new FileStream(sCompressedFile, FileMode.Open, FileAccess.Read, FileShare.None))
using (GZipStream zipStream = new GZipStream(inFile, CompressionMode.Decompress, true))
while (DecompressFile(sDir, zipStream, progress));
}
public static int Main(string[] argv)
{
if (argv.Length != 2)
{
Console.WriteLine("Usage: CmprDir.exe <in_dir compressed_file> | <compressed_file out_dir>");
return 1;
}
string sDir;
string sCompressedFile;
bool bCompress = false;
try
{
if (Directory.Exists(argv[0]))
{
sDir = argv[0];
sCompressedFile = argv[1];
bCompress = true;
}
else
if (File.Exists(argv[0]))
{
sCompressedFile = argv[0];
sDir = argv[1];
bCompress = false;
}
else
{
Console.Error.WriteLine("Wrong arguments");
return 1;
}
if (bCompress)
CompressDirectory(sDir, sCompressedFile, (fileName) => { Console.WriteLine("Compressing {0}...", fileName); });
else
DecompressToDirectory(sCompressedFile, sDir, (fileName) => { Console.WriteLine("Decompressing {0}...", fileName); });
return 0;
}
catch (Exception ex)
{
Console.Error.WriteLine(ex.Message);
return 1;
}
}
}
}
Source
I've tried a lot to write a file from a collection of bytes, but the file always gets corrupted, and I'm not sure why it's happening. If somebody knows what's wrong, it would help me a lot.
Note: it always works fine when I uncomment the line //AppendAllBytes(pathSource, bytes); inside the while loop,
but I need the bytes from the object, since later on I will use this concept for p2p.
namespace Sender
{
static class Program
{
static void Main(string[] args)
{
string pathSource = "../../Ok&SkipButtonForWelcomeToJakayaWindow.jpg";
using (FileStream fsSource = new FileStream(pathSource,
FileMode.Open, FileAccess.Read))
{
// Read the source file into a byte array.
const int numBytesToRead = 100000; // Your amount to read at a time
byte[] bytes = new byte[numBytesToRead];
int numBytesRead = 0;
if (File.Exists(pathSource))
{
Console.WriteLine("File of this name already exist, you want to continue?");
System.IO.FileInfo obj = new System.IO.FileInfo(pathSource);
pathSource = "../../Files/" + Guid.NewGuid() + obj.Extension;
}
int i = 0;
byte[] objBytes = new byte[numBytesRead];
List<FileInfo> objFileInfo = new List<FileInfo>();
Guid fileID = Guid.NewGuid();
FileInfo fileInfo = null;
while (numBytesToRead > 0)
{
// Read may return anything from 0 to numBytesToRead.
int n = fsSource.Read(bytes, numBytesRead, numBytesToRead);
i++;
//AppendAllBytes(pathSource, bytes);
fileInfo = new FileInfo { FileID = fileID, FileBytes = bytes, FileByteID = i };
objFileInfo.Add(fileInfo);
// Break when the end of the file is reached.
if (n == 0)
{
break;
}
// Do here what you want to do with the bytes read (convert to string using Encoding.YourEncoding.GetString())
}
//foreach (var b in objFileInfo.OrderBy(m => m.FileByteID))
//{
// AppendAllBytes(pathSource, b.FileBytes);
//}
foreach (var item in objFileInfo)
{
AppendAllBytes(pathSource, item.FileBytes);
}
fileInfo = null;
}
}
static void AppendAllBytes(string path, byte[] bytes)
{
using (var stream = new FileStream(path, FileMode.Append))
{
stream.Write(bytes, 0, bytes.Length);
}
}
}
class FileInfo
{
public Guid FileID { get; set; }
public int FileByteID { get; set; }
public byte[] FileBytes { get; set; }
}
}
You don't increase numBytesRead and don't decrease numBytesToRead.
objFileInfo contains a List of FileInfo which contains a reference type byte[].
You copy the reference to the bytes when you create a new FileInfo and then repeatedly overwrite those bytes until you reach the end of the file.
byte[] bytes = new byte[numBytesToRead];
//...
List<FileInfo> objFileInfo = new List<FileInfo>();
//...
//...
while (numBytesToRead > 0)
{
int n = fsSource.Read(bytes, numBytesRead, numBytesToRead);
//First time here bytes[0] == the first byte of the file
//Second time here bytes[0] == the first byte of the second 100000-byte block
//...
//The following line should copy the bytes into file info instead of the reference to the existing byte array
fileInfo = new FileInfo { ..., FileBytes = bytes, ... };
objFileInfo.Add(fileInfo);
//First time here objFileInfo[0].FileBytes[0] == first byte of file
//Second time here objFileInfo[0].FileBytes[0] == the first byte of the second block, because objFileInfo[All].FileBytes == bytes
//...
}
You can test this by inspecting the FileBytes of several FileInfo objects; I'd bet the contents look identical.
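A standalone sketch that demonstrates the aliasing:
byte[] buffer = new byte[1];
byte[] stored = buffer;        // copies the reference, not the contents
buffer[0] = 42;                // also visible through stored
Console.WriteLine(stored[0]);  // prints 42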
There are two problems in your code:
Every block is read into the same 100000-byte buffer, which cannot work unless the file size is an exact multiple of it; the last block will contain leftover bytes past the data actually read.
Every FileInfo.FileBytes references that same buffer, so by the end every stored block is identical to the last block read.
using System;
using System.Collections.Generic;
using System.IO;
static class Program
{
static void Main(string[] args)
{
string pathSource = "test.jpg";
using (FileStream fsSource = new FileStream(pathSource, FileMode.Open, FileAccess.Read))
{
// Read the source file into a byte array.
const int BufferSize = 100000; // Your amount to read at a time
byte[] buffer = new byte[BufferSize];
if (File.Exists(pathSource))
{
Console.WriteLine("File of this name already exist, you want to continue?");
System.IO.FileInfo obj = new System.IO.FileInfo(pathSource);
pathSource = "Files/" + Guid.NewGuid() + obj.Extension;
}
int i = 0, offset = 0, bytesRead;
List<FileInfo> objFileInfo = new List<FileInfo>();
Guid fileID = Guid.NewGuid();
while (0 != (bytesRead = fsSource.Read(buffer, offset, BufferSize)))
{
var data = new byte[bytesRead];
Array.Copy(buffer, data, bytesRead);
objFileInfo.Add(new FileInfo { FileID = fileID, FileBytes = data, FileByteID = ++i });
}
foreach (var item in objFileInfo)
{
AppendAllBytes(pathSource, item.FileBytes);
}
}
}
static void AppendAllBytes(string path, byte[] bytes)
{
using (var stream = new FileStream(path, FileMode.Append))
{
stream.Write(bytes, 0, bytes.Length);
}
}
}
class FileInfo
{
public Guid FileID { get; set; }
public int FileByteID { get; set; }
public byte[] FileBytes { get; set; }
}
How do I zip a folder using ICSharpCode.SharpZipLib?
Is there any way I can add an encryption password while zipping it?
I have no option to use any other DLL; I have to use only SharpZipLib.
Currently I am using this code block:
private static void CompressFiles(string folderPath) {
string zipOutput = @"C:\temp\myoutput.zip";
try {
using (ZipOutputStream zs = new ZipOutputStream(File.Create(zipOutput))) {
zs.SetLevel(9); // 0-9 (9 being best compression)
foreach (string file in Directory.GetFiles(folderPath)) {
ZipEntry entry = new ZipEntry(Path.GetFileName(file));
entry.DateTime = DateTime.Now;
using (FileStream fs = File.OpenRead(file)) {
byte[] buffer = new byte[fs.Length];
fs.Read(buffer, 0, buffer.Length);
entry.Size = buffer.Length; // This is very important
zs.PutNextEntry(entry);
zs.Write(buffer, 0, buffer.Length);
}
}
zs.Finish();
zs.Close();
}
}
catch { throw; }
}
It can zip all the files in the folder, but what I want is to zip the whole folder, so that the folders inside that folder are also included in the zip file.
Thanks in advance.
Use the FastZip object.
ICSharpCode.SharpZipLib.Zip.FastZip z = new ICSharpCode.SharpZipLib.Zip.FastZip();
z.CreateEmptyDirectories = true;
z.CreateZip("F:\\ZipTest.zip", "F:\\ZipTest\\", true, "");
if (File.Exists("F:\\ZipTest.zip"))
Console.WriteLine("Done");
else
Console.WriteLine("Failed");
I use the following code:
public static bool ZipIt(string sourcePath, string destinationPath)
{
List<string> ListOfFiles = GetListOfFiles(sourcePath);
try
{
string OutPath = destinationPath + ".zip";
int TrimLength = (Directory.GetParent(sourcePath)).ToString().Length;
TrimLength += 1;
//remove '\'
FileStream ostream;
byte[] obuffer;
ZipOutputStream oZipStream = new ZipOutputStream(System.IO.File.Create(OutPath));
oZipStream.Password = EncodePassword("Password");
oZipStream.SetLevel(9);
// 9 = maximum compression level
ZipEntry oZipEntry;
foreach (string Fil in ListOfFiles.ToArray()) // for each file, generate a zipentry
{
oZipEntry = new ZipEntry(Fil.Remove(0, TrimLength));
oZipStream.PutNextEntry(oZipEntry);
if (!Fil.EndsWith(@"/")) // if a path ends with '/' it's a directory
{
ostream = File.OpenRead(Fil);
obuffer = new byte[ostream.Length];
ostream.Read(obuffer, 0, obuffer.Length);
oZipStream.Write(obuffer, 0, obuffer.Length);
ostream.Close();
}
}
oZipStream.Finish();
oZipStream.Close();
return true;
}
catch (Exception ex)
{
return false;
}
}
public static string EncodePassword(string originalPassword)
{
Byte[] encodedBytes;
encodedBytes = ASCIIEncoding.Default.GetBytes(originalPassword);
return BitConverter.ToString(encodedBytes);
}