The code I previously implemented takes an .xls file and saves it to a column in a table using a stream. I now use the same method; the only change is that the file being saved is an .xlsm or .xlsx file, saved to the same column in the database.
When I retrieve the contents from the database and open the saved .xlsm or .xlsx file, I get the error "Excel found unreadable content in this workbook. Do you want to recover the contents of this workbook?"
Here's the code to save the .xlsm or .xlsx file:
System.IO.Stream filestream = System.IO.File.Open(file, System.IO.FileMode.Open);
int fileLength = (int)filestream.Length;
byte[] input = new byte[fileLength];
filestream.Read(input, 0, fileLength);
string Sql = "insert into upload values(#contents)";
con.Open();
System.Data.SqlClient.SqlCommand c = new System.Data.SqlClient.SqlCommand(Sql, con);
c.Parameters.Add("#contents", System.Data.SqlDbType.Binary);
c.Parameters["#contents"].Value = input;
c.ExecuteNonQuery();
To retrieve the file and send it to the user:
SqlCommand comm = new SqlCommand("select contents from upload order by id desc", con);
SqlDataReader reader = comm.ExecuteReader();
int bufferSize = 32768;
byte[] outbyte = new byte[bufferSize];
long retval;
long startIndex = 0;
startIndex = 0;
retval = reader.GetBytes(0, startIndex, outbyte, 0, bufferSize);
while (retval > 0)
{
System.Web.HttpContext.Current.Response.BinaryWrite(outbyte);
startIndex += bufferSize;
if (retval == bufferSize)
{
retval = reader.GetBytes(2, startIndex, outbyte, 0, bufferSize);
}
else
{
retval = 0;
}
}
A few things strike me as possibilities.
Firstly, you are not calling reader.Read().
Secondly, there is no need for the check on retval == bufferSize - just call GetBytes again and it will return 0 if no bytes were read from the field.
Thirdly, as you are writing to the HttpResponse you need to make sure that you call Response.Clear() before writing the bytes to the output, and Response.End() after writing the file to the response.
The other thing to try is saving the file to the hard drive and comparing it to the original. Is it the same size? If it is bigger then you are writing too much information to the file (see previous comments about HttpResponse). If it is smaller then you are not writing enough, and are most likely exiting the loop too soon (see comment about retval).
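Putting those fixes together, here is a minimal sketch of the download loop (untested; it assumes the same con, table, and 32 KB buffer as above, and consistently reads column ordinal 0):
using (SqlCommand comm = new SqlCommand("select contents from upload order by id desc", con))
using (SqlDataReader reader = comm.ExecuteReader(CommandBehavior.SequentialAccess))
{
    if (reader.Read()) // the missing Read() call
    {
        HttpResponse response = System.Web.HttpContext.Current.Response;
        response.Clear(); // drop any page markup already buffered
        byte[] outbyte = new byte[32768];
        long startIndex = 0;
        long retval = reader.GetBytes(0, startIndex, outbyte, 0, outbyte.Length);
        while (retval > 0)
        {
            // write only the bytes actually read, never the whole buffer
            response.OutputStream.Write(outbyte, 0, (int)retval);
            startIndex += retval;
            retval = reader.GetBytes(0, startIndex, outbyte, 0, outbyte.Length);
        }
        response.End(); // stop the page from appending anything after the file
    }
}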
I couldn't help but notice the number of places where your code failed to wrap an IDisposable in a using block, like the following:
using (SqlConnection con = new SqlConnection(connectionString))
{
byte[] input;
using (System.IO.Stream filestream = System.IO.File.Open(file, System.IO.FileMode.Open))
{
int fileLength = (int)filestream.Length;
input = new byte[fileLength];
filestream.Read(input, 0, fileLength);
}
const string Sql = "insert into upload values(#contents)";
con.Open();
using (System.Data.SqlClient.SqlCommand c = new System.Data.SqlClient.SqlCommand(Sql, con))
{
c.Parameters.Add("#contents", System.Data.SqlDbType.Binary);
c.Parameters["#contents"].Value = input;
c.ExecuteNonQuery();
}
using (SqlCommand comm = new SqlCommand("select contents from upload order by id desc", con))
{
using (SqlDataReader reader = comm.ExecuteReader())
{
int bufferSize = 32768;
byte[] outbyte = new byte[bufferSize];
long retval;
long startIndex = 0;
startIndex = 0;
retval = reader.GetBytes(0, startIndex, outbyte, 0, bufferSize);
while (retval > 0)
{
System.Web.HttpContext.Current.Response.BinaryWrite(outbyte);
startIndex += bufferSize;
if (retval == bufferSize)
{
retval = reader.GetBytes(2, startIndex, outbyte, 0, bufferSize);
}
else
{
retval = 0;
}
}
}
}
}
My question is the following: I'm trying to upload an Excel file to the database with this method:
using (SqlConnection connection = new SqlConnection(#"Data Source=TESZT1\SQLEXPRESS;Initial Catalog=Alepitmeny;Persist Security Info=True;User ID=sa;Password=*****"))
using (SqlCommand command = connection.CreateCommand())
{
byte[] file;
using (var stream = new FileStream(ExcelFilePath, FileMode.Open, FileAccess.Read))
{
using (var reader = new BinaryReader(stream))
{
file = reader.ReadBytes((int)stream.Length);
}
}
command.CommandText = "INSERT INTO Dokumentacio (Elrendelo_ExcelFile) VALUES (#File) SELECT SCOPE_IDENTITY()";
command.Parameters.Add("#File", SqlDbType.VarBinary, file.Length).Value = file;
connection.Open();
this.dokumentacio_Class.Dokumentacio_ID = Convert.ToInt32(command.ExecuteScalar());
connection.Close();
}
But when I'm downloading the uploaded files with the method below, I get an error message
Excel found unreadable content in filename.xls. Do you want to recover the contents of this workbook?
from Microsoft Excel, and it can't recover it.
(I'm using SQL Server 2012, Visual Studio 2013, the project is WPF project, my Office version is 2013)
In the database, Elrendelo_ExcelFile column is VARBINARY(MAX)
public bool ElrendeloExcelFileLetolt(string SavePath)
{
using (SqlConnection connection = new SqlConnection(#"Data Source=TESZT1\SQLEXPRESS;Initial Catalog=Alepitmeny;Persist Security Info=True;User ID=sa;Password=*****"))
try
{
using (SqlCommand command = connection.CreateCommand())
{
command.CommandText = #"SELECT d.Elrendelo_ExcelFile FROM Dokumentacio d INNER JOIN Kapcsolotabla k ON k.Dokumentacio_ID=d.Dokumentacio_ID WHERE k.Elrendelo_ID=#id";
command.Parameters.AddWithValue("#id", this.dokumentacio_ID);
FileStream stream;
BinaryWriter writer;
int bufferSize = 100;
byte[] buffer = new byte[bufferSize];
long retval;
long startIndex = 0;
connection.Open();
SqlDataReader reader = command.ExecuteReader(CommandBehavior.Default);
while (reader.Read())
{
stream = new FileStream(SavePath, FileMode.OpenOrCreate, FileAccess.Write);
writer = new BinaryWriter(stream);
startIndex = 0;
retval = reader.GetBytes(0, startIndex, buffer, 0, bufferSize);
while (retval == bufferSize)
{
writer.Write(buffer);
writer.Flush();
startIndex += bufferSize;
retval = reader.GetBytes(0, startIndex, buffer, 0, bufferSize);
}
writer.Write(buffer, 0, (int)retval - 1);
writer.Flush();
writer.Close();
stream.Close();
}
reader.Close();
connection.Close();
}
return true;
}
catch (System.Data.SqlClient.SqlException)
{
return false;
}
finally
{
connection.Close();
}
}
This SO answer should help you -> How do I insert/retrieve Excel files to varbinary(max) column in SQL Server 2008?
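The immediate bug, though, is in the download loop: writer.Write(buffer, 0, (int)retval - 1) drops the last byte of every downloaded file (and throws when the file length is an exact multiple of bufferSize), which is enough for Excel to consider the workbook damaged. A minimal corrected inner loop (untested; same reader, writer, buffer, and bufferSize variables as in the question):
startIndex = 0;
retval = reader.GetBytes(0, startIndex, buffer, 0, bufferSize);
while (retval > 0)
{
    // write exactly the number of bytes GetBytes reported - no "- 1"
    writer.Write(buffer, 0, (int)retval);
    startIndex += retval;
    retval = reader.GetBytes(0, startIndex, buffer, 0, bufferSize);
}
writer.Flush();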
I have a Windows application in C# in which users can add and retrieve video files in their data.
How do I store and retrieve video in a database with C# WinForms?
Is there any way to solve this problem?
Thank you for the help.
It is a bad idea to store video in a database: it slows things down when you try to load the video back. Instead, store the video on your hard drive and save only the file path in your database; that way you can stream the video quickly. That said, if you want to store any file in the database, you can use a MemoryStream or FileStream to read all the bytes and then write those bytes to the database.
Try this code.
NOTE: The following code has not been compiled and may give you errors. It is just to give you an idea of how to store a video file in a database.
string path = #"D:\videos\myvideos.mpeg";
try
{
using (FileStream fsSource = new FileStream(path, FileMode.Open, FileAccess.Read))
{
byte[] bytes = new byte[fsSource.Length];
int numBytesToRead = (int)fsSource.Length;
int numBytesRead = 0;
while (numBytesToRead > 0)
{
// Read may return anything from 0 to numBytesToRead.
int n = fsSource.Read(bytes, numBytesRead, numBytesToRead);
// Break when the end of the file is reached.
if (n == 0)
break;
numBytesRead += n;
numBytesToRead -= n;
}
SqlCommand Cmd = Connection.CreateCommand();
Cmd.CommandText = "Insert Into MyTable(ID,Video,FileName,Format,Size)Values(#ID,#Video,#FileName,#Format,#Size)";
Cmd.Parameters.Add("#ID", SqlDbType.Int).Value = 1;
Cmd.Parameters.Add("#Video", SqlDbType.VarBinary).Value = bytes;
Cmd.Parameters.Add("#FileName", SqlDbType.Varchar).Value = "My File Name";
Cmd.Parameters.Add("#Format", SqlDbType.Varchar).Value = "MPEG";
Cmd.Parameters.Add("#Size", SqlDbType.Int).Value = bytes.length;
Cmd.ExecuteNonQuery();
}
}
catch (Exception e)
{
Console.WriteLine(e.Message);
}
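If you instead follow the path-only advice above, the table just needs a text column for the location. A minimal sketch (the VideoFiles table, its columns, and the target folder are hypothetical, made up for illustration; it assumes the same open Connection as above):
// Hypothetical table: VideoFiles(ID int, FilePath nvarchar(260))
string source = @"D:\videos\myvideos.mpeg";
string target = System.IO.Path.Combine(@"D:\AppData\Videos", System.IO.Path.GetFileName(source));
System.IO.File.Copy(source, target, true); // copy into an app-managed folder
using (SqlCommand cmd = Connection.CreateCommand())
{
    cmd.CommandText = "Insert Into VideoFiles(ID,FilePath) Values(@ID,@FilePath)";
    cmd.Parameters.Add("@ID", SqlDbType.Int).Value = 1;
    cmd.Parameters.Add("@FilePath", SqlDbType.NVarChar, 260).Value = target;
    cmd.ExecuteNonQuery();
}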
We have a lot of files, saved as binary in our SQL Server database.
I have made an .ashx file, that delivers these files, to the users.
Unfortunately, when the files become rather large, it will fail, with the following error:
Overflow or underflow in the arithmetic operation
I assume it runs out of memory, as I load the binary into a byte[].
So my question is: how can I make this functionality read in chunks when the data comes from a database table? Response.TransmitFile() also seems like a good option, but again, how would that work with a database?
The DB.GetReposFile() in the code beneath gets the file from the database. There are various fields for the entry:
Filename, ContentType, datestamps, and the FileContent as varbinary.
This is my function to deliver the file:
context.Response.Clear();
try
{
if (!String.IsNullOrEmpty(context.Request.QueryString["id"]))
{
int id = Int32.Parse(context.Request.QueryString["id"]);
DataTable dtbl = DB.GetReposFile(id);
string FileName = dtbl.Rows[0]["FileName"].ToString();
string Extension = FileName.Substring(FileName.LastIndexOf('.')).ToLower();
context.Response.ContentType = ReturnExtension(Extension);
context.Response.AddHeader("Content-Disposition", "attachment; filename=" + FileName);
byte[] buffer = (byte[])dtbl.Rows[0]["FileContent"];
context.Response.OutputStream.Write(buffer, 0, buffer.Length);
}
else
{
context.Response.ContentType = "text/html";
context.Response.Write("<p>Need a valid id</p>");
}
}
catch (Exception ex)
{
context.Response.ContentType = "text/html";
context.Response.Write("<p>" + ex.ToString() + "</p>");
}
Update:
The function I ended up with is the one listed below.
DB.GetReposFileSize() simply gets the content Datalength, as Tim mentions.
I call this function, in the original code, instead of these two lines:
byte[] buffer = (byte[])dtbl.Rows[0]["FileContent"];
context.Response.OutputStream.Write(buffer, 0, buffer.Length);
New download function:
private void GetFileInChunks(HttpContext context, int ID)
{
//string path = #"c:\somefile.txt";
//FileInfo file = new FileInfo(path);
int len = DB.GetReposFileSize(ID);
context.Response.AppendHeader("content-length", len.ToString());
context.Response.Buffer = false;
//Stream outStream = (Stream)context.Response.OutputStream;
SqlConnection conn = null;
string strSQL = "select FileContent from LM_FileUploads where ID=#ID";
try
{
DB.OpenDB(ref conn, DB.DatabaseConnection.PDM);
SqlCommand cmd = new SqlCommand(strSQL, conn);
cmd.Parameters.AddWithValue("#ID", ID);
SqlDataReader reader = cmd.ExecuteReader(CommandBehavior.SequentialAccess);
reader.Read();
byte[] buffer = new byte[1024];
int bytes;
long offset = 0;
while ((bytes = (int)reader.GetBytes(0, offset, buffer, 0, buffer.Length)) > 0)
{
    // write only the bytes actually read from the column, not the whole buffer
    context.Response.OutputStream.Write(buffer, 0, bytes);
    offset += bytes;
}
}
catch (Exception)
{
    throw; // rethrow without resetting the stack trace
}
}
finally
{
DB.CloseDB(ref conn);
}
}
You can use DATALENGTH to get the size of the VARBINARY and stream it, for instance with a SqlDataReader and its Read and GetBytes methods.
Have a look at this answer to see an implementation: Best way to stream files in ASP.NET
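For reference, the DB.GetReposFileSize() used in the update above presumably boils down to something like this (a sketch; the table and column names are taken from the question):
private int GetReposFileSize(SqlConnection conn, int id)
{
    using (SqlCommand cmd = new SqlCommand(
        "select DATALENGTH(FileContent) from LM_FileUploads where ID=@ID", conn))
    {
        cmd.Parameters.AddWithValue("@ID", id);
        // DATALENGTH returns the blob size in bytes, used for the content-length header
        return Convert.ToInt32(cmd.ExecuteScalar());
    }
}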
I am saving files to a SQL Server 2008 (Express) database using FILESTREAM. The trouble I'm having is that certain files seem to be getting corrupted in the process.
For example, if I save a Word or Excel document in one of the newer formats (.docx or .xlsx), then when I try to open the file I get an error message saying that the data is corrupted and asking whether I would like Word/Excel to try to recover it. If I click Yes, Office is able to 'recover' the data and opens the file in compatibility mode.
However, if I zip the file first, then after extracting the contents I'm able to open the file without a problem. Strangely, if I save an mp3 file to the database I can open it with no problem, but if I save a zipped version of the mp3 I can't even extract the contents of that zip. When I tried to save a PDF or PowerPoint file I ran into similar problems (the PDF I could only read if I zipped it first, and the PPT I couldn't read at all).
Update: here's the code I'm using to write to the database and to read from it.
To write to the database:
SQL = "SELECT Attachment.PathName(), GET_FILESTREAM_TRANSACTION_CONTEXT() FROM Activity " +
"WHERE RowID = CAST(#RowID as uniqueidentifier)";
transaction = connection.BeginTransaction();
command.Transaction = transaction;
command.CommandText = SQL;
command.Parameters.Clear();
command.Parameters.Add(rowIDParam);
SqlDataReader readerFS = null;
readerFS= command.ExecuteReader();
string path = (string)readerFS[0].ToString();
byte[] context = (byte[])readerFS[1];
int length = context.Length;
SqlFileStream targetStream = new SqlFileStream(path, context, FileAccess.Write);
int blockSize = 1024 * 512; //half a megabyte
byte[] buffer = new byte[blockSize];
int bytesRead = sourceStream.Read(buffer, 0, buffer.Length);
while (bytesRead > 0)
{
targetStream.Write(buffer, 0, bytesRead);
bytesRead = sourceStream.Read(buffer, 0, buffer.Length);
}
targetStream.Close();
sourceStream.Close();
readerFS.Close();
transaction.Commit();
And to read:
SqlConnection connection = null;
SqlTransaction transaction = null;
try
{
connection = getConnection();
connection.Open();
transaction = connection.BeginTransaction();
SQL = "SELECT Attachment.PathName(), + GET_FILESTREAM_TRANSACTION_CONTEXT() FROM Activity"
+ " WHERE ActivityID = #ActivityID";
SqlCommand command = new SqlCommand(SQL, connection);
command.Transaction = transaction;
command.Parameters.Add(new SqlParameter("ActivityID", activity.ActivityID));
SqlDataReader reader = command.ExecuteReader();
string path = (string)reader[0];
byte[] context = (byte[])reader[1];
int length = context.Length;
reader.Close();
SqlFileStream sourceStream = new SqlFileStream(path, context, FileAccess.Read);
int blockSize = 1024 * 512; //half a megabyte
byte[] buffer = new byte[blockSize];
List<byte> attachmentBytes = new List<byte>();
int bytesRead = sourceStream.Read(buffer, 0, buffer.Length);
while (bytesRead > 0)
{
bytesRead = sourceStream.Read(buffer, 0, buffer.Length);
foreach (byte b in buffer)
{
attachmentBytes.Add(b);
}
}
FileStream outputStream = File.Create(outputPath);
foreach (byte b in attachmentBytes)
{
byte[] barr = new byte[1];
barr[0] = b;
outputStream.Write(barr, 0, 1);
}
outputStream.Close();
sourceStream.Close();
command.Transaction.Commit();
Your read code is incorrect:
while (bytesRead > 0)
{
bytesRead = sourceStream.Read(buffer, 0, buffer.Length);
foreach (byte b in buffer)
{
attachmentBytes.Add(b);
}
}
If bytesRead is less than buffer.Length, you still add the entire buffer to attachmentBytes. Thus you always corrupt the returned document by appending whatever garbage happens to sit at the end of the last buffer past bytesRead.
Other than that, allow me a really WTF moment. Reading a stream into a List<byte>?? C'mon! First, I don't see why you need an intermediate in-memory store at all: you can simply read buffer by buffer and write each buffer straight to the outputStream. Second, if you must use an intermediate in-memory store, use a MemoryStream, not a List<byte>.
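A minimal corrected read loop along those lines (untested; same path, context, and outputPath as in the question). Note it also fixes two further quirks of the original: the chunk read before the loop was never stored, and a stale buffer was appended again after the final zero-length read.
using (SqlFileStream sourceStream = new SqlFileStream(path, context, FileAccess.Read))
using (FileStream outputStream = File.Create(outputPath))
{
    byte[] buffer = new byte[1024 * 512]; //half a megabyte
    int bytesRead;
    // read, then immediately write only the bytes this Read call returned
    while ((bytesRead = sourceStream.Read(buffer, 0, buffer.Length)) > 0)
    {
        outputStream.Write(buffer, 0, bytesRead);
    }
}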
I had the exact problem a few months back and figured out that I was adding an extra byte at the end of the file when reading it from FILESTREAM.
I am trying to zip files to a SQL Server database table. I can't ensure that the user of the tool has write privileges on the source file folder, so I want to load the file into memory, compress it to an array of bytes, and insert it into my database.
The code below does not work.
class ZipFileToSql
{
public event MessageHandler Message;
protected virtual void OnMessage(string msg)
{
if (Message != null)
{
MessageHandlerEventArgs args = new MessageHandlerEventArgs();
args.Message = msg;
Message(this, args);
}
}
private int sourceFileId;
private SqlConnection Conn;
private string PathToFile;
private bool isExecuting;
public bool IsExecuting
{
get
{ return isExecuting; }
}
public int SourceFileId
{
get
{ return sourceFileId; }
}
public ZipFileToSql(string pathToFile, SqlConnection conn)
{
isExecuting = false;
PathToFile = pathToFile;
Conn = conn;
}
public void Execute()
{
isExecuting = true;
byte[] data;
byte[] cmpData;
//create temp zip file
OnMessage("Reading file to memory");
FileStream fs = File.OpenRead(PathToFile);
data = new byte[fs.Length];
ReadWholeArray(fs, data);
OnMessage("Zipping file to memory");
MemoryStream ms = new MemoryStream();
GZipStream zip = new GZipStream(ms, CompressionMode.Compress, true);
zip.Write(data, 0, data.Length);
cmpData = new byte[ms.Length];
ReadWholeArray(ms, cmpData);
OnMessage("Saving file to database");
using (SqlCommand cmd = Conn.CreateCommand())
{
cmd.CommandText = #"MergeFileUploads";
cmd.CommandType = CommandType.StoredProcedure;
//cmd.Parameters.Add("#File", SqlDbType.VarBinary).Value = data;
cmd.Parameters.Add("#File", SqlDbType.VarBinary).Value = cmpData;
SqlParameter p = new SqlParameter();
p.ParameterName = "#SourceFileId";
p.Direction = ParameterDirection.Output;
p.SqlDbType = SqlDbType.Int;
cmd.Parameters.Add(p);
cmd.ExecuteNonQuery();
sourceFileId = (int)p.Value;
}
OnMessage("File Saved");
isExecuting = false;
}
private void ReadWholeArray(Stream stream, byte[] data)
{
int offset = 0;
int remaining = data.Length;
float Step = data.Length / 100;
float NextStep = data.Length - Step;
while (remaining > 0)
{
int read = stream.Read(data, offset, remaining);
if (read <= 0)
throw new EndOfStreamException
(String.Format("End of stream reached with {0} bytes left to read", remaining));
remaining -= read;
offset += read;
if (remaining < NextStep)
{
NextStep -= Step;
}
}
}
}
Your code will be easier to debug if you break it down into smaller chunks. In my example, I have provided Compress and Decompress methods. In addition, you do not need to roll your own code to read all the bytes out of a FileStream: you can simply use File.ReadAllBytes. Third, make sure you wrap classes that implement IDisposable in using statements.
public void Execute()
{
isExecuting = true;
//create temp zip file
OnMessage("Reading file to memory");
byte[] data = File.ReadAllBytes( PathToFile );
OnMessage("Zipping file to memory");
byte[] compressedData = Compress(data);
OnMessage("Saving file to database");
SaveToDatabase( compressedData );
OnMessage("File Saved");
isExecuting = false;
}
private void SaveToDatabase( byte[] data )
{
using ( var cmd = Conn.CreateCommand() )
{
cmd.CommandText = #"MergeFileUploads";
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.AddWithValue("#File", data );
cmd.Parameters["#File"].DbType = DbType.Binary;
cmd.Parameters.Add("#SourceField");
var parameter = cmd.Parameters["#SourceField"];
parameter.DbType = DbType.Int32;
parameter.Direction = ParameterDirection.Output;
cmd.ExecuteNonQuery();
sourceFileId = (int)parameter.Value;
}
}
private static byte[] Compress( byte[] data )
{
var output = new MemoryStream();
using ( var gzip = new GZipStream( output, CompressionMode.Compress, true ) )
{
gzip.Write( data, 0, data.Length );
gzip.Close();
}
return output.ToArray();
}
private static byte[] Decompress( byte[] data )
{
var output = new MemoryStream();
var input = new MemoryStream();
input.Write( data, 0, data.Length );
input.Position = 0;
using ( var gzip = new GZipStream( input, CompressionMode.Decompress, true ) )
{
var buff = new byte[64];
var read = gzip.Read( buff, 0, buff.Length );
while ( read > 0 )
{
output.Write( buff, 0, read );
read = gzip.Read( buff, 0, buff.Length );
}
gzip.Close();
}
return output.ToArray();
}
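A quick round-trip check of the two helpers (assuming the file fits comfortably in memory):
byte[] original = File.ReadAllBytes(PathToFile);
byte[] compressed = Compress(original);
byte[] restored = Decompress(compressed);
System.Diagnostics.Debug.Assert(restored.Length == original.Length); // sanity-check the round trip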
According to the docs:
The write might not occur immediately but is buffered until the buffer size is reached or until the Flush or Close method is called.
So you might try putting a zip.Flush() to make sure it flushes the stream.
In addition, when passing your memory stream to your ReadWholeArray method, make sure you rewind the stream by setting its Position property to 0.
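Applied to the original Execute method, those two fixes would look something like this (a sketch; variable names as in the question, and closing the stream also flushes it and writes the gzip trailer):
zip.Write(data, 0, data.Length);
zip.Close();      // finish the gzip stream so all compressed bytes reach ms
ms.Position = 0;  // rewind before reading the compressed bytes back out
cmpData = new byte[ms.Length];
ReadWholeArray(ms, cmpData);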
You could probably simplify the code that performs the compression and byte-array conversion to something along the lines of the following (untested, but should be close):
MemoryStream ms = new MemoryStream();
using (FileStream fs = File.OpenRead(PathToFile))
using (GZipStream zip = new GZipStream(ms, CompressionMode.Compress))
{
// This could be replaced with fs.CopyTo(zip); if you are using Framework 4.0
byte[] buffer = new byte[1024];
int bytesRead = 0;
while ((bytesRead = fs.Read(buffer, 0, buffer.Length)) > 0)
{
zip.Write(buffer, 0, bytesRead);
}
}
// Get the compressed bytes from the memory stream
byte[] cmpData = ms.ToArray();
BEWARE: it is MemoryStream.GetBuffer() that pads the returned array with zeros - it hands back the whole internal buffer, so you must truncate it to the stream's Length yourself. ToArray(), as used above, copies only the bytes actually written, so no truncation is needed.
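A tiny illustration of that difference (sketch):
var ms = new MemoryStream();
ms.Write(new byte[] { 1, 2, 3 }, 0, 3);
byte[] exact = ms.ToArray();   // length 3: only the bytes written
byte[] raw = ms.GetBuffer();   // length = internal capacity (e.g. 256), zero-padded
byte[] truncated = new byte[ms.Length];
Array.Copy(raw, truncated, (int)ms.Length); // manual truncation needed with GetBuffer()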