How do I store a file uploaded by the user in the database? The back end is a C#.NET application using SQL Server.
This solution works for SQL SERVER 2005/2008.
You have to create table with VARBINARY(MAX) as one of the columns. In my case I've created Table Raporty with column RaportPlik being VARBINARY(MAX) column.
Below there are couple of support functions you can modify for your needs:
/// <summary>
/// Reads the file at <paramref name="varFilePath"/> and inserts its bytes into
/// the Raporty.RaportPlik VARBINARY(MAX) column.
/// </summary>
/// <param name="varFilePath">Full path of the file to store.</param>
public static void databaseFilePut(string varFilePath) {
    // File.ReadAllBytes replaces the manual FileStream/BinaryReader pair.
    byte[] file = File.ReadAllBytes(varFilePath);
    using (var varConnection = Locale.sqlConnectOneTime(Locale.sqlDataConnectionDetails))
    // FIX: SQL Server parameter names are prefixed with '@', not '#'; the
    // original "#File" would be sent verbatim and the INSERT would fail.
    using (var sqlWrite = new SqlCommand("INSERT INTO Raporty (RaportPlik) Values(@File)", varConnection)) {
        sqlWrite.Parameters.Add("@File", SqlDbType.VarBinary, file.Length).Value = file;
        sqlWrite.ExecuteNonQuery();
    }
}
/// <summary>
/// Loads the RaportPlik blob for the given report ID and writes it to a file
/// on disk. Does nothing when no row matches.
/// </summary>
/// <param name="varID">Value matched against Raporty.RaportID.</param>
/// <param name="varPathToNewLocation">Destination file path (created/overwritten).</param>
public static void databaseFileRead(string varID, string varPathToNewLocation) {
    using (var varConnection = Locale.sqlConnectOneTime(Locale.sqlDataConnectionDetails))
    // FIX: '@' restored ('#' was a transcription artifact — '@' starts a C#
    // verbatim string and prefixes SQL Server parameter names).
    using (var sqlQuery = new SqlCommand(@"SELECT [RaportPlik] FROM [dbo].[Raporty] WHERE [RaportID] = @varID", varConnection)) {
        sqlQuery.Parameters.AddWithValue("@varID", varID);
        using (var sqlQueryResult = sqlQuery.ExecuteReader()) {
            // FIX: ExecuteReader never returns null; guard on Read() so an
            // unknown ID does not throw when accessing the row.
            if (sqlQueryResult.Read()) {
                // Calling GetBytes with a null buffer returns the blob length.
                var blob = new byte[sqlQueryResult.GetBytes(0, 0, null, 0, int.MaxValue)];
                sqlQueryResult.GetBytes(0, 0, blob, 0, blob.Length);
                File.WriteAllBytes(varPathToNewLocation, blob);
            }
        }
    }
}
/// <summary>
/// Loads the RaportPlik blob for the given report ID and returns it as a
/// MemoryStream so callers can process it without touching the disk.
/// </summary>
/// <param name="varID">Value matched against Raporty.RaportID.</param>
/// <returns>Stream containing the blob (position at end); empty when no row matches.</returns>
public static MemoryStream databaseFileRead(string varID) {
    MemoryStream memoryStream = new MemoryStream();
    using (var varConnection = Locale.sqlConnectOneTime(Locale.sqlDataConnectionDetails))
    // FIX: '@' restored ('#' was a transcription artifact — '@' starts a C#
    // verbatim string and prefixes SQL Server parameter names).
    using (var sqlQuery = new SqlCommand(@"SELECT [RaportPlik] FROM [dbo].[Raporty] WHERE [RaportID] = @varID", varConnection)) {
        sqlQuery.Parameters.AddWithValue("@varID", varID);
        using (var sqlQueryResult = sqlQuery.ExecuteReader()) {
            // FIX: ExecuteReader never returns null; guard on Read() so a
            // missing row yields an empty stream instead of throwing.
            // (Dead commented-out FileStream code removed.)
            if (sqlQueryResult.Read()) {
                // Calling GetBytes with a null buffer returns the blob length.
                var blob = new byte[sqlQueryResult.GetBytes(0, 0, null, 0, int.MaxValue)];
                sqlQueryResult.GetBytes(0, 0, blob, 0, blob.Length);
                memoryStream.Write(blob, 0, blob.Length);
            }
        }
    }
    return memoryStream;
}
The first method puts a file from disk into the database, the second retrieves a file and saves it to disk, and the third returns the file from the database as a MemoryStream so you can do other things with it besides just writing it to disk.
This 4th method is to put MemoryStream into database:
/// <summary>
/// Inserts the contents of a MemoryStream into Raporty.RaportPlik and returns
/// the identity (RaportID) of the new row, or 0 if it could not be read back.
/// </summary>
/// <param name="fileToPut">Stream whose full contents are stored.</param>
public static int databaseFilePut(MemoryStream fileToPut) {
    byte[] file = fileToPut.ToArray();
    // FIX: '@' restored where '#' appeared (verbatim string + parameter name),
    // and SCOPE_IDENTITY() is selected directly — the original re-queried the
    // table just to read the value SCOPE_IDENTITY() already provides.
    const string preparedCommand = @"
        INSERT INTO [dbo].[Raporty] ([RaportPlik])
        VALUES (@File);
        SELECT CAST(SCOPE_IDENTITY() AS int);";
    using (var varConnection = Locale.sqlConnectOneTime(Locale.sqlDataConnectionDetails))
    using (var sqlWrite = new SqlCommand(preparedCommand, varConnection)) {
        sqlWrite.Parameters.Add("@File", SqlDbType.VarBinary, file.Length).Value = file;
        // ExecuteScalar returns the single value of the trailing SELECT.
        object result = sqlWrite.ExecuteScalar();
        return result is int ? (int) result : 0;
    }
}
MS SQL Server 2008 (and above, I guess) offers FileStream data type. Just Google for it (or Bing it, whatever :-)), I think you'll find what you need.
Assuming SQL 2005 or later, I would use a VarBinary(MAX) field. Pass the uploaded file as a byte[] to the insert statement. According to Microsoft, since SQL 2005, storing large chunks of data (files and images) in the DB no longer decreases performance to a great extent.
eg:
/// <summary>
/// Saves an uploaded file and its description through the UploadedFileUpdate
/// stored procedure.
/// </summary>
/// <param name="description">Free-text description stored alongside the file.</param>
/// <param name="file">Raw bytes of the uploaded file.</param>
public void SaveFileToDB(string description, byte[] file)
{
    // FIX: the original was missing the closing ')' on the SqlConnection line,
    // used '#' instead of '@' in parameter names, and passed parameter VALUES
    // where the Add(name, type, size) overload expects an int SIZE — neither
    // of which compiles.
    using (SqlConnection con = new SqlConnection(conStr))
    {
        con.Open();
        using (SqlCommand cmd = con.CreateCommand())
        {
            cmd.CommandText = "UploadedFileUpdate";
            cmd.CommandType = CommandType.StoredProcedure;
            cmd.Parameters.AddWithValue("@Description", description);
            cmd.Parameters.Add("@File", SqlDbType.VarBinary, file.Length).Value = file;
            cmd.ExecuteNonQuery();
        }
    }
}
If you have SQL 2008, see Ron Klein's suggestion.
One solution that will keep your database size down is to store only the location of the file on the server, i.e. a file path.
However you will have to make a manager if you ever want to move files about.
You can use a blob field type. When you read the file in from a StreamReader convert it to a byte array and then insert that into the blob field.
The reverse procedure when you want to read it, get the blob as a byte array, read it into a streamreader and write that to the response buffer.
Related
I am trying to upload the zipped the results of an sql query converted to a csv to a blob.
See my code below:
//this doesn't work
// Question snippet: streams SQL rows as CSV into a zip entry written directly
// to an Azure append-blob stream; the uploaded archive comes out empty.
using (var sqlCommand = _dataPointService.OpenSqlConnectionForCsvQuery(device, start, end))
using (var sqlDataReader = sqlCommand.ExecuteReader(CommandBehavior.CloseConnection))
using (var blobWriteStream = appendBlobClient.OpenWrite(true))
using (var zipArchive = new ZipArchive(blobWriteStream, ZipArchiveMode.Create))
using (var streamWriter = new StreamWriter(zipArchive.CreateEntry(fileName + ".csv").Open()))
using (var csvWriter = new CsvWriter(streamWriter, new CsvConfiguration(CultureInfo.InvariantCulture)))
{
    var generator = new StreamCsvGenerator(device, start, end, showTemperatureInFahrenheit);
    generator.FeedHeader(csvWriter);
    { // NOTE(review): this bare block is redundant — it only adds nesting
        while (sqlDataReader.Read())
        {
            generator.FeedRow(csvWriter, sqlDataReader);
        }
    }
    streamWriter.Flush(); // flushes into the zip-entry stream, not the blob itself
    // NOTE(review): the archive's central directory is only written when
    // zipArchive is disposed (after this block); per the accepted answer below,
    // the actual fix was upgrading Azure.Storage.Blobs 12.6.0 -> 12.8.3.
}
I end up with the following empty archive in my blob:
However if I don't bother with zipping, everything works out:
//this works
// Same pipeline without the zip layer: CSV text is streamed straight into the
// append-blob write stream, which uploads correctly.
using (var sqlCommand = _dataPointService.OpenSqlConnectionForCsvQuery(device, start, end))
using (var sqlDataReader = sqlCommand.ExecuteReader(CommandBehavior.CloseConnection))
using (var blobWriteStream = appendBlobClient.OpenWrite(true))
using (var streamWriter = new StreamWriter(blobWriteStream))
using (var csvWriter = new CsvWriter(streamWriter, new CsvConfiguration(CultureInfo.InvariantCulture)))
{
    var generator = new StreamCsvGenerator(device, start, end, showTemperatureInFahrenheit);
    generator.FeedHeader(csvWriter); // writes header/comment lines first
    { // NOTE(review): redundant bare block, kept as posted
        while (sqlDataReader.Read())
        {
            generator.FeedRow(csvWriter, sqlDataReader);
        }
    }
    streamWriter.Flush(); // push any buffered text before disposal
}
Here's what the result looks like:
Am I using the ZipArchive / ZipEntry streams incorrectly? How can I fix it?
I believe you need to tell ZipArchive to keep the Stream open so that you can write to it with CsvHelper
// Suggested change: pass leaveOpen: true (third argument) so that disposing
// the ZipArchive does not also close the underlying blob stream.
using (var zipArchive = new ZipArchive(blobWriteStream, ZipArchiveMode.Create, true))
If that doesn't work, you may be able write to a MemoryStream first and then upload with your appendBlobClient. The first part works, I was able to write to a local file on my computer. I wasn't able to test the appendBlobClient part, since I'm not setup with an Azure account.
// Alternative: build the complete zip in a MemoryStream first, then upload the
// finished archive to the append blob in a single AppendBlock call.
using (var memoryStream = new MemoryStream())
{
    using (var sqlCommand = _dataPointService.OpenSqlConnectionForCsvQuery(device, start, end))
    using (var sqlDataReader = sqlCommand.ExecuteReader(CommandBehavior.CloseConnection))
    // leaveOpen: true keeps memoryStream usable after the archive is disposed.
    using (var zipArchive = new ZipArchive(memoryStream, ZipArchiveMode.Create, true))
    using (var streamWriter = new StreamWriter(zipArchive.CreateEntry(fileName + ".csv").Open()))
    using (var csvWriter = new CsvWriter(streamWriter, new CsvConfiguration(CultureInfo.InvariantCulture)))
    {
        var generator = new StreamCsvGenerator(device, start, end, showTemperatureInFahrenheit);
        generator.FeedHeader(csvWriter);
        { // NOTE(review): redundant bare block, kept as posted
            while (sqlDataReader.Read())
            {
                generator.FeedRow(csvWriter, sqlDataReader);
            }
        }
        streamWriter.Flush();
    }
    using (var blobWriteStream = appendBlobClient.OpenWrite(true))
    {
        // Rewind: the archive was written from position 0 forward.
        memoryStream.Seek(0, SeekOrigin.Begin);
        // NOTE(review): AppendBlock is normally a method of the append-blob
        // CLIENT, not of the stream returned by OpenWrite — verify before use.
        blobWriteStream.AppendBlock(memoryStream, memoryStream.Length);
    }
}
The fix was to update my Azure.Storage.Blobs package to latest.
I was on version 12.6.0. Once I updated to 12.8.3, everything worked fine.
Here's how my call looks for anyone coming across this in the future:
// Accepted resolution: after upgrading Azure.Storage.Blobs 12.6.0 -> 12.8.3,
// streaming the zip directly to the blob works; final call shape below.
var appendBlobClient = BlobStorage.CreateAppendBlobClient(blobName);
using (var sqlCommand = _dataPointService.OpenSqlConnectionForCsvQuery(device, start, end))
using (var sqlDataReader = sqlCommand.ExecuteReader(CommandBehavior.CloseConnection))
using (var blobWriteStream = appendBlobClient.OpenWrite(true))
using (var zipArchive = new ZipArchive(blobWriteStream, ZipArchiveMode.Create))
using (var zipEntryStream = zipArchive.CreateEntry(fileName + ".csv").Open())
using (var streamWriter = new StreamWriter(zipEntryStream))
using (var csvWriter = new CsvWriter(streamWriter, new CsvConfiguration(CultureInfo.CurrentCulture)))
{
    // this class is a csvWriter helper
    var generator = new StreamCsvGenerator(device, start, end, showTemperatureInFahrenheit);
    generator.FeedHeader(csvWriter); // this writes some comments at the top of the csv file
    while (sqlDataReader.Read())
    {
        generator.FeedRow(csvWriter, sqlDataReader); // this writes records
    }
}
What I am trying to do is read the results of a SELECT stored procedure from my database, save the data in a CSV file, and let the user download it through the web application. I got the desired result by saving the file temporarily into my program folder and using a FileStream. What I want to do now is skip the part where the file is saved to disk and keep it in RAM instead. From what I understand, I have to use a MemoryStream instead of a FileStream, but I do not quite see how: instead of writing a file, I need to convert my data to bytes, build a MemoryStream from those bytes, and then use that stream in my FileStreamResult. Am I correct here?
Method when i read from procedure and save to a csvfile:
/// <summary>
/// Runs the ExportApproved stored procedure, writes the rows to Report.csv in
/// the application folder, and returns the full path of the written file.
/// </summary>
/// <returns>Path of the generated CSV file.</returns>
public static String StoreApproved ()
{
    string path1 = HttpRuntime.AppDomainAppPath + "Report.csv";
    List<ModelStoreProcedureApproved> TestList = new List<ModelStoreProcedureApproved>();
    // FIX: connection, command and reader are now disposed deterministically —
    // the original leaked all three if an exception occurred before Close().
    using (SqlConnection sqlConnection1 = new SqlConnection("CONNECTIONSTRING"))
    using (SqlCommand cmd = new SqlCommand())
    {
        cmd.CommandText = "ExportApproved";
        cmd.CommandType = CommandType.StoredProcedure;
        cmd.Connection = sqlConnection1;
        sqlConnection1.Open();
        using (SqlDataReader reader = cmd.ExecuteReader())
        {
            while (reader.Read())
            {
                ModelStoreProcedureApproved test = new ModelStoreProcedureApproved();
                // test.Id = int.Parse(reader["IdTimeTracker"].ToString());
                // FIX: the original assigned Month twice; duplicate removed.
                test.Month = reader["Month"].ToString();
                test.EmailUser = reader["Email"].ToString();
                test.Project = reader["Name"].ToString();
                test.Approved = reader["Description"].ToString();
                test.Year = reader["Year"].ToString();
                TestList.Add(test);
            }
        }
    }
    // FIX: File.Create(path1).Close() was redundant — StreamWriter creates and
    // truncates the file itself; unused 'var i = TestList.FirstOrDefault()' removed.
    using (TextWriter fileReader = new StreamWriter(path1))
    {
        var csv = new CsvWriter(fileReader);
        csv.Configuration.Encoding = Encoding.UTF8;
        foreach (var value in TestList)
        {
            csv.WriteRecord(value);
        }
        // Disposing the StreamWriter flushes and closes the file.
    }
    return path1;
}
Controller code:
// Builds the CSV on disk via the repository, then streams it back to the
// browser as a download. FileStreamResult takes ownership of the stream and
// disposes it once the response has been written.
public ActionResult ExportToCSV()
{
    var reportPath = Repositories.UserRepository.StoreApproved();
    var reportStream = new FileStream(reportPath, FileMode.Open, FileAccess.Read);
    return new FileStreamResult(reportStream, "text/csv") { FileDownloadName = "export.csv" };
}
Can someone explain me what the best way to do this is?
Other posts i have read
Serialize and Deserialize using BinaryFormatter
BinaryFormatter and Deserialization Complex objects
Using CSVHelper to output stream to browser
You can make it like this:
/// <summary>
/// Runs the ExportApproved stored procedure and returns the rows rendered as a
/// UTF-8 CSV document, entirely in memory (no temporary file on disk).
/// </summary>
/// <returns>The CSV content as a byte array.</returns>
public static byte[] StoreApproved ()
{
    List<ModelStoreProcedureApproved> TestList = new List<ModelStoreProcedureApproved>();
    // FIX: connection, command and reader are now disposed deterministically —
    // the original leaked them on any exception; the unused 'path1' and
    // 'var i = TestList.FirstOrDefault()' locals are removed.
    using (SqlConnection sqlConnection1 = new SqlConnection("CONNECTIONSTRING"))
    using (SqlCommand cmd = new SqlCommand())
    {
        cmd.CommandText = "ExportApproved";
        cmd.CommandType = CommandType.StoredProcedure;
        cmd.Connection = sqlConnection1;
        sqlConnection1.Open();
        using (SqlDataReader reader = cmd.ExecuteReader())
        {
            while (reader.Read())
            {
                ModelStoreProcedureApproved test = new ModelStoreProcedureApproved();
                // test.Id = int.Parse(reader["IdTimeTracker"].ToString());
                // FIX: the original assigned Month twice; duplicate removed.
                test.Month = reader["Month"].ToString();
                test.EmailUser = reader["Email"].ToString();
                test.Project = reader["Name"].ToString();
                test.Approved = reader["Description"].ToString();
                test.Year = reader["Year"].ToString();
                TestList.Add(test);
            }
        }
    }
    var mem = new MemoryStream();
    using (TextWriter fileReader = new StreamWriter(mem))
    {
        var csv = new CsvWriter(fileReader);
        csv.Configuration.Encoding = Encoding.UTF8;
        foreach (var value in TestList)
        {
            csv.WriteRecord(value);
        }
        // Disposing the StreamWriter flushes the buffered text into 'mem'.
    }
    // ToArray is documented to work even after the MemoryStream is closed.
    return mem.ToArray();
}
// Wraps the in-memory CSV bytes in a MemoryStream and returns them as a file
// download; FileStreamResult disposes the stream after writing the response.
public ActionResult ExportToCSV()
{
    var csvBytes = Repositories.UserRepository.StoreApproved();
    var download = new MemoryStream(csvBytes);
    return new FileStreamResult(download, "text/csv") { FileDownloadName = "export.csv" };
}
I suggest you make clean separation of concerns since you are also using Asp.Net MVC. Instead of reading and creating memory stream inside same method, first read/get the data collection you need and just return the data out of the method. Then inside the action method you can decorate it with required format(binding to UI or returning a file etc.) based on your requirement
Though this is not be a straight answer to your question, and if all that you are looking for is using a memory stream, there are plenty of examples available to use for example as shown here and the answer you accepted etc.
Hope this help you.
// Pattern: build a CSV with CsvHelper directly into a MemoryStream.
using (var ms = new MemoryStream())
{
    using (var writer = new StreamWriter(ms))
    using (var csv = new CsvWriter(writer))
    {
        csv.WriteRecords({A list here}); // pseudo-code: pass your record collection
    }
    // Disposing the writers flushes all buffered text into 'ms'.
    ms.ToArray() // here is your actual data in memory stream
}
I need to check whether an image has particular dimensions, such as 25x25 px, so that I do not save all the images from our mail signatures (Xing, LinkedIn, Twitter, etc.). Retrieving the images from the mail works as it should.
I have a little program which retrieves emails from the server and saves them to database. It also saves all Attachments. The necessary code part looks like this:
// Saves an EWS file (or inline) attachment into TBL_Attachment. Fragment is
// truncated in the original post (trailing dots).
else if (attachment is FileAttachment || attachment.IsInline)
{
    FileAttachment fileAttachment = attachment as FileAttachment;
    fileAttachment.Load(); // fetches the attachment content from the server
    string sfilename = fileAttachment.Name;
    string sContentType = fileAttachment.ContentType;
    using (var fs = new MemoryStream(fileAttachment.Content))
    {
        using (BinaryReader br = new BinaryReader(fs))
        {
            byte[] bytes = br.ReadBytes((int)fs.Length);
            var oSQLConnection = new SqlConnection(System.Configuration.ConfigurationManager.ConnectionStrings["..."].ConnectionString);
            var cmd = new SqlCommand();
            cmd.Connection = oSQLConnection;
            cmd.CommandType = CommandType.Text;
            string sTicketID = gTicketID.ToString() ?? string.Empty;
            // NOTE(review): string-concatenating sTicketID/snFile/sfilename into
            // the SQL is an injection risk — these should be parameters like #Data.
            // NOTE(review): '#' in "#Data" below appears to be a transcription
            // artifact for '@' (SQL Server parameter prefix) — confirm.
            cmd.CommandText = "INSERT INTO TBL_Attachment (fkTicketID, fkMailID, FileName, ContentType, Data)VALUES('"+sTicketID+"', '"+snFile+"', '"+sfilename+"', '"+sContentType+"', #Data)";
            var DataParameter = new SqlParameter("#Data", SqlDbType.Binary);
            cmd.Parameters.Add(DataParameter);
            DataParameter.Value = bytes;
            .
            .
            .
I convert the file to byte and save it to the database.
Is it possible to get the file/image dimensions for use in an if statement?
I found an obvious solution: I just had to convert my MemoryStream to an Image and check its width and height.
// Skip known mail-signature images by their fixed 190x45 dimensions; save
// everything else. Fragment is truncated in the original post.
using (var fs = new MemoryStream(fileAttachment.Content))
{
    Image image = System.Drawing.Image.FromStream(fs);
    if (!(image.Width == 190 && image.Height == 45) )
    {
        // NOTE(review): Image.FromStream advances the stream position; a later
        // ReadBytes((int)fs.Length) from the current position would read fewer
        // bytes than expected — reset fs.Position = 0 first. TODO confirm
        // against the full code.
        using (BinaryReader br = new BinaryReader(fs))
        {
            ...
My question is the following: I'm trying to upload an Excel file to database with this method:
// Reads the Excel file into memory and inserts it into the VARBINARY(MAX)
// column Dokumentacio.Elrendelo_ExcelFile, capturing the new row's identity.
// FIX: '@' restored where '#' appeared — '@' starts C# verbatim strings and
// prefixes SQL Server parameter names; '#' compiles in neither position.
using (SqlConnection connection = new SqlConnection(@"Data Source=TESZT1\SQLEXPRESS;Initial Catalog=Alepitmeny;Persist Security Info=True;User ID=sa;Password=*****"))
using (SqlCommand command = connection.CreateCommand())
{
    // File.ReadAllBytes replaces the manual FileStream/BinaryReader pair.
    byte[] file = File.ReadAllBytes(ExcelFilePath);
    // ';' terminates the INSERT so the identity SELECT is a separate statement.
    command.CommandText = "INSERT INTO Dokumentacio (Elrendelo_ExcelFile) VALUES (@File); SELECT SCOPE_IDENTITY()";
    command.Parameters.Add("@File", SqlDbType.VarBinary, file.Length).Value = file;
    connection.Open();
    this.dokumentacio_Class.Dokumentacio_ID = Convert.ToInt32(command.ExecuteScalar());
    // The using blocks close/dispose the connection; explicit Close() removed.
}
But when I'm downloading the uploaded files with the method below, I get an error message
Excel found unreadable content in filename.xls. Do you want to recover the contents of this workbook?
from Microsoft Excel, and it can't recover it.
(I'm using SQL Server 2012, Visual Studio 2013, the project is WPF project, my Office version is 2013)
In the database, Elrendelo_ExcelFile column is VARBINARY(MAX)
/// <summary>
/// Streams the Excel blob belonging to this.dokumentacio_ID from the database
/// into the file at <paramref name="SavePath"/>.
/// </summary>
/// <param name="SavePath">Destination file path (created/overwritten).</param>
/// <returns>true on success; false when a SqlException occurs.</returns>
public bool ElrendeloExcelFileLetolt(string SavePath)
{
    // FIX: '@' restored where '#' appeared (verbatim strings / parameter names).
    using (SqlConnection connection = new SqlConnection(@"Data Source=TESZT1\SQLEXPRESS;Initial Catalog=Alepitmeny;Persist Security Info=True;User ID=sa;Password=*****"))
    {
        try
        {
            using (SqlCommand command = connection.CreateCommand())
            {
                command.CommandText = @"SELECT d.Elrendelo_ExcelFile FROM Dokumentacio d INNER JOIN Kapcsolotabla k ON k.Dokumentacio_ID=d.Dokumentacio_ID WHERE k.Elrendelo_ID=@id";
                command.Parameters.AddWithValue("@id", this.dokumentacio_ID);
                connection.Open();
                const int bufferSize = 100;
                byte[] buffer = new byte[bufferSize];
                using (SqlDataReader reader = command.ExecuteReader(CommandBehavior.Default))
                {
                    while (reader.Read())
                    {
                        // FIX: FileMode.Create (was OpenOrCreate) so an existing,
                        // larger file does not keep stale bytes past the new end.
                        // Streams/writer are now in using blocks so they are
                        // closed even if GetBytes throws.
                        using (FileStream stream = new FileStream(SavePath, FileMode.Create, FileAccess.Write))
                        using (BinaryWriter writer = new BinaryWriter(stream))
                        {
                            long startIndex = 0;
                            long retval = reader.GetBytes(0, startIndex, buffer, 0, bufferSize);
                            while (retval == bufferSize)
                            {
                                writer.Write(buffer);
                                startIndex += bufferSize;
                                retval = reader.GetBytes(0, startIndex, buffer, 0, bufferSize);
                            }
                            // FIX: the original wrote (int)retval - 1 bytes here,
                            // silently dropping the last byte of every file — the
                            // cause of Excel's "unreadable content" error.
                            writer.Write(buffer, 0, (int)retval);
                            writer.Flush();
                        }
                    }
                }
            }
            return true;
        }
        catch (System.Data.SqlClient.SqlException)
        {
            return false;
        }
    }
}
This SO answer should help you -> How do I insert/retrieve Excel files to varbinary(max) column in SQL Server 2008?
I've method which saves the file saved in db into file on disk. How would I modify it so method returns MemoryStream?
/// <summary>
/// Loads the RaportPlik blob for the given report ID from the DZP connection
/// and writes it to a file on disk. Does nothing when no row matches.
/// </summary>
/// <param name="varID">Value matched against Raporty.RaportID.</param>
/// <param name="varPathToNewLocation">Destination file path (created/overwritten).</param>
public static void databaseFileRead(string varID, string varPathToNewLocation) {
    using (var varConnection = Locale.sqlConnectOneTime(Locale.sqlDataConnectionDetailsDZP))
    // FIX: '@' restored ('#' was a transcription artifact — '@' starts a C#
    // verbatim string and prefixes SQL Server parameter names).
    using (var sqlQuery = new SqlCommand(@"SELECT [RaportPlik] FROM [dbo].[Raporty] WHERE [RaportID] = @varID", varConnection)) {
        sqlQuery.Parameters.AddWithValue("@varID", varID);
        using (var sqlQueryResult = sqlQuery.ExecuteReader()) {
            // FIX: ExecuteReader never returns null; guard on Read() so an
            // unknown ID does not throw when accessing the row.
            if (sqlQueryResult.Read()) {
                // Calling GetBytes with a null buffer returns the blob length.
                var blob = new Byte[sqlQueryResult.GetBytes(0, 0, null, 0, int.MaxValue)];
                sqlQueryResult.GetBytes(0, 0, blob, 0, blob.Length);
                using (var fs = new FileStream(varPathToNewLocation, FileMode.Create, FileAccess.Write)) {
                    fs.Write(blob, 0, blob.Length);
                }
            }
        }
    }
}
Change FileStream to MemoryStream. Declare the stream object at the top level of the method and use it on the return statement