There is a strange behaviour that I don't understand.
Here is the part of the code which is involved in my problem:
public static NpgsqlConnection ConnectRead()
{
string pass = "password_here";
StreamReader sr = new StreamReader(@"Stc.cts");
string line;
string conn = "";
while ((line = sr.ReadLine()) != null)
{
conn = line;
}
sr.Close();
conn = Cdf.Cdf.Crypt.Decrypt(conn, pass);
NpgsqlConnection con = new NpgsqlConnection(conn);
con.Open();
return con;
}
and
if (mailCheckBox.Checked == true)
{
string subject = pototal;
string body = "Voici le bon de commande";
MapiMailMessage message = new MapiMailMessage(subject, body);
//message.Files.Add(serveur + nomfichier);
message.Files.Add(@"c:\pdftemp\" + nomfichier);
message.ShowDialog();
}
As you can see, the first part builds a database connection and the second one uses MAPI to open the default mail software.
My problem is: if I don't use the MAPI portion of my program, everything works perfectly. If I use the MAPI portion, my program stops connecting because it seems to change the Stc.cts path to c:/foxmail/Stc.cts.
If anyone has a clue, I would really appreciate it.
It appears as though your code is using a relative path. You should provide an absolute path in case something changes the "current" directory.
For example, if your file is in the same path as your executable, you could do this:
string strAppDir = Path.GetDirectoryName(
System.Reflection.Assembly.GetExecutingAssembly().GetName().CodeBase);
// or...
// string strAppDir = AppDomain.CurrentDomain.BaseDirectory;
string strFullPathToMyFile = System.IO.Path.Combine(strAppDir, "Stc.cts");
StreamReader sr = new StreamReader(strFullPathToMyFile);
Suppose I have this method from one class:
private void btnChangeImage_Click(object sender, EventArgs e)
{
using (var openFileDialogForImgUser = new OpenFileDialog())
{
string location = null;
string fileName = null;
openFileDialogForImgUser.Filter = "Image Files (*.jpg, *.png, *.gif, *.bmp)|*.jpg; *.png; *.gif; *.bmp|All Files (*.*)|*.*"; // filtering only picture file types
var openFileResult = openFileDialogForImgUser.ShowDialog(); // show the file open dialog box
if (openFileResult == DialogResult.OK)
{
using (var formSaveImg = new FormSave())
{
var saveResult = formSaveImg.ShowDialog();
if (saveResult == DialogResult.Yes)
{
imgUser.Image = new Bitmap(openFileDialogForImgUser.FileName); //showing the image opened in the picturebox
location = openFileDialogForImgUser.FileName;
fileName = openFileDialogForImgUser.SafeFileName;
FileStream fs = new FileStream(location, FileMode.Open, FileAccess.Read); //Creating a filestream to open the image file
int fileLength = (int)fs.Length; // getting the length of the file in bytes
byte[] rawdata = new byte[fileLength]; // creating an array to store the image as bytes
fs.Read(rawdata, 0, (int)fileLength); // using the filestream and converting the image to bits and storing it in an array
MySQLOperations MySQLOperationsObj = new MySQLOperations("localhost", "root", "myPass");
MySQLOperationsObj.saveImage(rawdata);
fs.Close();
}
else
openFileDialogForImgUser.Dispose();
}
}
}
}
And this method from another class (MySQLOperations):
public void saveImage(byte[] rawdata)
{
try
{
string myConnectionString = "Data Source = " + server + "; User = " + user + "; Port = 3306; Password = " + password + ";";
MySqlConnection myConnection = new MySqlConnection(myConnectionString);
string currentUser = FormLogin.userID;
string useDataBaseCommand = "USE " + dbName + ";";
string updateTableCommand = "UPDATE tblUsers SET UserImage = @file WHERE Username = \'" + currentUser + "\';";
MySqlCommand myCommand = new MySqlCommand(useDataBaseCommand + updateTableCommand, myConnection);
myCommand.Parameters.AddWithValue("@file", rawdata);
myConnection.Open();
myCommand.ExecuteNonQuery();
myConnection.Close();
}
catch (Exception ex)
{
MessageBox.Show(ex.Message, "Error!", MessageBoxButtons.OK, MessageBoxIcon.Error);
}
}
If I must, this is my constructor for the MySQLOperations class:
public MySQLOperations(string server, string user, string password)
{
this.server = server;
this.user = user;
this.password = password;
}
What I'm trying to do is save an image file (which the user selects through the open file dialog box) to the database. Problem is I get this error: "You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near ';UPDATE tblUsers SET UserImage = _binary'?PNG ... (and so on with some random characters). So, I can't really save the file in the database. I would love to post a picture of how the error appears in the MessageBox, but I guess my account isn't given the privilege to do so yet.
I'm not really sure where the syntax error is in that. I'm thinking it's in the @file - but that's just a guess. Your help would be very much appreciated.
And oh, the table column UserImage has a type of LONGBLOB.
Other things I'm also interested to know about:
- Is it necessary that I add another column to my table to store the size of the file (because I'm going to need to retrieve the file to display the image later on)?
- Is it okay that I used the using statement that way in the method btnChangeImage_Click?
Thank you very much.
EDIT: Got the problem solved. Such a simple thing I hadn't paid attention to. Thanks to everybody who tried to help. I'm still willing to hear your opinions on the questions at the bottom (the bulleted ones).
I think the problem is in the following line of code:
WHERE Username = \'" + currentUser + "\';"
It should change to the following one:
WHERE Username = " + currentUser;
Or better (to avoid sql injections) to the following one:
WHERE Username = @Username";
myCommand.Parameters.AddWithValue("@Username", currentUser);
Do not store binary files in a MySQL table. Instead, save the file to disk and store the path to the PNG file in the MySQL database. Also, follow Christos's advice to avoid SQL injection.
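A minimal sketch of that approach, reusing location, fileName and currentUser from the question (the storage folder and the UserImagePath column are assumptions, not part of the original schema):
// Hypothetical sketch: copy the selected image to an application folder and store only its path.
string imagesDir = @"C:\MyAppImages";                 // assumed storage folder
Directory.CreateDirectory(imagesDir);                 // System.IO
string destPath = Path.Combine(imagesDir, fileName);  // fileName came from the OpenFileDialog
File.Copy(location, destPath, true);

using (var conn = new MySqlConnection(myConnectionString))
using (var cmd = new MySqlCommand(
    "UPDATE tblUsers SET UserImagePath = @path WHERE Username = @Username;", conn)) // UserImagePath is an assumed column
{
    cmd.Parameters.AddWithValue("@path", destPath);
    cmd.Parameters.AddWithValue("@Username", currentUser);
    conn.Open();
    cmd.ExecuteNonQuery();
}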
I have a remote SQL connection in C# that needs to execute a query and save its results to the user's local hard disk. There is a fairly large amount of data this thing can return, so I need to think of an efficient way of storing it. I've read before that first putting the whole result into memory and then writing it is not a good idea, so if someone could help, that would be great!
I am currently storing the SQL result data in a DataTable, although I am thinking it could be better to do something like while (myReader.Read()) {...}
Below is the code that gets the results:
DataTable t = new DataTable();
string myQuery = QueryLoader.ReadQueryFromFileWithBdateEdate(@"Resources\qrs\qryssysblo.q", newdate, newdate);
using (SqlDataAdapter a = new SqlDataAdapter(myQuery, sqlconn.myConnection))
{
a.Fill(t);
}
var result = string.Empty;
for(int i = 0; i < t.Rows.Count; i++)
{
for (int j = 0; j < t.Columns.Count; j++)
{
result += t.Rows[i][j] + ",";
}
result += "\r\n";
}
So now I have this huge result string. And I have the datatable. There has to be a much better way of doing it?
Thanks.
You are on the right track yourself. Use a loop with while (myReader.Read()) {...} and write each record to the text file inside the loop. The .NET framework and operating system will take care of flushing the buffers to disk in an efficient way.
using(SqlConnection conn = new SqlConnection(connectionString))
using(SqlCommand cmd = conn.CreateCommand())
{
conn.Open();
cmd.CommandText = QueryLoader.ReadQueryFromFileWithBdateEdate(
#"Resources\qrs\qryssysblo.q", newdate, newdate);
using(SqlDataReader reader = cmd.ExecuteReader())
using(StreamWriter writer = new StreamWriter(@"c:\temp\file.txt"))
{
while(reader.Read())
{
// Using Name and Phone as example columns.
writer.WriteLine("Name: {0}, Phone : {1}",
reader["Name"], reader["Phone"]);
}
}
}
I came up with this, it's a better CSV writer than the other answers:
public static class DataReaderExtension
{
public static void ToCsv(this IDataReader dataReader, string fileName, bool includeHeaderAsFirstRow)
{
const string Separator = ",";
StreamWriter streamWriter = new StreamWriter(fileName);
StringBuilder sb = null;
if (includeHeaderAsFirstRow)
{
sb = new StringBuilder();
for (int index = 0; index < dataReader.FieldCount; index++)
{
if (dataReader.GetName(index) != null)
sb.Append(dataReader.GetName(index));
if (index < dataReader.FieldCount - 1)
sb.Append(Separator);
}
streamWriter.WriteLine(sb.ToString());
}
while (dataReader.Read())
{
sb = new StringBuilder();
for (int index = 0; index < dataReader.FieldCount; index++)
{
if (!dataReader.IsDBNull(index))
{
string value = dataReader.GetValue(index).ToString();
if (dataReader.GetFieldType(index) == typeof(String))
{
if (value.IndexOf("\"") >= 0)
value = value.Replace("\"", "\"\"");
if (value.IndexOf(Separator) >= 0)
value = "\"" + value + "\"";
}
sb.Append(value);
}
if (index < dataReader.FieldCount - 1)
sb.Append(Separator);
}
streamWriter.WriteLine(sb.ToString());
}
dataReader.Close();
streamWriter.Close();
}
}
usage: mydataReader.ToCsv("myfile.csv", true)
Rob Sedgwick's answer is more like it, but it can be improved and simplified. This is how I did it:
string separator = ";";
string fieldDelimiter = "";
bool useHeaders = true;
bool first;
string line;
// 'response' below is the object the CSV is written to (e.g. the HTTP response in a web app).
string connectionString = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
using (SqlConnection conn = new SqlConnection(connectionString))
{
using (SqlCommand cmd = conn.CreateCommand())
{
conn.Open();
string query = #"SELECT whatever";
cmd.CommandText = query;
using (SqlDataReader reader = cmd.ExecuteReader())
{
if (!reader.Read())
{
return;
}
List<string> columnNames = GetColumnNames(reader);
// Write headers if required
if (useHeaders)
{
first = true;
foreach (string columnName in columnNames)
{
response.Write(first ? string.Empty : separator);
line = string.Format("{0}{1}{2}", fieldDelimiter, columnName, fieldDelimiter);
response.Write(line);
first = false;
}
response.Write("\n");
}
// Write all records
do
{
first = true;
foreach (string columnName in columnNames)
{
response.Write(first ? string.Empty : separator);
string value = reader[columnName] == null ? string.Empty : reader[columnName].ToString();
line = string.Format("{0}{1}{2}", fieldDelimiter, value, fieldDelimiter);
response.Write(line);
first = false;
}
response.Write("\n");
}
while (reader.Read());
}
}
}
And you need to have a function GetColumnNames:
List<string> GetColumnNames(IDataReader reader)
{
List<string> columnNames = new List<string>();
for (int i = 0; i < reader.FieldCount; i++)
{
columnNames.Add(reader.GetName(i));
}
return columnNames;
}
I agree that your best bet here would be to use a SqlDataReader. Something like this:
StreamWriter YourWriter = new StreamWriter(@"c:\testfile.txt");
SqlCommand YourCommand = new SqlCommand();
SqlConnection YourConnection = new SqlConnection(YourConnectionString);
YourCommand.Connection = YourConnection;
YourCommand.CommandText = myQuery;
YourConnection.Open();
using (YourConnection)
{
using (SqlDataReader sdr = YourCommand.ExecuteReader())
using (YourWriter)
{
while (sdr.Read())
YourWriter.WriteLine(sdr[0].ToString() + sdr[1].ToString() + ",");
}
}
Mind you, in the while loop, you can write that line to the text file in any format you see fit with the column data from the SqlDataReader.
Keeping your original approach, here is a quick win:
Instead of using a string as a temporary buffer, use a StringBuilder. That lets you use its Append(string) method for concatenation instead of the += operator.
The += operator is especially inefficient: every concatenation allocates a brand new string, so if you place it in a loop that repeats (potentially) millions of times, performance suffers.
Append writes into the same internal buffer instead of creating a new string each time, so it's much faster.
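For example, the loop from the question could be buffered in a StringBuilder like this (same output, just without the repeated string allocations):
var sb = new StringBuilder();   // System.Text
for (int i = 0; i < t.Rows.Count; i++)
{
    for (int j = 0; j < t.Columns.Count; j++)
    {
        sb.Append(t.Rows[i][j]).Append(',');
    }
    sb.Append("\r\n");
}
string result = sb.ToString();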
Using the Response object without a Response.Close() causes, at least in some instances, the HTML of the page writing out the data to be written to the file. If you use Response.Close(), the connection can be closed prematurely and cause an error while producing the file.
It is recommended to use HttpApplication.CompleteRequest(); however, this appears to always cause the HTML to be written to the end of the file.
I have tried a stream in conjunction with the Response object and have had success in the development environment. I have not tried it in production yet.
I used .CSV to export data from the database with a DataReader. In my project I read the DataReader and create the .CSV file manually: in a loop I read the DataReader and, for every row, append each cell value to a result string, using "," to separate columns and "\n" to separate rows. Finally I save the result string as result.csv.
I suggest this high-performance extension. I tested it and it quickly exported 600,000 rows as .CSV.
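For reference, a minimal sketch of that manual approach, writing straight to a StreamWriter instead of building one big string (quoting/escaping of cell values is left out, and cmd stands for an already prepared SqlCommand):
using (SqlDataReader reader = cmd.ExecuteReader())
using (StreamWriter writer = new StreamWriter("result.csv"))
{
    while (reader.Read())
    {
        var cells = new string[reader.FieldCount];
        for (int i = 0; i < reader.FieldCount; i++)
            cells[i] = reader.IsDBNull(i) ? string.Empty : reader.GetValue(i).ToString();
        writer.WriteLine(string.Join(",", cells));   // "," between columns, one line per row
    }
}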
I use:
private void SaveData(string path)
{
DataTable tblResult = new DataTable();
using(SqlCommand cm = new SqlCommand("select something", objConnect))
{
tblResult.Load(cm.ExecuteReader());
}
if (tblResult != null)
{
using(FileStream fs = new FileStream(path, FileMode.Create, FileAccess.Write))
{
BinaryFormatter bin = new BinaryFormatter();
bin.Serialize(fs, tblResult);
}
}
}
Easy to use, and easy to load, with:
private DataTable LoadData(string path)
{
DataTable t = new DataTable();
using(FileStream fs = new FileStream(path, FileMode.Open, FileAccess.Read))
{
BinaryFormatter bin = new BinaryFormatter();
t = (DataTable)bin.Deserialize(fs);
}
return t;
}
You can also use this method to save a DataSet.
I have a tab delimited txt file with 500K records. I'm using the code below to read the data into a DataSet. With 50K records it works fine, but with 500K it gives "Exception of type 'System.OutOfMemoryException' was thrown."
What is a more efficient way to read large tab delimited data? Or how can I resolve this issue? Please give me an example.
public DataSet DataToDataSet(string fullpath, string file)
{
string sql = "SELECT * FROM " + file; // Read all the data
OleDbConnection connection = new OleDbConnection // Connection
("Provider=Microsoft.Jet.OLEDB.4.0;Data Source=" + fullpath + ";"
+ "Extended Properties=\"text;HDR=YES;FMT=Delimited\"");
OleDbDataAdapter ole = new OleDbDataAdapter(sql, connection); // Load the data into the adapter
DataSet dataset = new DataSet(); // To hold the data
ole.Fill(dataset); // Fill the dataset with the data from the adapter
connection.Close(); // Close the connection
connection.Dispose(); // Dispose of the connection
ole.Dispose(); // Get rid of the adapter
return dataset;
}
Use a stream approach with TextFieldParser - this way you will not load the whole file into memory in one go.
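A rough sketch of that (TextFieldParser lives in the Microsoft.VisualBasic.FileIO namespace, so add a reference to Microsoft.VisualBasic; fullpath is the file path from the question):
using Microsoft.VisualBasic.FileIO;

using (var parser = new TextFieldParser(fullpath))
{
    parser.TextFieldType = FieldType.Delimited;
    parser.SetDelimiters("\t");
    while (!parser.EndOfData)
    {
        string[] fields = parser.ReadFields();   // one record at a time, never the whole file
        // process 'fields' here, e.g. add a row to a DataTable
    }
}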
You really want to enumerate the source file and process one line at a time. I use the following:
public static IEnumerable<string> EnumerateLines(this FileInfo file)
{
using (var stream = File.Open(file.FullName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
using (var reader = new StreamReader(stream))
{
string line;
while ((line = reader.ReadLine()) != null)
{
yield return line;
}
}
}
Then for each line you can split on tabs and process one record at a time. This keeps memory usage really low for the parsing; you only use memory if the application needs it.
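For example (a hypothetical usage sketch; what you do with each record is up to you):
var file = new FileInfo(fullpath);   // fullpath as in the question
foreach (string line in file.EnumerateLines())
{
    string[] fields = line.Split('\t');
    // handle one record at a time here instead of filling a whole DataSet
}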
Have you tried the TextReader?
using (TextReader tr = File.OpenText(YourFile))
{
string strLine = string.Empty;
string[] arrColumns = null;
while ((strLine = tr.ReadLine()) != null)
{
arrColumns = strLine.Split('\t');
// Start Fill Your DataSet or Whatever you wanna do with your data
}
tr.Close();
}
I found FileHelpers
The FileHelpers are a free and easy to use .NET library to import/export data from fixed length or delimited records in files, strings or streams.
Maybe it can help.
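A rough sketch of how it could be used for a tab delimited file (the record class is an assumption and has to match your columns; this uses the FileHelpers async engine so records are streamed one at a time):
using FileHelpers;   // NuGet package: FileHelpers

[DelimitedRecord("\t")]
public class MyRecord            // hypothetical layout, adjust to your file
{
    public string Column1;
    public string Column2;
    public int Column3;
}

// ...
var engine = new FileHelperAsyncEngine<MyRecord>();
using (engine.BeginReadFile(fullpath))
{
    foreach (MyRecord record in engine)
    {
        // process one record at a time
    }
}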
Is converting a file to a byte array the best way to save ANY file format to disk or to a database varbinary column?
So if someone wants to save a .gif or .doc/.docx or .pdf file, can I just convert it to a UTF-8 byte array and save it to the db as a stream of bytes?
Since it's not mentioned which database you mean, I'm assuming SQL Server. The solution below works for both 2005 and 2008.
You have to create a table with VARBINARY(MAX) as one of the columns. In my example I've created the table Raporty with the column RaportPlik being a VARBINARY(MAX) column.
Method to put a file into the database from the drive:
public static void databaseFilePut(string varFilePath) {
byte[] file;
using (var stream = new FileStream(varFilePath, FileMode.Open, FileAccess.Read)) {
using (var reader = new BinaryReader(stream)) {
file = reader.ReadBytes((int) stream.Length);
}
}
using (var varConnection = Locale.sqlConnectOneTime(Locale.sqlDataConnectionDetails))
using (var sqlWrite = new SqlCommand("INSERT INTO Raporty (RaportPlik) Values(@File)", varConnection)) {
sqlWrite.Parameters.Add("@File", SqlDbType.VarBinary, file.Length).Value = file;
sqlWrite.ExecuteNonQuery();
}
}
This method gets a file from the database and saves it to the drive:
public static void databaseFileRead(string varID, string varPathToNewLocation) {
using (var varConnection = Locale.sqlConnectOneTime(Locale.sqlDataConnectionDetails))
using (var sqlQuery = new SqlCommand(@"SELECT [RaportPlik] FROM [dbo].[Raporty] WHERE [RaportID] = @varID", varConnection)) {
sqlQuery.Parameters.AddWithValue("@varID", varID);
using (var sqlQueryResult = sqlQuery.ExecuteReader())
if (sqlQueryResult != null) {
sqlQueryResult.Read();
var blob = new Byte[(sqlQueryResult.GetBytes(0, 0, null, 0, int.MaxValue))];
sqlQueryResult.GetBytes(0, 0, blob, 0, blob.Length);
using (var fs = new FileStream(varPathToNewLocation, FileMode.Create, FileAccess.Write))
fs.Write(blob, 0, blob.Length);
}
}
}
This method gets a file from the database and returns it as a MemoryStream:
public static MemoryStream databaseFileRead(string varID) {
MemoryStream memoryStream = new MemoryStream();
using (var varConnection = Locale.sqlConnectOneTime(Locale.sqlDataConnectionDetails))
using (var sqlQuery = new SqlCommand(@"SELECT [RaportPlik] FROM [dbo].[Raporty] WHERE [RaportID] = @varID", varConnection)) {
sqlQuery.Parameters.AddWithValue("@varID", varID);
using (var sqlQueryResult = sqlQuery.ExecuteReader())
if (sqlQueryResult != null) {
sqlQueryResult.Read();
var blob = new Byte[(sqlQueryResult.GetBytes(0, 0, null, 0, int.MaxValue))];
sqlQueryResult.GetBytes(0, 0, blob, 0, blob.Length);
//using (var fs = new MemoryStream(memoryStream, FileMode.Create, FileAccess.Write)) {
memoryStream.Write(blob, 0, blob.Length);
//}
}
}
return memoryStream;
}
This method puts a MemoryStream into the database:
public static int databaseFilePut(MemoryStream fileToPut) {
int varID = 0;
byte[] file = fileToPut.ToArray();
const string preparedCommand = @"
INSERT INTO [dbo].[Raporty]
([RaportPlik])
VALUES
(@File)
SELECT [RaportID] FROM [dbo].[Raporty]
WHERE [RaportID] = SCOPE_IDENTITY()
";
using (var varConnection = Locale.sqlConnectOneTime(Locale.sqlDataConnectionDetails))
using (var sqlWrite = new SqlCommand(preparedCommand, varConnection)) {
sqlWrite.Parameters.Add("#File", SqlDbType.VarBinary, file.Length).Value = file;
using (var sqlWriteQuery = sqlWrite.ExecuteReader())
while (sqlWriteQuery != null && sqlWriteQuery.Read()) {
varID = sqlWriteQuery["RaportID"] is int ? (int) sqlWriteQuery["RaportID"] : 0;
}
}
return varID;
}
While you can store files in this fashion, it has significant tradeoffs:
Most DBs are not optimized for giant quantities of binary data, and query performance often degrades dramatically as the table bloats, even with indexes. (SQL Server 2008, with the FILESTREAM column type, is the exception to the rule.)
DB backup/replication becomes extremely slow.
It's a lot easier to handle a corrupted drive with 2 million images -- just replace the disk on the RAID -- than a DB table that becomes corrupted.
If you accidentally delete a dozen images on a filesystem, your operations guys can replace them pretty easily from a backup, and since the table index is tiny by comparison, it can be restored quickly. If you accidentally delete a dozen images in a giant database table, you have a long and painful wait to restore the DB from backup, paralyzing your entire system in the meantime.
These are just some of the drawbacks I can come up with off the top of my head. For tiny projects it may be worth storing files in this fashion, but if you're designing enterprise-grade software I would strongly recommend against it.
It really depends on the database server.
For example, SQL Server 2008 supports a FILESTREAM datatype for exactly this situation.
Other than that, if you use a MemoryStream, it has a ToArray() method that will convert it to a byte[] - this can be used for populating a varbinary field.
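A minimal sketch of that idea (table and column names are made up; memoryStream is assumed to hold the file's bytes):
byte[] data = memoryStream.ToArray();
using (var conn = new SqlConnection(connectionString))                                   // assumed connection string
using (var cmd = new SqlCommand("INSERT INTO Files (Content) VALUES (@content)", conn))  // assumed table/column
{
    cmd.Parameters.Add("@content", SqlDbType.VarBinary, data.Length).Value = data;
    conn.Open();
    cmd.ExecuteNonQuery();
}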
I'll describe the way I've stored files in SQL Server and Oracle. How you get the file in the first place largely determines how you get its contents, and which database you are using determines how you store the content. These are two separate database examples, with two separate methods of getting the file, that I used.
SQL Server
Short answer: I used a base64 string that I converted to a byte[] and stored in a varbinary(max) field.
Long answer:
Say you're uploading via a website, so you're using an <input id="myFileControl" type="file" /> control, or React DropZone. To get the file, you're doing something like var myFile = document.getElementById("myFileControl").files[0]; or myFile = this.state.files[0];.
From there, I'd get the base64 string using code here: Convert input=file to byte array (use function UploadFile2).
Then I'd get that string, the file name (myFile.name) and type (myFile.type) into a JSON object:
var myJSONObj = {
file: base64string,
name: myFile.name,
type: myFile.type,
}
and post the file to an MVC server backend using XMLHttpRequest, specifying a Content-Type of application/json: xhr.send(JSON.stringify(myJSONObj));. You have to build a ViewModel to bind it with:
public class MyModel
{
public string file { get; set; }
public string name { get; set; }
public string type { get; set; }
}
and specify [FromBody]MyModel myModelObj as the passed in parameter:
[System.Web.Http.HttpPost] // required to spell it out like this if using ApiController, or it will default to System.Web.Mvc.HttpPost
public virtual ActionResult Post([FromBody]MyModel myModelObj)
Then you can add this into that function and save it using Entity Framework:
MY_ATTACHMENT_TABLE_MODEL tblAtchm = new MY_ATTACHMENT_TABLE_MODEL();
tblAtchm.Name = myModelObj.name;
tblAtchm.Type = myModelObj.type;
tblAtchm.File = System.Convert.FromBase64String(myModelObj.file);
EntityFrameworkContextName ef = new EntityFrameworkContextName();
ef.MY_ATTACHMENT_TABLE_MODEL.Add(tblAtchm);
ef.SaveChanges();
tblAtchm.File = System.Convert.FromBase64String(myModelObj.file); being the operative line.
You would need a model to represent the database table:
public class MY_ATTACHMENT_TABLE_MODEL
{
[Key]
public byte[] File { get; set; } // notice this change
public string Name { get; set; }
public string Type { get; set; }
}
This will save the data into a varbinary(max) field as a byte[]. Name and Type were nvarchar(250) and nvarchar(10), respectively. You could include size by adding it to your table as an int column & MY_ATTACHMENT_TABLE_MODEL as public int Size { get; set;}, and add in the line tblAtchm.Size = System.Convert.FromBase64String(myModelObj.file).Length; above.
Oracle
Short answer: Convert it to a byte[], assign it to an OracleParameter, add it to your OracleCommand, and update your table's BLOB field using a reference to the parameter's ParameterName value: :BlobParameter
Long answer:
When I did this for Oracle, I was using an OpenFileDialog and I retrieved and sent the bytes/file information this way:
byte[] array;
OracleParameter param = new OracleParameter();
Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
dlg.Filter = "Image Files (*.jpg, *.jpeg, *.jpe)|*.jpg;*.jpeg;*.jpe|Document Files (*.doc, *.docx, *.pdf)|*.doc;*.docx;*.pdf"
if (dlg.ShowDialog().Value == true)
{
string fileName = dlg.FileName;
using (FileStream fs = File.OpenRead(fileName))
{
array = new byte[fs.Length];
using (BinaryReader binReader = new BinaryReader(fs))
{
array = binReader.ReadBytes((int)fs.Length);
}
// Create an OracleParameter to transmit the Blob
param.OracleDbType = OracleDbType.Blob;
param.ParameterName = "BlobParameter";
param.Value = array; // <-- file bytes are here
}
fileName = fileName.Split('\\')[fileName.Split('\\').Length-1]; // gets last segment of the whole path to just get the name
string fileType = fileName.Split('.')[1];
if (fileType == "doc" || fileType == "docx" || fileType == "pdf")
fileType = "application\\" + fileType;
else
fileType = "image\\" + fileType;
// SQL string containing reference to BlobParameter named above
string sql = String.Format("INSERT INTO YOUR_TABLE (FILE_NAME, FILE_TYPE, FILE_SIZE, FILE_CONTENTS, LAST_MODIFIED) VALUES ('{0}','{1}',{2},:BlobParameter, SYSDATE)", fileName, fileType, array.Length);
// Do Oracle Update
RunCommand(sql, param);
}
And inside the Oracle update, done with ADO:
public void RunCommand(string strSQL, OracleParameter param)
{
OracleConnection oraConn = null;
OracleCommand oraCmd = null;
try
{
string connString = GetConnString();
oraConn = new OracleConnection(connString);
using (oraConn)
{
if (oraConn.State == ConnectionState.Open)
oraConn.Close();
oraConn.Open();
oraCmd = new OracleCommand(strSQL, oraConn);
// Add your OracleParameter
if (param != null)
oraCmd.Parameters.Add(param);
// Execute the command
oraCmd.ExecuteNonQuery();
}
}
catch (OracleException err)
{
// handle exception
}
finally
{
if (oraConn != null)
oraConn.Close();
}
}
private string GetConnString()
{
string host = System.Configuration.ConfigurationManager.AppSettings["host"].ToString();
string port = System.Configuration.ConfigurationManager.AppSettings["port"].ToString();
string serviceName = System.Configuration.ConfigurationManager.AppSettings["svcName"].ToString();
string schemaName = System.Configuration.ConfigurationManager.AppSettings["schemaName"].ToString();
string pword = System.Configuration.ConfigurationManager.AppSettings["pword"].ToString(); // hopefully encrypted
if (String.IsNullOrEmpty(host) || String.IsNullOrEmpty(port) || String.IsNullOrEmpty(serviceName) || String.IsNullOrEmpty(schemaName) || String.IsNullOrEmpty(pword))
{
return "Missing Param";
}
else
{
pword = decodePassword(pword); // decrypt here
return String.Format(
"Data Source=(DESCRIPTION =(ADDRESS = ( PROTOCOL = TCP)(HOST = {2})(PORT = {3}))(CONNECT_DATA =(SID = {4})));User Id={0};Password={1};",
schemaName,
pword,
host,
port,
serviceName
);
}
}
And the datatype for the FILE_CONTENTS column was BLOB, the FILE_SIZE was NUMBER(10,0), LAST_MODIFIED was DATE, and the rest were NVARCHAR2(250).
What database are you using? Normally you don't save files to a database, but I think SQL Server 2008 has support for it...
A file is binary data, hence UTF-8 does not matter here.
UTF-8 matters when you try to convert a string to a byte array... not a file to a byte array.
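In other words:
// Files are read as raw bytes; no text encoding is involved.
byte[] fileBytes = File.ReadAllBytes(@"C:\docs\report.pdf");   // hypothetical path

// Encoding only matters when turning a string into bytes.
byte[] textBytes = Encoding.UTF8.GetBytes("some text");        // System.Text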
Confirming that I was able to use the answer posted by MadBoy and edited by Otiel on both MS SQL Server 2012 and 2014, in addition to the versions previously listed, using varbinary(MAX) columns.
If you are wondering why you cannot pick "Filestream" as a datatype in the SQL Server table designer (as noted in a separate answer), or why you cannot set a column's datatype to "Filestream" using T-SQL, it is because FILESTREAM is a storage attribute of the varbinary(MAX) datatype. It is not a datatype on its own.
See these articles on setting up and enabling FILESTREAM on a database:
https://msdn.microsoft.com/en-us/library/cc645923(v=sql.120).aspx
http://www.kodyaz.com/t-sql/default-filestream-filegroup-is-not-available-in-database.aspx
Once configured, a filestream enabled varbinary(max) column can be added as so:
ALTER TABLE TableName
ADD ColumnName varbinary(max) FILESTREAM NULL
GO
Yes, generally the best way to store a file in a database is to save the byte array in a BLOB column. You will probably want a couple of columns to additionally store the file's metadata such as name, extension, and so on.
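For instance, a table along those lines could be created like this (a sketch only; all names are assumptions):
// Hypothetical table layout: the file itself plus a few metadata columns.
const string createTableSql = @"
    CREATE TABLE StoredFiles (
        Id        INT IDENTITY(1,1) PRIMARY KEY,
        FileName  NVARCHAR(255)  NOT NULL,
        Extension NVARCHAR(10)   NOT NULL,
        SizeBytes INT            NOT NULL,
        Content   VARBINARY(MAX) NOT NULL
    );";

using (var conn = new SqlConnection(connectionString))   // assumed connection string
using (var cmd = new SqlCommand(createTableSql, conn))
{
    conn.Open();
    cmd.ExecuteNonQuery();
}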
It is not always a good idea to store files in the database - for instance, the database size will grow fast if you store files in it. But that all depends on your usage scenario.
Look at this; you may find the answer to your question more easily.
using:
using System.IO;
using System.Data.SqlClient;
code:
private void Form1_Load(object sender, EventArgs e)
{
display();
}
byte[] filebyte = null;
SqlConnection sqlcon = new SqlConnection("Data Source=.;Initial Catalog=test programin;Integrated Security=True");
SqlCommand sqlcmnd = new SqlCommand();
void display ()
{
DataSet dtset = new DataSet();
SqlDataAdapter sqldta = new SqlDataAdapter("select name from tbl_down_up",sqlcon);
sqldta.Fill(dtset, "tbl_down_up");
dataGridView1.DataSource = dtset;
dataGridView1.DataMember = "tbl_down_up";
dataGridView1.Columns[0].AutoSizeMode = DataGridViewAutoSizeColumnMode.Fill;
}
private void btnup_Click(object sender, EventArgs e)
{
OpenFileDialog ofd = new OpenFileDialog();
ofd.Filter = "all file|*.*";
if(ofd.ShowDialog()==DialogResult.OK)
{
FileStream fs = new FileStream(ofd.FileName, FileMode.Open);
MemoryStream ms = new MemoryStream();
fs.CopyTo(ms);
filebyte = ms.ToArray();
string[] filename = ofd.FileName.Split('\\');
sqlcmnd = new SqlCommand("insert into tbl_down_up(name,data)values(#name,#data)",sqlcon);
sqlcmnd.Parameters.AddWithValue("#name",filename[filename.Length-1]);
sqlcmnd.Parameters.AddWithValue("#data",SqlDbType.VarBinary).Value=filebyte;
sqlcon.Open();
sqlcmnd.ExecuteNonQuery();
sqlcon.Close();
sqlcmnd.Parameters.Clear();
display();
}
}
private void btndown_Click(object sender, EventArgs e)
{
SaveFileDialog sfd = new SaveFileDialog();
string[] filename = dataGridView1[0, dataGridView1.CurrentRow.Index].Value.ToString().Split('.');
sfd.Filter = "type file " + filename[filename.Length - 1] + " |*." + filename[filename.Length - 1];
sfd.FileName = dataGridView1[0, dataGridView1.CurrentRow.Index].Value.ToString();
if (sfd.ShowDialog() == DialogResult.OK)
{
FileStream fs = new FileStream(sfd.FileName, FileMode.Create);
sqlcmnd = new SqlCommand("select data from tbl_down_up where name ='"+dataGridView1[0,dataGridView1.CurrentRow.Index].Value.ToString()+"'", sqlcon);sqlcon.Open();
SqlDataReader dr = sqlcmnd.ExecuteReader();
while (dr.Read())
{
filebyte = (byte[])dr[0];
}
sqlcon.Close();
fs.Write(filebyte, 0, filebyte.Length);
fs.Close();
display();
}
}
private void btndel_Click(object sender, EventArgs e)
{
sqlcmnd = new SqlCommand("delete from tbl_down_up where name =N'" + dataGridView1[0, dataGridView1.CurrentRow.Index].Value.ToString() + "'", sqlcon);
sqlcon.Open();
sqlcmnd.ExecuteNonQuery();
sqlcon.Close();
display();
}