I'm trying to set up code to import .CSV files into .NET.
I've tried both Microsoft.Jet.OLEDB.4.0 and Microsoft.ACE.OLEDB.12.0 providers, including modifying the Extended Properties and even modifying corresponding registry keys for each. I have yet to come up with a solution for what I am attempting to do:
I would like to import each field as text, but leave fields longer than 255 characters un-truncated.
What I've found so far is that I can have one or the other, but not both.
If I set the ImportMixedTypes registry value to Majority Type, it leaves 255+ character text fields un-truncated, but converts other fields to unwanted types.
If I set the ImportMixedTypes registry value to Text, it truncates 255+ character text fields, but leaves the other field types as text.
How do I accomplish this using OleDb?
Additional info:
I have a "notes" column, which can contain very lengthy text. I also have a "zip code" column, which contains mixed zip-code formats (5-digit and 9-digit with a dash). Typically, the 5-digit zip-code format is more popular, so the importer thinks that the column should be integer type, leaving the 9-digit zip-codes as null values after import.
Have you considered using something as versatile as the FileHelpers library (http://filehelpers.sourceforge.net/) instead?
Or alternatively if your requirements are no more than you state (read csv file, get string fields), use something really simple such as:
public static class SimpleCsvImport
{
    // Reads a CSV file and yields one list of trimmed field values per line.
    // Note: lines are split on ',' only — quoted fields containing commas are
    // not supported by this simple importer.
    public static IEnumerable<List<string>> Import(string csvFileName)
    {
        using (var reader = File.OpenText(csvFileName))
        {
            string line;
            while ((line = reader.ReadLine()) != null)
            {
                var fields = new List<string>();
                foreach (var raw in line.Split(','))
                    fields.Add(raw.Trim());
                yield return fields;
            }
        }
    }
}
I have implemented this code to read a memo field (Microsoft Access):
/// <summary>
/// Reads an Access memo (long text) field in chunks via a sequential-access
/// reader so the value is not truncated at 255 characters.
/// Fixes over the original: the SQL string contained smart quotes (“ ”) that do
/// not compile; the identity value is now passed as a parameter instead of being
/// concatenated into the SQL (injection-safe); the command and reader are
/// disposed; and the result buffer grows with the data instead of being capped
/// at 64 KB of characters.
/// </summary>
/// <returns>The memo field's full text, or "" when no row matches or the field is NULL.</returns>
private string GetMemoField(string TableName, string FieldName, string IdentityFieldName, string IdentityFieldValue, OleDbConnection conn)
{
    string ret = "";
    // NOTE(review): table/column identifiers cannot be parameterized — they must
    // come from trusted code, not from user input.
    string sql = "SELECT " + FieldName + " FROM " + TableName + " WHERE " + IdentityFieldName + " = ?";
    using (OleDbCommand cmd1 = new OleDbCommand(sql, conn))
    {
        cmd1.Parameters.AddWithValue("?", IdentityFieldValue);
        // SequentialAccess streams the memo field one buffer at a time.
        using (var reader = cmd1.ExecuteReader(System.Data.CommandBehavior.SequentialAccess))
        {
            if (reader.Read() && !reader.IsDBNull(0))
            {
                StringBuilder sb = new StringBuilder();
                char[] buffer = new char[1024];      // single-read chunk
                long dataIndex = 0;
                long charsRead;
                // GetChars returns 0 once every character has been consumed.
                while ((charsRead = reader.GetChars(0, dataIndex, buffer, 0, buffer.Length)) > 0)
                {
                    sb.Append(buffer, 0, (int)charsRead);
                    dataIndex += charsRead;
                }
                ret = sb.ToString();
            }
        }
    }
    return ret;
}
Related
I am trying to import value HACKÅS in my sql table(throu SSIS Package), but it's getting inserted as HACKÃ…S.
tried with changing datatype from varchar(max) to nvarchar(max). No Success.
Please suggest.
Below is my code block from SSIS script task..
public void Main()
{
    // Builds an SSIS package at runtime that loads a delimited flat file into a
    // freshly-created SQL Server table (every column nvarchar(max)) through an
    // OLE DB destination, then executes the package.
    //Declare new application
    Application importTextFile_app = new Application();
    //Create package
    Package ImportTextFile_pkg = new Package();
    //Get the File_Path from package variable
    string File_Path = (string)Dts.Variables["$Package::File_Path"].Value;
    //Get the delimiter value from package variable
    string Delimiter = (string)Dts.Variables["$Package::Delimiter"].Value;
    // Turn an escaped "\t" in the variable into a real tab character
    Delimiter = Delimiter.Replace("\\t", "\t");
    // (The original allocated a throw-away char[] before overwriting it with ToCharArray.)
    char[] delimiters = Delimiter.ToCharArray();
    //Get the OLE DB destination connection string from package variable
    string Oledb_Connection_String = (string)Dts.Variables["$Package::Oledb_Connection_String"].Value;
    //Set the destination table name
    string Destination_Table_Name = (string)Dts.Variables["$Package::Table_Name"].Value;
    //Assign relevant package name and description - given table name for uniqueness to avoid concurrency issues
    ImportTextFile_pkg.Name = Destination_Table_Name;
    ImportTextFile_pkg.Description = "Programmatically create an SSIS 2012 package that loads a Flat File Source into OLE DB Destination Using Script Task's C# language";
    //Insert the Data Flow Task with appropriate name and some buffer space for processing of file
    ImportTextFile_pkg.Executables.Add("STOCK:PipelineTask");
    TaskHost taskHost = ImportTextFile_pkg.Executables[0] as TaskHost;
    MainPipe dataFlowTask = (MainPipe)taskHost.InnerObject;
    taskHost.Name = "Dynamic Data Flow Task";
    taskHost.Properties["DefaultBufferMaxRows"].SetValue(taskHost, "1000000");
    //Insert the Flat File connection
    ConnectionManager connectionManagerFlatFile = ImportTextFile_pkg.Connections.Add("FLATFILE");
    //You can change this path depending on where you have stored the flat file
    connectionManagerFlatFile.ConnectionString = File_Path;
    //Assign name to the flat file connection
    connectionManagerFlatFile.Name = "TXT_FlatFile";
    //Indicate that the flat file is delimited
    connectionManagerFlatFile.Properties["Format"].SetValue(connectionManagerFlatFile, "Delimited");
    //Indicate whether the source file has column headings or not - in this case, our sample data has column headings.
    connectionManagerFlatFile.Properties["ColumnNamesInFirstDataRow"].SetValue(connectionManagerFlatFile, Convert.ToBoolean(true));
    //Indicate that the flat file is text qualified
    connectionManagerFlatFile.Properties["TextQualifier"].SetValue(connectionManagerFlatFile, "\"");
    //Get native Flat File connection
    RuntimeWrapper.IDTSConnectionManagerFlatFile100 connectionFlatFile = connectionManagerFlatFile.InnerObject as RuntimeWrapper.IDTSConnectionManagerFlatFile100;
    string line;
    //Prepare create table script according to columns in a file
    string create_table_script;
    Destination_Table_Name = "[" + Destination_Table_Name + "]";
    create_table_script = "create table " + Destination_Table_Name + " ( ";
    //Determine the columns by reading the first (header) line of the flat file.
    //WARNING: the column names are concatenated into DDL below - the header must
    //come from a trusted source, otherwise this is open to SQL injection.
    using (StreamReader file = new StreamReader(File_Path))
    {
        // Only the header line is needed; the original while/break loop read at
        // most one line anyway. The redundant catch { throw ex; } (which destroyed
        // the stack trace) and the finally { file.Close(); } (using disposes) are gone.
        if ((line = file.ReadLine()) != null)
        {
            string[] parts = line.Split(delimiters, StringSplitOptions.RemoveEmptyEntries);
            for (int i = 0; i < parts.Length; i++)
            {
                RuntimeWrapper.IDTSConnectionManagerFlatFileColumn100 flatFileCol = connectionFlatFile.Columns.Add() as RuntimeWrapper.IDTSConnectionManagerFlatFileColumn100;
                create_table_script = create_table_script + " [" + parts[i] + "] nvarchar(max),";
                sS_AssignColumnProperties(flatFileCol, parts[i], Delimiter);
            }
        }
        // Drop the trailing comma and close the column list
        create_table_script = create_table_script.Remove(create_table_script.Length - 1);
        create_table_script = create_table_script + ")";
    }
    // Create the destination table. 'using' guarantees the connection is closed
    // even when ExecuteNonQuery throws (the original leaked it in that case).
    using (OleDbConnection conn = new OleDbConnection(Oledb_Connection_String))
    using (OleDbCommand cmd = new OleDbCommand(create_table_script, conn))
    {
        conn.Open();
        cmd.ExecuteNonQuery();
    }
    //Edit the last Flat File column delimiter into NewLine instead of a Comma
    connectionFlatFile.Columns[connectionFlatFile.Columns.Count - 1].ColumnDelimiter = Environment.NewLine;
    //Insert Flat File source component
    IDTSComponentMetaData100 componentSource = dataFlowTask.ComponentMetaDataCollection.New();
    componentSource.Name = "FlatFileSource";
    componentSource.ComponentClassID = "DTSAdapter.FlatFileSource";
    //Insert source design-time instance and initialise component
    CManagedComponentWrapper instanceSource = componentSource.Instantiate();
    instanceSource.ProvideComponentProperties();
    //Set source connection
    componentSource.RuntimeConnectionCollection[0].ConnectionManagerID = connectionManagerFlatFile.ID;
    componentSource.RuntimeConnectionCollection[0].ConnectionManager = DtsConvert.GetExtendedInterface(connectionManagerFlatFile);
    //Reinitialize Flat File source metadata
    instanceSource.AcquireConnections(null);
    instanceSource.ReinitializeMetaData();
    instanceSource.ReleaseConnections();
    //Insert the SQL Server OLE-DB connection
    ConnectionManager connectionManagerOleDb = ImportTextFile_pkg.Connections.Add("OLEDB");
    // Assign the connection string directly - the original routed it through
    // string.Format, which would throw if the string ever contained '{' or '}'.
    connectionManagerOleDb.ConnectionString = Oledb_Connection_String;
    connectionManagerOleDb.Name = "OLEDB";
    connectionManagerOleDb.Description = "OLEDB Connection";
    //Insert OLE-DB destination
    IDTSComponentMetaData100 componentDestination = dataFlowTask.ComponentMetaDataCollection.New();
    componentDestination.Name = "OLEDBDestination";
    componentDestination.Description = "OLEDB Destination for the Flat File data load";
    componentDestination.ComponentClassID = "DTSAdapter.OLEDBDestination";
    //Insert destination design-time instance and initialise component
    CManagedComponentWrapper instanceDestination = componentDestination.Instantiate();
    instanceDestination.ProvideComponentProperties();
    //Set destination connection
    componentDestination.RuntimeConnectionCollection[0].ConnectionManagerID = connectionManagerOleDb.ID;
    componentDestination.RuntimeConnectionCollection[0].ConnectionManager = DtsConvert.GetExtendedInterface(connectionManagerOleDb);
    //Indicates the name of the database object used to open a rowset
    instanceDestination.SetComponentProperty("OpenRowset", Destination_Table_Name);
    //Specifies the mode used to open the database
    instanceDestination.SetComponentProperty("AccessMode", 3);
    //Specifies options to be used with fast load. Applies only if fast load is turned on
    instanceDestination.SetComponentProperty("FastLoadOptions", "TABLOCK,CHECK_CONSTRAINTS");
    //Indicates whether the values supplied for identity columns will be copied to the destination or not
    //In this case, we have set this property to false
    instanceDestination.SetComponentProperty("FastLoadKeepIdentity", false);
    //Indicates whether the columns containing null will have null inserted in the destination or not
    //In this case, we have opted not to insert nulls
    instanceDestination.SetComponentProperty("FastLoadKeepNulls", false);
    //Specifies the column code page to use when code page information is unavailable from the data source
    //In this case we used the default - 1252
    instanceDestination.SetComponentProperty("DefaultCodePage", 1252);
    //Specifies when commits are issued during data insertion
    //In this case, we have opted for the default size which is set to 2147483647
    instanceDestination.SetComponentProperty("FastLoadMaxInsertCommitSize", 2147483647);
    //Indicates the number of seconds before a command times out
    //In this case, we have opted for the default value of 0 which indicates an infinite time-out
    instanceDestination.SetComponentProperty("CommandTimeout", 0);
    //Indicates the usage of DefaultCodePage property value when describing the character data
    //In this case, we have opted for the default value of false
    instanceDestination.SetComponentProperty("AlwaysUseDefaultCodePage", false);
    //Connect the Flat File source to the OLE DB Destination component
    dataFlowTask.PathCollection.New().AttachPathAndPropagateNotifications(componentSource.OutputCollection[0], componentDestination.InputCollection[0]);
    //Get input and virtual input for destination to select and map columns
    IDTSInput100 destinationInput = componentDestination.InputCollection[0];
    IDTSVirtualInput100 destinationVirtualInput = destinationInput.GetVirtualInput();
    IDTSVirtualInputColumnCollection100 destinationVirtualInputColumns = destinationVirtualInput.VirtualInputColumnCollection;
    //Reinitialize the metadata, generating external columns from flat file columns
    instanceDestination.AcquireConnections(null);
    instanceDestination.ReinitializeMetaData();
    instanceDestination.ReleaseConnections();
    //Select and map destination columns
    foreach (IDTSVirtualInputColumn100 virtualInputColumn in destinationVirtualInputColumns)
    {
        // Select column, and retain new input column
        IDTSInputColumn100 inputColumn = instanceDestination.SetUsageType(destinationInput.ID, destinationVirtualInput, virtualInputColumn.LineageID, DTSUsageType.UT_READONLY);
        // Find external column by name
        IDTSExternalMetadataColumn100 externalColumn = destinationInput.ExternalMetadataColumnCollection[inputColumn.Name];
        // Map input column to external column
        instanceDestination.MapInputColumn(destinationInput.ID, inputColumn.ID, externalColumn.ID);
    }
    //Execute the package or disable the below code if you intend running the package later
    ImportTextFile_pkg.Execute();
    //Finally, save the package - in this case, we have opted to save the package into file system
    //importTextFile_app.SaveToXml(@"D:\newArticle.dtsx", ImportTextFile_pkg, null);
    Dts.TaskResult = (int)ScriptResults.Success;
}
/// <summary>
/// Configures one flat-file source column: delimited, text-qualified, imported
/// as a Unicode string so no type guessing (and no truncation) occurs.
/// </summary>
private static void sS_AssignColumnProperties(RuntimeWrapper.IDTSConnectionManagerFlatFileColumn100 flatFileCol, string getColName, string getDelim)
{
    //Assign delimiter
    flatFileCol.ColumnType = "Delimited";
    flatFileCol.ColumnDelimiter = getDelim;
    flatFileCol.TextQualified = true;
    //Indicate column data type - in this case, all the source columns will be set to String Data Type
    flatFileCol.DataType = RuntimeWrapper.DataType.DT_WSTR;
    //Indicate column width - all source columns are set to a length of 4000
    //(the original comment said 100, which did not match the code)
    flatFileCol.ColumnWidth = 4000;
    flatFileCol.MaximumWidth = 4000;
    //Assign column name via the IDTSName100 interface
    RuntimeWrapper.IDTSName100 columnName = flatFileCol as RuntimeWrapper.IDTSName100;
    columnName.Name = getColName;   // getColName is already a string; ToString() was redundant
}
Got the Solution...
Specified CodePage for flat file connection as below,
connectionManagerFlatFile.Properties["CodePage"].SetValue(connectionManagerFlatFile, 65001);
try this
insert into your_table (column_name) values (N'HACKÅS')
I'm trying to insert Some text in exact location on existing file.
First method I've applied-
This one does not affect the file at all — why?
// Attempt #1 (as posted): the file never changes.
string newData;
// Read the whole file and locate the first "1".
string data = System.IO.File.ReadAllText("lal.txt");
int indx = data.IndexOf("1");
if (data.Contains("1"))
{
MessageBox.Show(indx.ToString());
// Insert returns a NEW string (strings are immutable); the result goes into newData...
newData=data.Insert(indx+1, "ooooooooooooooooooooooooo");
// BUG (explained in the answer below): the UNMODIFIED string `data` is written
// back, so the inserted text is lost.
File.WriteAllText("lal.txt",data);
}
Another Method I'm using --This method completely erases all contents or creates no content ???
// Attempt #2 (as posted): the output file ends up empty or missing content.
string newData;
string data = System.IO.File.ReadAllText("lal.txt");
int indx = data.IndexOf("1");
if (data.Contains("1"))
{
MessageBox.Show(indx.ToString());
newData=data.Insert(indx+1, "ooooooooooooooooooooooooo");
// BUG (explained in the answer below): the StreamWriter is never flushed or
// closed, so buffered text may never reach ll1.txt. Wrap it in a using block.
var f = new StreamWriter(File.Create("ll1.txt"));
f.Write(newData);
//data.Insert(indx, "OIasasas");
}
First snippet has error: changed string is in newData variable, but you wrote unchanged string to file.
2nd snippet has another error: you write proper string, but do not close file stream, so changes may not be made.
This should work
// Working version: insert after the first "1" and write back the MODIFIED string;
// File.WriteAllText opens, writes, flushes and closes the file in one call.
string data = System.IO.File.ReadAllText("lal.txt");
// IndexOf returns -1 when "1" is absent, so the index doubles as the existence check.
int indx = data.IndexOf("1");
if (indx != -1)
{
var newData = data.Insert(indx + 1, "ooooooooooooooooooooooooo");
File.WriteAllText("lal.txt", newData);
}
So i'm in my 1st year of college, C# in Visual Studio is one of six modules.
Basically my problem is, i need to read in a value that's in a .txt file and calculate commission from that value.
The .txt file consists of:
1,Pat Ryan,280
2,Mary Smith,300
3,Tom Lynch,20
The 3rd value on each line is what i need to calculate the commission but i can't wrap my head around getting that value since you can't just pick out a value with the code we are currently using, you need to go through each line to get to the next.
This is what i've done so far. I tried doing the calculations this way:
if (columns [0] < 1000) {commission = column[0] * .05}
But get an error:
"Operator '<' cannot be applied to operands of type 'string[]' and 'int'"
Any help would be greatly appreciated!
// Prints each "number,name,sales" record from sales.txt in aligned columns.
static void salesReport()
{
    string path = "sales.txt";
    // 'using' releases the file handle even if a read throws — the original
    // never closed the FileStream or the StreamReader.
    using (StreamReader salesReport = new StreamReader(new FileStream(path, FileMode.Open, FileAccess.Read)))
    {
        Console.WriteLine("{0,-15}{1,-30}{2,-20}\n", "Number", "Name", "Sales");
        string inputText;
        while ((inputText = salesReport.ReadLine()) != null)
        {
            // Split allocates the result array itself; the original pre-allocated
            // a string[3] that was immediately discarded.
            string[] columns = inputText.Split(',');
            Console.WriteLine("{0,-15}{1,-30}{2,-10}\n", columns[0], columns[1], columns[2]);
        }
    }
}
You cannot perform a comparison operation between a string and int as specified in your error. You will need to cast the value you get from the text file to int and then do a comparison.
// Sales figure is the 3rd column (index 2). Commission is 5% OF the sales value,
// so multiply by .05 — the original divided, which inflates the result 400x.
if (Convert.ToInt32(columns[2]) < 1000)
{
    commission = Convert.ToInt32(columns[2]) * .05;
}
Looks like you want the 3rd column, I have changed the index to 2.
Here is a quick example of trying to parse a file and do what you want. It has a lot of bad practices, such as the way I am concatenating the output string, but you should get the idea.
// Reads each CSV line as "id,name,sales" and prints a 5% commission per person.
static void Main(string[] args)
{
    // Fixed: the original used #"..." (a markdown-mangled verbatim string
    // literal), which does not compile; it must be @"...".
    using (StreamReader reader = new StreamReader(@"C:\Path\To\File.txt"))
    {
        string line;
        while ((line = reader.ReadLine()) != null)
        {
            string[] stuff = line.Split(',');
            int id = Convert.ToInt32(stuff[0]);     // record number (unused here)
            string name = stuff[1];
            int val = Convert.ToInt32(stuff[2]);    // sales amount
            double commission = (double)val * 0.05; // 5% of sales
            Console.WriteLine(name + "'s Commission: " + commission.ToString());
        }
    }
}
Your issue is that you are not evaluating an integer. You are attempting to apply your comparison operator to the string representation after the split operation.
I added a method safeToInt which will prevent pesky exceptions if the string is not an int. Of course, if you want to be aware of those errors, you should just use Int32.TryParse directly and evaluate the boolean result.
I did not change your code to use the method I added for you :-) You should be able to figure that out.
// Prints each "number,name,sales" record from sales.txt in aligned columns.
static void salesReport() {
    string path = "sales.txt";
    // 'using' disposes the reader (and the underlying stream) even on error;
    // the original never closed either.
    using (StreamReader salesReport = new StreamReader(new FileStream(path, FileMode.Open, FileAccess.Read))) {
        Console.WriteLine("{0,-15}{1,-30}{2,-20}\n", "Number","Name","Sales");
        string inputText;
        while ((inputText = salesReport.ReadLine()) != null) {
            // Split allocates its own array; the pre-allocated string[3] was dead code.
            string[] columns = inputText.Split(',');
            Console.WriteLine("{0,-15}{1,-30}{2,-10}\n", columns[0], columns[1], columns[2]);
        }
    }
}
// Parses `input` as an Int32, returning `defaultValue` when parsing fails
// (no exception is thrown for malformed input).
static int safeToInt(string input, int defaultValue = 0){
    int parsed;
    return Int32.TryParse(input, out parsed) ? parsed : defaultValue;
}
Try this
if (int.Parse(columns[0]) < 1000) {commission = int.Parse(columns[0]) * .05}
My program currently reads a text file and compares it with the value in a text box and then tells me how many matches, this currently works.
My query is that it is case sensitive. Is there any way to make it so it doesn't matter whether it is in upper or lower case?
This is my code below:
// Counts, for each address in cust1, how many lines of the file contain it,
// then APPENDS a summary line to the same file.
if (!String.IsNullOrEmpty(CustodianEAddress.Text))
{
// Length is presumably the number of addresses in cust1 — TODO confirm at the caller.
for (AddressLength1 = 0; AddressLength1 < Length; AddressLength1++)
{
List<string> list1 = new List<string>();
using (StreamReader reader = new StreamReader(FileLocation))
{
string line1;
//max 500
string[] LineArray1 = new string[500];
while ((line1 = reader.ReadLine()) != null)
{
list1.Add(line1); // Add to list.
// Case-SENSITIVE substring match; to ignore case, pass
// StringComparison.OrdinalIgnoreCase to IndexOf (see the answers below).
if (line1.IndexOf(cust1[AddressLength1].ToString()) != -1)
{
// NOTE(review): count1 is never reset between addresses, so counts accumulate
// across loop iterations — confirm whether that is intended.
count1++;
// NOTE(review): because count1 is incremented first, slot 0 is never used and
// index 500 would overflow once count1 reaches the array length.
LineArray1[count1] = line1;
}
}
// Redundant: the enclosing using block already disposes (and closes) the reader.
reader.Close();
// NOTE(review): this appends to the SAME file being searched, so later
// iterations will also match text inside these summary lines.
using (System.IO.StreamWriter filed =
new System.IO.StreamWriter(FileLocation, true))
{
filed.WriteLine("");
filed.WriteLine("The email address " +
cust1[AddressLength1].ToString() + " was found " + count1 +
" times within the recipient's inbox");
}
string count1a;
count1a = count1.ToString();
}
}
}
else
{
MessageBox.Show("Please Enter an Email Address");
}
So basically, I need to compare the value in cust1[AddressLength1] with any values found in an array which is in the text file.
String.Compare() takes in an optional parameter that lets you specify whether or not the equality check should be case sensitive.
Edited in response to code being posted
Compare and IndexOf both take in an optional enumeration, StringComparison. If you choose StringComparison.OrdinalIgnoreCase then case will be ignored.
Here's a quick way to compare two strings without checking case:
string a;
string b;
string.Compare(a, b, true);
The true here is passed as the value of the ignoreCase parameter, meaning that upper and lower-case letters will be compared as if they were all the same case.
EDIT:
I've cleaned up your code a bit, and also put in the compare function. I included comments where I changed stuff:
// Cleaned-up version from the answer: collects matching lines and reports the
// count via LineArray1.Count instead of a separate counter.
// Not needed: see below. List<string> list1 = new List<string>();
using (StreamReader reader = new StreamReader(FileLocation))
{
string line1;
//max 500
List<string> LineArray1 = new List<string>();
while ((line1 = reader.ReadLine()) != null)
{
// list1.Add(line1); // Add to list.
// By adding to the list, then searching it, you are searching the whole list for every single new line - you're searching through the same elements multiple times.
// NOTE(review): string.Compare(...) == 0 is a WHOLE-LINE equality test, whereas
// the question's original code used IndexOf (substring search). If the address
// can appear inside a longer line, use
// line1.IndexOf(cust1[AddressLength1].ToString(), StringComparison.OrdinalIgnoreCase) != -1
// instead — confirm which semantics are wanted.
if (string.Compare(line1, cust1[AddressLength1].ToString(), true) == 0)
{
// You can just use LineArray1.Count for this instead. count1++;
LineArray1.Add(line1);
}
}
// Not needed: using() takes care of this. reader.Close();
// NOTE(review): still appends the summary to the same file that is searched.
using (System.IO.StreamWriter filed =
new System.IO.StreamWriter(FileLocation, true))
{
filed.WriteLine(); // You don't need an empty string for a newline.
filed.WriteLine("The email address " +
cust1[AddressLength1].ToString() + " was found " + LineArray1.Count +
" times within the recipient's inbox");
}
string count1a;
count1a = LineArray1.Count.ToString();
}
Whether or not you are reading from a file does not matter; when comparing,
use the static string.Compare function:
public static int Compare(
string strA,
string strB,
bool ignoreCase
)
and pass true as a last parameter.
Hi all i have my database structure as follows
Field Type
FileHeader longblob
BatchHeader longblob
Entry longblob
BtchEntry longblob
FileControl longblob
I will have the data to be inserted is as follows
101 111111111 1111111111104021031A094101
52201 1 1 PPD1 110402110402 1111000020000001
6221110000251 00000000011 1 1 0111000020000001
822000000100111000020000000000000000000000011 111000020000001
52251 1 1 CCD1 110402110402 1111000020000002
6281110000251 00000000011 1 1 0111000020000002
822500000100111000020000000000010000000000001 111000020000002
9000006000001000000060066600012000000000003000000000003
As you can observe, there are multiple lines that start with 5, 6 and 8. I would like to save those individually to the corresponding columns of my table. Is it possible to do? If so, can anyone mention the best method to do it? If anything is unclear, please say so.
The code i written is
// Reads the file line by line and captures each NACHA-style record type by its
// leading digit: 1 = file header, 5 = batch header, 6 = entry detail,
// 8 = batch control, 9 = file control.
// NOTE(review): each assignment overwrites the previous one, so only the LAST
// 5/6/8 record survives — see the StringBuilder-based answer for keeping them all.
using (StreamReader srRead = new StreamReader(filePath))
{
    while (srRead.Peek() >= 0)
    {
        strLine = srRead.ReadLine();
        // A line has exactly one record-type digit, so else-if stops testing
        // the remaining prefixes once one matches.
        if (strLine.StartsWith("1"))
        {
            strFileHeader = strLine;
        }
        else if (strLine.StartsWith("5"))
        {
            strBatchHeader = strLine;
        }
        else if (strLine.StartsWith("6"))
        {
            strEntry = strLine;
        }
        else if (strLine.StartsWith("8"))
        {
            strBtchcntrl = strLine;
        }
        else if (strLine.StartsWith("9"))
        {
            strFileCntrl = strLine;
        }
    }
}
// MySQL named parameters use the '@' prefix; the '#' characters in the original
// post were a formatting artifact and would never match the placeholders below.
string strQuery = "insert into tblfiles(FName, FData,FileHeader,BatchHeader,Entry,BtchEntry,FileControl) values (@_FName,@_FData,@_FileHeader,@_BtchHeader,@_EntryDets,@_BtchCntrl,@_FileCntrl)";
MySqlCommand cmd = new MySqlCommand(strQuery);
cmd.Parameters.Add("@_FName", MySqlDbType.VarChar).Value = filename;
cmd.Parameters.Add("@_FData", MySqlDbType.LongBlob).Value = bytes;
cmd.Parameters.Add("@_FileHeader", MySqlDbType.LongBlob).Value = strFileHeader;
cmd.Parameters.Add("@_BtchHeader", MySqlDbType.LongBlob).Value = strBatchHeader;
cmd.Parameters.Add("@_EntryDets", MySqlDbType.LongBlob).Value = strEntry;
cmd.Parameters.Add("@_BtchCntrl", MySqlDbType.LongBlob).Value = strBtchcntrl;
cmd.Parameters.Add("@_FileCntrl", MySqlDbType.LongBlob).Value = strFileCntrl;
InsertUpdateData(cmd);
But this will only insert the last line of each type into the DB, whereas I would like to save each and every line as I stated.
No - a column can only store one value per row. You could combine all your batch headers into one blob and store that as a single value, but you would have to be able to split them apart again when your read the data.
Instead - it looks as though:
each file starts with a '1' record and ends with a '9' record
each file contains zero or more batches
each batch starts with a '5' record and ends with an '8' record
each batch contains zero or more entries ('6' records)
If that is all correct, then you need 3 tables that would look something like:
File table:
Field Type
----------- --------
FileID integer # unique file ID - see AUTO_INCREMENT in the MySQL reference
FName varchar
FData longblob
FileHeader longblob # '1' record
FileControl longblob # '9' record
Batch table:
Field Type
----------- --------
FileID integer # references a row in the File table
BatchID integer # unique batch ID
BatchHeader longblob # '5' record
BatchControl longblob # '8' record
BatchEntry table:
Field Type
----------- --------
BatchID integer # references a row in the Batch table
EntryId integer # unique file ID
Entry longblob # '6' record
That should get you started. Good luck.
Why don't you use a StringBuilder and append the required lines to it, then write that to the DB, instead of using plain strings? Separating each column would make the data tough to retrieve later if you need it. So declare a StringBuilder, append every line you require, and after reading them all, write the result to the DB.
// Accumulates every 5/6/8 record (there can be many per file) in StringBuilders,
// while the single-occurrence 1/9 records stay in plain strings.
string strFileHeader = string.Empty;
StringBuilder strBatchHeader = new StringBuilder();
StringBuilder strEntry = new StringBuilder();
StringBuilder strBtchcntrl = new StringBuilder();
string strFileCntrl = string.Empty;
using (StreamReader srRead = new StreamReader(filePath))
{
    while (srRead.Peek() >= 0)
    {
        strLine = srRead.ReadLine();
        // else-if: a line starts with exactly one record-type digit.
        if (strLine.StartsWith("1"))
        {
            strFileHeader = strLine;
        }
        else if (strLine.StartsWith("5"))
        {
            strBatchHeader.AppendLine(strLine);
        }
        else if (strLine.StartsWith("6"))
        {
            strEntry.AppendLine(strLine);
        }
        else if (strLine.StartsWith("8"))
        {
            strBtchcntrl.AppendLine(strLine);
        }
        else if (strLine.StartsWith("9"))
        {
            strFileCntrl = strLine;
        }
    }
}
// '@' prefix for MySQL named parameters — the '#' in the original post was a
// formatting artifact and would never bind to the placeholders in the SQL.
string strQuery = "insert into tblfiles(FName, FData,FileHeader,BatchHeader,Entry,BtchEntry,FileControl) values (@_FName,@_FData,@_FileHeader,@_BtchHeader,@_EntryDets,@_BtchCntrl,@_FileCntrl)";
MySqlCommand cmd = new MySqlCommand(strQuery);
cmd.Parameters.Add("@_FName", MySqlDbType.VarChar).Value = filename;
cmd.Parameters.Add("@_FData", MySqlDbType.LongBlob).Value = bytes;
cmd.Parameters.Add("@_FileHeader", MySqlDbType.LongBlob).Value = strFileHeader;
cmd.Parameters.Add("@_BtchHeader", MySqlDbType.LongBlob).Value = strBatchHeader.ToString();
cmd.Parameters.Add("@_EntryDets", MySqlDbType.LongBlob).Value = strEntry.ToString();
cmd.Parameters.Add("@_BtchCntrl", MySqlDbType.LongBlob).Value = strBtchcntrl.ToString();
cmd.Parameters.Add("@_FileCntrl", MySqlDbType.LongBlob).Value = strFileCntrl;
InsertUpdateData(cmd);