I am new to the MVC framework and am trying to figure out how to parse a CSV file so that only data from certain columns is saved to the database.
I am able to select the CSV file, upload it via the view, and pass it to my controller using the following code, as mentioned here: Codelocker
public ActionResult UploadMultipleFiles(FileUploadViewModel fileModel)
{
//open file
if (Request.Files.Count == 1)
{
//get file
var postedFile = Request.Files[0];
if (postedFile.ContentLength > 0)
{
//read data from input stream
using (var csvReader = new System.IO.StreamReader(postedFile.InputStream))
{
string inputLine = "";
//read each line
while ((inputLine = csvReader.ReadLine()) != null)
{
//get lines values
string[] values = inputLine.Split(new char[] { ',' });
for (int x = 0; x < values.Length; x++)
{
//do something with each line and split value
}
}
csvReader.Close();
}
}
}
return View("Index");
}
However, I am not really sure how to select only the required columns from the CSV file and store them in the database.
Any suggestions?
Solved the problem by creating a method that builds a DataTable with the required columns, then using a StreamReader to loop through the lines and pick out only those columns:
[HttpPost]
public ActionResult UploadMultipleFiles()
{
FileUploadService service = new FileUploadService();
var postedFile = Request.Files[0];
StreamReader sr = new StreamReader(postedFile.InputStream);
StringBuilder sb = new StringBuilder();
DataTable dt = CreateTable();
DataRow dr;
string s;
int j = 0;
//read each line; the first row contains the headers and is skipped
while ((s = sr.ReadLine()) != null)
{
if (j > 0)
{
string[] str = s.Split(',');
dr = dt.NewRow();
dr["Postcode"] = str[0];
dr["Latitude"] = str[2];
dr["Longitude"] = str[3];
dr["County"] = str[7];
dr["District"] = str[8];
dr["Ward"] = str[9];
dr["CountryRegion"] = str[12];
dt.Rows.Add(dr);
}
j++;
}
service.SaveFilesDetails(dt);
sr.Close();
return View("Index");
}
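CreateTable() and FileUploadService.SaveFilesDetails() are not shown above; for reference, here is a minimal sketch of what CreateTable() might look like (the column names and types are assumptions inferred from the assignments in the loop):

private DataTable CreateTable()
{
    // Hypothetical helper: build the DataTable schema the import loop expects.
    var dt = new DataTable("Postcodes");
    dt.Columns.Add("Postcode", typeof(string));
    dt.Columns.Add("Latitude", typeof(string));
    dt.Columns.Add("Longitude", typeof(string));
    dt.Columns.Add("County", typeof(string));
    dt.Columns.Add("District", typeof(string));
    dt.Columns.Add("Ward", typeof(string));
    dt.Columns.Add("CountryRegion", typeof(string));
    return dt;
}

SaveFilesDetails(dt) could then hand the table to something like SqlBulkCopy with column mappings for the destination table, though that part is not shown in the original post.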
Related
I have a program that parses a CSV file from the local filesystem into a specified SQL Server table.
When I execute the program I get this error:
System.IndexOutOfRangeException: 'Cannot find column 1' on the line where the program attempts to populate the DataTable.
On closer inspection, the error appears to originate from row number 3, as shown at this link:
CSV_ERROR
This is how I am reading and saving the CSV file:
static void Main(string[] args)
{
var absPath = @"C:\Users\user\Documents\Projects\MastercardSurveillance\fbc_mc_all_cards.csv";
ProcessFile();
void ProcessFile()
{
string realPath = @"C:\Users\user\Documents\CSV";
string appLog = "CSVERRORS";
var logPath = realPath + Convert.ToString(appLog) + DateTime.Today.ToString("dd -MM-yy") + ".txt";
if (!File.Exists(logPath))
{
File.Create(logPath).Dispose();
}
var dt = GetDATATable();
if (dt == null)
{
return;
}
if (dt.Rows.Count == 0)
{
using (StreamWriter sw = File.AppendText(logPath))
{
sw.WriteLine("No rows imported after reading file " + absPath);
sw.Flush();
sw.Close();
}
return;
}
ClearData();
InsertDATA();
}
DataTable GetDATATable()
{
var FilePath = absPath;
string TableName = "Cards";
string realPath = @"C:\Users\user\Documents\CSV";
string appLog = "CSVERRORS";
var logPath = realPath + Convert.ToString(appLog) + DateTime.Today.ToString("dd -MM-yy") + ".txt";
if (!File.Exists(logPath))
{
File.Create(logPath).Dispose();
}
var dt = new DataTable(TableName);
using (var csvReader = new TextFieldParser(FilePath))
{
csvReader.SetDelimiters(new string[] { "," });
csvReader.HasFieldsEnclosedInQuotes = true;
var readFields = csvReader.ReadFields();
if (readFields == null)
{
using (StreamWriter sw = File.AppendText(logPath))
{
sw.WriteLine("Could not read header fields for file " + FilePath);
sw.Flush();
sw.Close();
}
return null;
}
foreach (var dataColumn in readFields.Select(column => new DataColumn(column, typeof(string)) { AllowDBNull = true, DefaultValue = string.Empty }))
{
dt.Columns.Add(dataColumn);
}
while (!csvReader.EndOfData)
{
var data = csvReader.ReadFields();
if (data == null)
{
using (StreamWriter sw = File.AppendText(logPath))
{
sw.WriteLine(string.Format("Could not read fields on line {0} for file {1}", csvReader.LineNumber, FilePath));
sw.Flush();
sw.Close();
}
continue;
}
var dr = dt.NewRow();
for (var i = 0; i < data.Length; i++)
{
if (!string.IsNullOrEmpty(data[i]))
{
dr[i] = data[i];
}
}
dt.Rows.Add(dr);
}
}
return dt;
}
void ClearData()
{
string SqlSvrConn = @"Server=XXXXXX-5QFK4BL\MSDEVOPS;Database=McardSurveillance;Trusted_Connection=True;MultipleActiveResultSets=true;";
using (var sqlConnection = new SqlConnection(SqlSvrConn))
{
sqlConnection.Open();
// Truncate the live table
using (var sqlCommand = new SqlCommand(_truncateLiveTableCommandText, sqlConnection))
{
sqlCommand.ExecuteNonQuery();
}
}
}
void InsertDATA()
{
string SqlSvrConn = @"Server=XXXXXX-5QFK4BL\MSDEVOPS;Database=McardSurveillance;Trusted_Connection=True;MultipleActiveResultSets=true;";
DataTable table = GetDATATable();
using (var sqlBulkCopy = new SqlBulkCopy(SqlSvrConn))
{
sqlBulkCopy.DestinationTableName = "dbo.Cards";
for (var count = 0; count < table.Columns.Count; count++)
{
sqlBulkCopy.ColumnMappings.Add(count, count);
}
sqlBulkCopy.WriteToServer(table);
}
}
}
How can I identify and possibly exclude the extra data columns being returned from the CSV file?
It appears there is a mismatch between the number of columns in the DataTable and the number of columns being read from the CSV file.
I am not sure, however, how to account for this in my logic. For now I do not want to switch to a CSV parsing package; rather, I need insight into how to remove the extra column, or how to ensure that the splitting accounts for all possible problematic characters.
For clarity, I have a copy of the CSV file here:
CSV_FILE
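The exception suggests that some data rows contain more fields than the header row defined. One defensive option, sketched below under the assumption that extra trailing fields can simply be ignored (AddRowSafely is a hypothetical helper, not part of the original code), is to clamp the copy loop to the smaller of the two counts and log any mismatch:

// Hypothetical helper: populate a DataRow defensively instead of indexing past
// the last column, logging any field-count mismatch for later inspection.
static void AddRowSafely(DataTable dt, string[] data, long lineNumber, string logPath)
{
    if (data.Length != dt.Columns.Count)
    {
        File.AppendAllText(logPath, string.Format(
            "Line {0}: expected {1} fields but found {2}{3}",
            lineNumber, dt.Columns.Count, data.Length, Environment.NewLine));
    }

    var dr = dt.NewRow();
    int fieldCount = Math.Min(data.Length, dt.Columns.Count);
    for (var i = 0; i < fieldCount; i++)
    {
        if (!string.IsNullOrEmpty(data[i]))
        {
            dr[i] = data[i];
        }
    }
    dt.Rows.Add(dr);
}

Inside GetDATATable, the row-population block would then call AddRowSafely(dt, data, csvReader.LineNumber, logPath) instead of indexing data directly.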
I want to add a new column with a checkbox. My data comes from a CSV file and is shown in a DataGridView with this code:
DataTable dtDataSource = new DataTable();
string[] fileContent = File.ReadAllLines(@"\data.csv");
if (fileContent.Count() > 0)
{
//Create data table columns
dtDataSource.Columns.Add("ID);
dtDataSource.Columns.Add("Data 1");
dtDataSource.Columns.Add("Data 2");
dtDataSource.Columns.Add("Status");
//Add row data dynamically
for (int i = 1; i < fileContent.Count(); i++)
{
string[] rowData = fileContent[i].Split(',');
dtDataSource.Rows.Add(rowData);
}
if (dtDataSource != null)
{
dataGridView1.DataSource = dtDataSource;
}
}
I also need to handle the checkbox: for every row of the DataGridView, if the checkbox is checked the value in the "Status" column must be changed to 1, and if it is unchecked the value must be 0.
Example:
ID,Data1,Data2,Status,checkbox
1,aaa,bbb,0,✓
2,ccc,ddd,1,(unchecked)
3,eee,fff,1,(unchecked)
When you click the save button, the CSV file should look like this:
ID,Data1,Data2,Status
1,aaa,bbb,1
2,ccc,ddd,0
3,eee,fff,0
What should I do? Any ideas? Working with CSV files is a little difficult for me.
Thank you!
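For the checkbox column itself, here is a minimal sketch, assuming an unbound DataGridViewCheckBoxColumn inserted in front of the CSV-bound columns (the column name "Selected" is illustrative):

// Sketch: add an unbound checkbox column after binding the DataTable.
var checkColumn = new DataGridViewCheckBoxColumn
{
    Name = "Selected",
    HeaderText = "Select",
    TrueValue = true,
    FalseValue = false
};
dataGridView1.Columns.Insert(0, checkColumn);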
I resolved this, thanks anyway.
This is the code:
string id;
for (int i = 0; i < dataGridView1.RowCount; i++) {
String path = "\\registros.csv";
List<String> lines = new List<String>();
if (File.Exists(path))
{
using (StreamReader reader = new StreamReader(path))
{
String line;
while ((line = reader.ReadLine()) != null)
{
id = (string)dataGridView1.Rows[i].Cells[2].Value;
if (line.Contains(","))
{
String[] split = line.Split(',');
if (split[1].Equals(id) && (bool)dataGridView1.Rows[i].Cells[0].FormattedValue == true)
{
split[10] = "" + 1;
line = String.Join(",", split);
}
if (split[1].Equals(id) && (bool)dataGridView1.Rows[i].Cells[0].FormattedValue == false)
{
split[10] = "" + 0;
line = String.Join(",", split);
}
}
lines.Add(line);
}
}
using (StreamWriter writer = new StreamWriter(path, false))
{
foreach (String line in lines)
writer.WriteLine(line);
}
}
}
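A note on the approach: the loop above re-reads the whole CSV once per grid row. Since the grid is already bound to a DataTable, an alternative sketch (assuming the checkbox sits in column 0 and the file can be rebuilt from the grid's current contents; the path mirrors the one used above) writes the file only once:

// Sketch: rebuild the CSV once from the grid, writing 1/0 from the checkbox state.
var lines = new List<string> { "ID,Data1,Data2,Status" };
foreach (DataGridViewRow row in dataGridView1.Rows)
{
    if (row.IsNewRow) continue;
    bool isChecked = (row.Cells[0].FormattedValue as bool?) ?? false;
    lines.Add(string.Join(",",
        row.Cells["ID"].Value,
        row.Cells["Data 1"].Value,
        row.Cells["Data 2"].Value,
        isChecked ? "1" : "0"));
}
File.WriteAllLines("\\registros.csv", lines);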
How do I load data from a text/dat file into a DataTable in C#? Here I need to dynamically generate columns based on the data in the text file.
private static System.Data.DataTable SplitColumns()
{
System.Data.DataTable table = new System.Data.DataTable("dataFromFile");
string file="textfile.txt" ==>Get file which you want to split into columns
using (StreamReader sr = new StreamReader(file))
{
string line;
int rowsCount = 0;
while ((line = sr.ReadLine()) != null)
{
string[] data = line.Split(new string[] { "\t", " " }, StringSplitOptions.RemoveEmptyEntries); // split the line into column values using a tab (or space) delimiter; use your own delimiter as needed
if(table.Columns.Count==0)
{
for (int i = 1; i <= data.Length; i++)
{
table.Columns.AddRange(new DataColumn[] { new DataColumn("col" + i.ToString(), typeof(string)) }); // dynamically create the column headers based on the number of values in the line
}
}
table.Rows.Add();
for (int i = 0; i < data.Length; i++)
{
if (data[i].Contains(" "))
data[i] = data[i].Replace(" ", "");
if (!data[i].Equals(""))
table.Rows[rowsCount][i] = data[i];
}
rowsCount++;
}
}
return table;
}
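A short usage sketch for the method above (the file path inside SplitColumns is hard-coded, so this just loads and inspects the result):

// Sketch: build the table and report what was parsed; in a WinForms app the
// table could instead be bound to a grid (e.g. dataGridView1.DataSource = table).
DataTable table = SplitColumns();
Console.WriteLine("Parsed {0} columns and {1} rows.", table.Columns.Count, table.Rows.Count);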
I have a query that gets report data via a SqlDataReader and sends the SqlDataReader to a method that exports the content to a .CSV file; however, the column names show up in the .CSV file the way they appear in the database, which is not ideal.
I do not want to alter the query itself (changing the names to have spaces) because it is called in another location where it maps to an object, and spaces would not work there. I would prefer not to create a duplicate query because maintenance could be problematic. I also do not want to modify the method that writes out the .CSV, as it is used globally.
Can I modify the column names after I fill the data reader but before I send it to the .CSV method? If so, how?
If I can't do it this way, could I do it if it was a DataTable instead?
Here is the general flow:
public static SqlDataReader RunMasterCSV(Search search)
{
SqlDataReader reader = null;
using (Network network = new Network())
{
using (SqlCommand cmd = new SqlCommand("dbo.MasterReport"))
{
cmd.CommandType = CommandType.StoredProcedure;
//Parameters here...
network.FillSqlReader(cmd, ref reader);
// Ideally I would like to find a solution here
return reader;
}
}
}
public FileInfo CSVFileWriter(SqlDataReader reader)
{
DeleteOldFolders();
FileInfo file = null;
if (reader != null)
{
using (reader)
{
var WriteDirectory = GetExcelOutputDirectory();
double folderToSaveInto = Math.Ceiling((double)DateTime.Now.Hour / Folder_Age_Limit.TotalHours);
string uploadFolder = GetExcelOutputDirectory() + "\\" + DateTime.Now.ToString("ddMMyyyy") + "_" + folderToSaveInto.ToString();
//Add directory for today if one does not exist
if (!Directory.Exists(uploadFolder))
Directory.CreateDirectory(uploadFolder);
//Generate random GUID fileName
file = new FileInfo(uploadFolder + "\\" + Guid.NewGuid().ToString() + ".csv");
if (file.Exists)
file.Delete();
using (file.Create()) { /*kill the file stream immediately*/};
StringBuilder sb = new StringBuilder();
if (reader.Read())
{
//write the column names
for (int i = 0; i < reader.FieldCount; i++)
{
AppendValue(sb, reader.GetName(i), (i == reader.FieldCount - 1));
}
//write the first data row's values
for (int i = 0; i < reader.FieldCount; i++)
{
AppendValue(sb, reader[i] == DBNull.Value ? "" : reader[i].ToString(), (i == reader.FieldCount - 1));
}
int rowcounter = 1;
while (reader.Read())
{
for (int i = 0; i < reader.FieldCount; i++)
{
AppendValue(sb, reader[i] == DBNull.Value ? "" : reader[i].ToString(), (i == reader.FieldCount - 1));
}
rowcounter++;
if (rowcounter == MaxRowChunk)
{
using (var sw = file.AppendText())
{
sw.Write(sb.ToString());
sw.Close();
sw.Dispose();
}
sb = new StringBuilder();
rowcounter = 0;
}
}
if (sb.Length > 0)
{
//write the last bit
using (var sw = file.AppendText())
{
sw.Write(sb.ToString());
sw.Close();
sw.Dispose();
sb = new StringBuilder();
}
}
}
}
}
return file;
}
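On the question's second point (doing it with a DataTable instead), here is a minimal sketch, assuming it is acceptable to buffer the result set in memory and to add a CSV-writer overload that accepts a DataTable; the friendly-name mapping is purely illustrative:

// Sketch: buffer the reader into a DataTable, then rename columns before export.
DataTable table = new DataTable();
table.Load(reader); // consumes the SqlDataReader

var friendlyNames = new Dictionary<string, string>
{
    { "CustomerName", "Customer Name" },
    { "OrderDate", "Order Date" }
};

foreach (DataColumn column in table.Columns)
{
    string friendly;
    if (friendlyNames.TryGetValue(column.ColumnName, out friendly))
        column.ColumnName = friendly;
}
// The renamed table would then be passed to a DataTable-based CSV writer.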
I would try refactoring your CSVFileWriter.
First, add a delegate declaration:
public delegate string onColumnRename(string columnName);
Then create an overload of CSVFileWriter that takes the delegate together with the reader:
public FileInfo CSVFileWriter(SqlDataReader reader, onColumnRename renamer)
{
// Move here all the code of the old CSVFileWriter
.....
}
Move the code of the previous CSVFileWriter to the new method and, from the old one, call the new one:
public FileInfo CSVFileWriter(SqlDataReader reader)
{
// Pass null for the delegate to the new version of CSVFileWriter....
return this.CSVFileWriter(reader, null)
}
This will keep existing clients of the old method happy; for them, nothing has changed.
Inside the new version of CSVFileWriter, change the code that prepares the column names:
for (int i = 0; i < reader.FieldCount; i++)
{
string colName = (renamer != null) ? renamer(reader.GetName(i)) : reader.GetName(i);
AppendValue(sb, colName, (i == reader.FieldCount - 1));
}
Now it is just a matter of creating the renamer function that translates your column names:
private string myColumnRenamer(string columnName)
{
if(columnName == "yourNameWithoutSpaces")
return "your Name with Spaces";
else
return columnName;
}
This could be optimized with a static dictionary to remove the list of ifs.
At this point you could call the new CSVFileWriter, passing your function:
FileInfo fi = CSVFileWriter(reader, myColumnRenamer);
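The static-dictionary optimization mentioned above could look like this (the name mappings are placeholders):

// Sketch: table-driven renamer instead of a chain of ifs.
private static readonly Dictionary<string, string> ColumnNameMap =
    new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
    {
        { "OrderDate", "Order Date" },
        { "CustomerName", "Customer Name" }
    };

private string myColumnRenamer(string columnName)
{
    string friendly;
    return ColumnNameMap.TryGetValue(columnName, out friendly) ? friendly : columnName;
}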
Hi all, I have CSV files in this format:
**CSV Format1**
||OrderGUID||OrderItemID||Qty||SKUID||TrackingNumber||TotalWeight||DateShipped||DateDelivered||ShippingStatusId||OrderShippingAddressId
||5 ||3 ||2 ||12312||aasdasd ||24 ||2012-12-2010|| || 10025 ||10028
||5 ||4 ||3 ||113123||adadasdasd ||22 ||2012-12-2012|| ||10026 ||10028
**CSV Format2**
||"OrderGUID"||"OrderItemID"||"Qty"||"SKUID"||"TrackingNumber"||"TotalWeight"||"DateShipped"||"DateDelivered"||"ShippingStatusId"||"OrderShippingAddressId"||
||"5" ||"3" ||"2" ||"12312"||"aasdasd" ||"24" ||"2012-12-2010"||"" || "10025" ||"10028"||
||"5" ||"4" ||"3" ||"113123"||"adadasdasd" ||"22" ||"2012-12-2012"|| "2012-12-2010" ||"10026" ||"10028"||
I have to read these files without saving them on the server. Can anyone help me? How can I read these files and insert the data into my database? How can I trim the special characters from the files?
This is what I am trying to do for the file upload:
[AcceptVerbs(HttpVerbs.Post)]
public ActionResult ImportTrackingNumber(FormCollection form,HttpPostedFileBase UploadedFile,TrackingNumbersModel Trackingnumbers)
{
if (UploadedFile != null)
{
var allowedExtensions = new[] {".xlsx", ".csv"};
if (UploadedFile.ContentLength > 0)
{
var extension = Path.GetExtension(UploadedFile.FileName);
if (extension == ".xlsx")
{
//Need To code For Excel Files Reading
}
else if (extension == ".csv")
{
//string filename = Path.GetFileName(UploadedFile.PostedFile.InputStream);
StreamReader csvreader = new StreamReader(UploadedFile.FileName);
DataTable dt;
}
}
}
return View();
}
Just an example of how you can read the uploaded file without saving it on the server:
// Use the InputStream to get the actual stream sent.
using (StreamReader csvReader = new StreamReader(UploadedFile.InputStream))
{
while (!csvReader.EndOfStream)
{
var line = csvReader.ReadLine();
var values = line.Split(';');
}
}
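Because the sample rows are delimited with "||" (and the second format wraps every field in quotes), the Split(';') in the snippet above would need to be adapted. Here is a sketch, assuming the leading and trailing delimiters and the surrounding quotes can simply be stripped (requires System.Linq):

// Sketch: split each uploaded line on "||" and trim quotes/whitespace per field.
using (StreamReader csvReader = new StreamReader(UploadedFile.InputStream))
{
    while (!csvReader.EndOfStream)
    {
        string line = csvReader.ReadLine();
        if (string.IsNullOrWhiteSpace(line)) continue;

        string[] values = line
            .Split(new[] { "||" }, StringSplitOptions.RemoveEmptyEntries)
            .Select(v => v.Trim().Trim('"'))
            .ToArray();
        // For data rows: values[0] = OrderGUID, values[1] = OrderItemID, ...
        // insert into the database here.
    }
}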
This is my code:
public static DataTable GetDataTabletFromCSVFile(HttpPostedFileBase file)
{
DataTable csvDataTable = new DataTable();
// Read bytes from http input stream
var csvBody = string.Empty;
using (BinaryReader b = new BinaryReader(file.InputStream))
{
byte[] binData = b.ReadBytes(file.ContentLength);
csvBody = Encoding.UTF8.GetString(binData);
}
var memoryStream = new MemoryStream();
var streamWriter = new StreamWriter(memoryStream);
streamWriter.Write(csvBody);
streamWriter.Flush();
memoryStream.Position = 0;
using (TextFieldParser csvReader = new TextFieldParser(memoryStream))
{
csvReader.SetDelimiters(new string[] { "," });
csvReader.HasFieldsEnclosedInQuotes = true;
string[] colFields = csvReader.ReadFields();
foreach (string column in colFields)
{
DataColumn datecolumn = new DataColumn(column);
datecolumn.AllowDBNull = true;
csvDataTable.Columns.Add(datecolumn);
}
while (!csvReader.EndOfData)
{
string[] fieldData = csvReader.ReadFields();
//Treat empty values as null
for (int i = 0; i < fieldData.Length; i++)
{
if (fieldData[i] == "")
{
fieldData[i] = null;
}
}
csvDataTable.Rows.Add(fieldData);
}
}
return csvDataTable;
}
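A short usage sketch from a controller action (the action name and the follow-up persistence step are illustrative):

// Sketch: call the helper from a POST action and hand the table to persistence code.
[HttpPost]
public ActionResult ImportCsv(HttpPostedFileBase uploadedFile)
{
    if (uploadedFile != null && uploadedFile.ContentLength > 0)
    {
        DataTable csvData = GetDataTabletFromCSVFile(uploadedFile);
        // e.g. pass csvData to a service or SqlBulkCopy here.
    }
    return View("Index");
}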