I have two issues when converting a DataTable to Excel (via CSV):
The date format is mangled: for example, my original data is '2016-12-31', but in Excel it becomes '31-12-16'.
Excel removes leading zeroes: for example, '01' becomes '1'.
Here's the code
MemoryStream result = new MemoryStream();
StreamWriter writer = new StreamWriter(result);
CSVwriter csv = new CSVwriter();
string content = csv.GetFromTable(table);
writer.Write(content);
writer.Flush();
return result;
The GetFromTable method:
StringBuilder str = new StringBuilder();
StringBuilder rowstr = new StringBuilder();
string Encloser = "\"";
foreach (DataRow row in table.Rows) {
    rowstr.Length = 0;
    foreach (DataColumn col in table.Columns) {
        if (col.Ordinal > 0) {
            rowstr.Append(",");
        }
        object obj = row[col];
        rowstr.Append(Encloser + ToStr(obj) + Encloser);
    }
    if (rowstr.Length > 0) {
        str.AppendLine(rowstr.ToString());
    }
}
return str.ToString();
UPDATE:
str.ToString() result
"Date","Code"
"2016-01-31","01"
"2016-01-31","02"
Related
I have a program that parses a CSV file from the local filesystem into a specified SQL Server table.
Now when I execute the program I get this error:
System.IndexOutOfRangeException: 'Cannot find column 1' on the line where the program attempts to populate the DataTable.
On closer inspection, the error shows that it is emanating from row number 3, as shown in this link:
CSV_ERROR
This is how I am reading and saving the CSV file:
static void Main(string[] args)
{
var absPath = @"C:\Users\user\Documents\Projects\MastercardSurveillance\fbc_mc_all_cards.csv";
ProcessFile();
void ProcessFile()
{
string realPath = @"C:\Users\user\Documents\CSV";
string appLog = "CSVERRORS";
var logPath = realPath + Convert.ToString(appLog) + DateTime.Today.ToString("dd-MM-yy") + ".txt";
if (!File.Exists(logPath))
{
File.Create(logPath).Dispose();
}
var dt = GetDATATable();
if (dt == null)
{
return;
}
if (dt.Rows.Count == 0)
{
using (StreamWriter sw = File.AppendText(logPath))
{
sw.WriteLine("No rows imported after reading file " + absPath);
sw.Flush();
sw.Close();
}
return;
}
ClearData();
InsertDATA();
}
DataTable GetDATATable()
{
var FilePath = absPath;
string TableName = "Cards";
string realPath = @"C:\Users\user\Documents\CSV";
string appLog = "CSVERRORS";
var logPath = realPath + Convert.ToString(appLog) + DateTime.Today.ToString("dd-MM-yy") + ".txt";
if (!File.Exists(logPath))
{
File.Create(logPath).Dispose();
}
var dt = new DataTable(TableName);
using (var csvReader = new TextFieldParser(FilePath))
{
csvReader.SetDelimiters(new string[] { "," });
csvReader.HasFieldsEnclosedInQuotes = true;
var readFields = csvReader.ReadFields();
if (readFields == null)
{
using (StreamWriter sw = File.AppendText(logPath))
{
sw.WriteLine("Could not read header fields for file " + FilePath);
sw.Flush();
sw.Close();
}
return null;
}
foreach (var dataColumn in readFields.Select(column => new DataColumn(column, typeof(string)) { AllowDBNull = true, DefaultValue = string.Empty }))
{
dt.Columns.Add(dataColumn);
}
while (!csvReader.EndOfData)
{
var data = csvReader.ReadFields();
if (data == null)
{
using (StreamWriter sw = File.AppendText(logPath))
{
sw.WriteLine(string.Format("Could not read fields on line {0} for file {1}", csvReader.LineNumber, FilePath));
sw.Flush();
sw.Close();
}
continue;
}
var dr = dt.NewRow();
for (var i = 0; i < data.Length; i++)
{
if (!string.IsNullOrEmpty(data[i]))
{
dr[i] = data[i];
}
}
dt.Rows.Add(dr);
}
}
return dt;
}
void ClearData()
{
string SqlSvrConn = @"Server=XXXXXX-5QFK4BL\MSDEVOPS;Database=McardSurveillance;Trusted_Connection=True;MultipleActiveResultSets=true;";
using (var sqlConnection = new SqlConnection(SqlSvrConn))
{
sqlConnection.Open();
// Truncate the live table
using (var sqlCommand = new SqlCommand(_truncateLiveTableCommandText, sqlConnection))
{
sqlCommand.ExecuteNonQuery();
}
}
}
void InsertDATA()
{
string SqlSvrConn = @"Server=XXXXXX-5QFK4BL\MSDEVOPS;Database=McardSurveillance;Trusted_Connection=True;MultipleActiveResultSets=true;";
DataTable table = GetDATATable();
using (var sqlBulkCopy = new SqlBulkCopy(SqlSvrConn))
{
sqlBulkCopy.DestinationTableName = "dbo.Cards";
for (var count = 0; count < table.Columns.Count; count++)
{
sqlBulkCopy.ColumnMappings.Add(count, count);
}
sqlBulkCopy.WriteToServer(table);
}
}
}
How can I identify and possibly exclude the extra data columns being returned from the CSV file?
It appears there is a mismatch between the number of columns in the DataTable and the number of fields being read from the CSV file.
I'm not sure, however, how I can account for this in my logic. For now I did not want to switch to a CSV parsing package; rather, I need insight on how I can remove the extra column, or ensure that the splitting accounts for all possible dubious characters.
For clarity, I have a copy of the CSV file here:
CSV_FILE
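Not part of the original question, but a minimal sketch of how the row-population loop inside GetDATATable could be made defensive, assuming the same dt, csvReader and logPath as above: log any line whose field count differs from the header, and never index past the DataTable's column count, so a stray delimiter shows up in the log instead of throwing 'Cannot find column N'.

var data = csvReader.ReadFields();
if (data == null)
{
    continue;
}
// Log lines whose field count differs from the header so the offending row can be inspected.
if (data.Length != dt.Columns.Count)
{
    using (StreamWriter sw = File.AppendText(logPath))
    {
        sw.WriteLine(string.Format("Line {0}: expected {1} fields but found {2}",
            csvReader.LineNumber, dt.Columns.Count, data.Length));
    }
}
var dr = dt.NewRow();
// Never index past the columns that actually exist in the DataTable.
int limit = Math.Min(data.Length, dt.Columns.Count);
for (var i = 0; i < limit; i++)
{
    if (!string.IsNullOrEmpty(data[i]))
    {
        dr[i] = data[i];
    }
}
dt.Rows.Add(dr);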
I have a CSV file in the same project with a Build Action of "Embedded Resource". When I try to create a stream from this file I get the following error:
System.IO.Stream.Null.ReadTimeout threw an exception of type System.InvalidOperationException
When I let it run I catch an exception that says:
Value cannot be null.\r\nParameter name: stream
I am not sure what is causing this. Is it possible that it's not picking up my file for some reason, and therefore nothing gets passed into the stream?
internal static void InsertPropertyDefinitions()
{
DataTable csvData = null;
string[] columnNames = null;
int rowCount = 0;
string[] streamDataValues = null;
try {
using (Stream stream = System.Reflection.Assembly.GetExecutingAssembly().GetManifestResourceStream(
"My csv file path"))
{
StreamReader streamReader = new StreamReader(stream);
while (!streamReader.EndOfStream)
{
String streamRowData = streamReader.ReadLine().Trim();
if (streamRowData.Length > 0)
{
streamDataValues = streamRowData.Split(',');
if (rowCount == 0)
{
rowCount = 1;
columnNames = streamRowData.Split(',');
csvData = new DataTable();
foreach (string csvColumn in columnNames)
{
DataColumn dataColumn = new DataColumn(csvColumn.ToUpper(), typeof(string));
dataColumn.DefaultValue = string.Empty;
csvData.Columns.Add(dataColumn);
}
}
else
{
DataRow dataRow = csvData.NewRow();
for (int i = 0; i < columnNames.Length; i++)
{
dataRow[columnNames[i]] = streamDataValues[i] == null ? string.Empty : streamDataValues[i].ToString();
}
csvData.Rows.Add(dataRow);
}
}
}
streamReader.Close();
streamReader.Dispose();
foreach (DataRow dataRow in csvData.Rows)
{
string rowValues = string.Empty;
foreach (string csvColumn in columnNames)
{
rowValues += csvColumn + "=" + dataRow[csvColumn].ToString() + "; ";
}
}
}
}
catch (Exception)
{
// catch body omitted in the original post
throw;
}
}
Try by resourceName:
var assembly = Assembly.GetExecutingAssembly();
var resourceName = "MyCompany.MyProduct.MyFile.csv";
using (Stream stream = assembly.GetManifestResourceStream(resourceName))
using (StreamReader reader = new StreamReader(stream))
{
...
}
The resource name is not necessarily equal to the name of the file. You can check using Assembly.GetManifestResourceNames().
Usually VS prepends the namespace to the resource name, so MyList.csv becomes NS.MyList.csv.
To get the names:
var assembly = System.Reflection.Assembly.GetExecutingAssembly();
var lst = assembly.GetManifestResourceNames();
Console.WriteLine("Files");
Console.WriteLine(string.Join(", ", lst));
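Since GetManifestResourceStream returns null when the name does not match exactly, a guard like the following (a sketch, not from the original answer; the resource name is a placeholder) makes the failure obvious instead of surfacing later as "Value cannot be null. Parameter name: stream":

var assembly = System.Reflection.Assembly.GetExecutingAssembly();
// "MyCompany.MyProduct.MyFile.csv" is illustrative; it must match one of GetManifestResourceNames().
Stream stream = assembly.GetManifestResourceStream("MyCompany.MyProduct.MyFile.csv");
if (stream == null)
{
    throw new InvalidOperationException(
        "Embedded resource not found. Available resources: " +
        string.Join(", ", assembly.GetManifestResourceNames()));
}
using (var reader = new StreamReader(stream))
{
    // read the CSV as usual
}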
I am trying to convert a string to a DataTable using the following method, but it didn't work:
public static DataTable convertStringToDataTable(string data)
{
DataTable dataTable = new DataTable();
bool columnsAdded = false;
foreach (string row in data.Split('\n'))
{
DataRow dataRow = dataTable.NewRow();
foreach (string cell in row.Split(','))
{
string[] keyValue = cell.Split('"');
if (!columnsAdded)
{
DataColumn dataColumn = new DataColumn(keyValue[0]);
dataTable.Columns.Add(dataColumn);
}
dataRow[keyValue[0]] = keyValue[1];
}
columnsAdded = true;
dataTable.Rows.Add(dataRow);
}
return dataTable;
}
The code that builds the data string:
StringWriter sw = new StringWriter();
sw.WriteLine("\"NumClient\",\"Raisons Sociale\",\"DateDocument\",\"NumCommandeNAV\",\"Réference\",\"Designation\",\"QteCommandée\",\"QteLivrée\",\"QteAnnulée\",\"Reste à Livrer\",\"Type Disponibilite\",\"DateDisponibilite\"");
var EnTete =
db.AURES_GROS_EnTeteCommande.Where(e => e.NumCommandeNAV != " " && e.NumCommandeNAV != "_")
.OrderBy(x => x.CodeMagasin)
.ThenBy(s => s.NumClient)
.ThenBy(c => c.DateDocument)
.OrderByDescending(x => x.NumCommandeNAV)
.ToList();
foreach (var element in EnTete)
{
string statut = RecuperStatut(element.NumCommandeNAV);
if (statut == "A livrer")
{
Raison = context.Users.First(x => x.No_ == element.NumClient).RaisonSociale;
lignes = db.AURES_GROS_LigneCommande.Where(x => x.NumDocument == element.NumDocument).ToList();
foreach (var elt in lignes)
{
sw.WriteLine(string.Format("\"{0}\",\"{1}\",\"{2}\",\"{3}\",\"{4}\",\"{5}\",\"{6}\",\"{7}\",\"{8}\",\"{9}\",\"{10}\",\"{11}\"",
element.NumClient,
Raison,
element.DateDocument,
element.NumCommandeNAV,
elt.CodeArticle,
elt.Designation,
elt.Quantite,
0,
elt.QteANNULEE,
elt.Quantite,
element.Couleur,
elt.DateDisponibilite
));
}
}
}
DataTable t = convertStringToDataTable(sw.ToString());
Response.ClearContent();
Response.ClearHeaders();
Response.BufferOutput = true;
Response.ContentType = "text/excel";
Response.AddHeader("Content-Disposition", "attachment; filename=Reliquat" + DateTime.Now.ToString("yyyy_MM_dd") + ".csv");
Response.Write(t);
Response.Flush();
Response.Close();
//Response.End();
In this code I want to export the data to an Excel file.
Does anyone have a solution?
Thanks,
I think you want to create a DataTable from a string, so first split the rows and then the columns. You are adding the DataColumns inside the row loop; you need to do that only once, before the loop. Here is another implementation which handles this and other edge cases you haven't considered yet:
public static DataTable ConvertStringToDataTable(string data)
{
DataTable dataTable = new DataTable();
// extract all lines:
string[] lines = data.Split(new string[] { "\r\n", "\n" }, StringSplitOptions.RemoveEmptyEntries);
string header = lines.FirstOrDefault();
if (header == null)
return dataTable;
// first create the columns:
string[] columns = header.Split(','); // using commas as delimiter is brave ;)
foreach (string col in columns)
dataTable.Columns.Add(col.Trim());
foreach (string line in lines.Skip(1))
{
string[] fields = line.Split(',');
if(fields.Length != dataTable.Columns.Count)
continue; // should not happen
DataRow dataRow = dataTable.Rows.Add();
for (int i = 0; i < fields.Length; i++)
dataRow.SetField(i, fields[i]);
}
return dataTable;
}
You can convert your column foreach to a for loop.
public static DataTable convertStringToDataTable(string data)
{
DataTable dataTable = new DataTable();
bool columnsAdded = false;
foreach (string row in data.Split('\n'))
{
DataRow dataRow = dataTable.NewRow();
string[] cell = row.Split(',');
for (int i = 0; i < cell.Length; i++)
{
string[] keyValue = cell[i].Split('"');
if (!columnsAdded)
{
DataColumn dataColumn = new DataColumn();
dataTable.Columns.Add(dataColumn);
}
dataRow[i] = keyValue[1];
}
columnsAdded = true;
dataTable.Rows.Add(dataRow);
}
return dataTable;
}
However, if your split string[] keyValue = cell.Split('"'); is not returning what you expect, you may need to investigate further.
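As a side note (not part of the original answer), splitting on the quote character is fragile when a field has no quotes at all; for simple quoted values, trimming the surrounding quotes is usually more robust. A minimal sketch:

// Hedged sketch: strip surrounding quotes and whitespace from a raw CSV field.
// It does not handle embedded commas or escaped quotes; a real parser is needed for those.
static string Unquote(string field)
{
    return field.Trim().Trim('"');
}

// e.g. Unquote("\"NumClient\"") returns "NumClient", and Unquote("12345") is unchanged.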
Hi all, I have CSV files which are in this format:
**CSV Format1**
||OrderGUID||OrderItemID||Qty||SKUID||TrackingNumber||TotalWeight||DateShipped||DateDelivered||ShippingStatusId||OrderShippingAddressId
||5 ||3 ||2 ||12312||aasdasd ||24 ||2012-12-2010|| || 10025 ||10028
||5 ||4 ||3 ||113123||adadasdasd ||22 ||2012-12-2012|| ||10026 ||10028
**CSV Format2**
||"OrderGUID"||"OrderItemID"||"Qty"||"SKUID"||"TrackingNumber"||"TotalWeight"||"DateShipped"||"DateDelivered"||"ShippingStatusId"||"OrderShippingAddressId"||
||"5" ||"3" ||"2" ||"12312"||"aasdasd" ||"24" ||"2012-12-2010"||"" || "10025" ||"10028"||
||"5" ||"4" ||"3" ||"113123"||"adadasdasd" ||"22" ||"2012-12-2012"|| "2012-12-2010" ||"10026" ||"10028"||
I have to read these files without saving them on the server. Can anyone help me? How can I read these files and insert them into my DB? How can I trim the special characters from the files?
This is what I am trying to do for the file upload:
[AcceptVerbs(HttpVerbs.Post)]
public ActionResult ImportTrackingNumber(FormCollection form,HttpPostedFileBase UploadedFile,TrackingNumbersModel Trackingnumbers)
{
if (UploadedFile != null)
{
var allowedExtensions = new[] {".xlsx", ".csv"};
if (UploadedFile.ContentLength > 0)
{
var extension = Path.GetExtension(UploadedFile.FileName);
if (extension == ".xlsx")
{
//Need To code For Excel Files Reading
}
else if (extension == ".csv")
{
//string filename = Path.GetFileName(UploadedFile.PostedFile.InputStream);
StreamReader csvreader = new StreamReader(UploadedFile.FileName);
DataTable dt;
}
}
}
return View();
}
Just an example of how you can read the uploaded file without saving it on the server:
// Use the InputStream to get the actual stream sent.
using (StreamReader csvReader = new StreamReader(UploadedFile.InputStream))
{
while (!csvReader.EndOfStream)
{
var line = csvReader.ReadLine();
var values = line.Split(';');
}
}
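Building on that, here is a sketch (not from the original answer; it assumes using System.Data, System.IO and System.Linq) that splits the ||-delimited format shown in the question, trims the quotes and whitespace, and loads the result into a DataTable:

DataTable dt = new DataTable();
using (StreamReader csvReader = new StreamReader(UploadedFile.InputStream))
{
    bool isHeader = true;
    while (!csvReader.EndOfStream)
    {
        var line = csvReader.ReadLine();
        if (string.IsNullOrWhiteSpace(line)) continue;
        // Split on the double-pipe delimiter and strip quotes/whitespace from every field.
        var fields = line.Split(new[] { "||" }, StringSplitOptions.None)
                         .Select(f => f.Trim().Trim('"').Trim())
                         .Skip(1)   // lines start with a leading ||, so the first field is empty
                         .ToArray();
        if (isHeader)
        {
            foreach (var name in fields.Where(n => n.Length > 0))
                dt.Columns.Add(name);
            isHeader = false;
        }
        else
        {
            // Pad/truncate so the row always matches the column count.
            var row = dt.NewRow();
            for (int i = 0; i < dt.Columns.Count && i < fields.Length; i++)
                row[i] = fields[i];
            dt.Rows.Add(row);
        }
    }
}
// dt can then be bulk-inserted, for example with SqlBulkCopy.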
This is my code:
public static DataTable GetDataTabletFromCSVFile(HttpPostedFileBase file)
{
DataTable csvDataTable = new DataTable();
// Read bytes from http input stream
var csvBody = string.Empty;
using (BinaryReader b = new BinaryReader(file.InputStream))
{
byte[] binData = b.ReadBytes(file.ContentLength);
csvBody = Encoding.UTF8.GetString(binData);
}
var memoryStream = new MemoryStream();
var streamWriter = new StreamWriter(memoryStream);
streamWriter.Write(csvBody);
streamWriter.Flush();
memoryStream.Position = 0;
using (TextFieldParser csvReader = new TextFieldParser(memoryStream))
{
csvReader.SetDelimiters(new string[] { "," });
csvReader.HasFieldsEnclosedInQuotes = true;
string[] colFields = csvReader.ReadFields();
foreach (string column in colFields)
{
DataColumn datecolumn = new DataColumn(column);
datecolumn.AllowDBNull = true;
csvDataTable.Columns.Add(datecolumn);
}
while (!csvReader.EndOfData)
{
string[] fieldData = csvReader.ReadFields();
//Making empty value as null
for (int i = 0; i < fieldData.Length; i++)
{
if (fieldData[i] == "")
{
fieldData[i] = null;
}
}
csvDataTable.Rows.Add(fieldData);
}
}
return csvDataTable;
}
I need to read from a CSV/tab-delimited file, and write to such a file as well, from .NET.
The difficulty is that I don't know the structure of each file and need to load the CSV/tab file into a DataTable, which the FileHelpers library doesn't seem to support.
I've already written it for Excel using OLEDB, but I can't really see a way to write a tab file for this, so I will go back to a library.
Can anyone help with suggestions?
.NET comes with a CSV/tab-delimited file parser called the TextFieldParser class.
http://msdn.microsoft.com/en-us/library/microsoft.visualbasic.fileio.textfieldparser.aspx
It supports the full RFC for CSV files and has really good error reporting.
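A minimal sketch of loading an arbitrary delimited file into a DataTable with TextFieldParser (the file path and the assumption that the first line is a header row are illustrative):

// Requires a reference to Microsoft.VisualBasic and: using Microsoft.VisualBasic.FileIO;
DataTable table = new DataTable();
using (var parser = new TextFieldParser(@"data.txt"))
{
    parser.SetDelimiters("\t");               // or "," for CSV
    parser.HasFieldsEnclosedInQuotes = true;
    // Treat the first record as the header row.
    foreach (var name in parser.ReadFields())
        table.Columns.Add(name, typeof(string));
    while (!parser.EndOfData)
    {
        var fields = parser.ReadFields();
        var row = table.NewRow();
        for (int i = 0; i < table.Columns.Count && i < fields.Length; i++)
            row[i] = fields[i];
        table.Rows.Add(row);
    }
}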
I used this CsvReader; it is really great and well configurable. It behaves well with all kinds of escaping for strings and separators. The escaping in other quick-and-dirty implementations was poor, but this lib is really great at reading. With a few additional lines of code you can also add a cache if you need to.
Writing is not supported, but it is rather trivial to implement yourself (see the sketch below), or you can take inspiration from this code.
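For instance, a minimal writer sketch (not from the linked library; it assumes using System.Data, System.IO and System.Linq) that quotes any field containing the delimiter, quotes or newlines:

// Hedged sketch: write a DataTable to a delimited file, doubling quotes as per RFC 4180.
static void WriteDelimited(DataTable table, string path, string delimiter = "\t")
{
    string Escape(string value) =>
        value.Contains(delimiter) || value.Contains("\"") || value.Contains("\n")
            ? "\"" + value.Replace("\"", "\"\"") + "\""
            : value;

    using (var writer = new StreamWriter(path))
    {
        // Header line, then one line per row.
        writer.WriteLine(string.Join(delimiter,
            table.Columns.Cast<DataColumn>().Select(c => Escape(c.ColumnName))));
        foreach (DataRow row in table.Rows)
            writer.WriteLine(string.Join(delimiter,
                row.ItemArray.Select(v => Escape(Convert.ToString(v)))));
    }
}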
A simple example with CsvHelper:
using (TextWriter writer = new StreamWriter(filePath))
{
var csvWriter = new CsvWriter(writer);
csvWriter.Configuration.Delimiter = "\t";
csvWriter.Configuration.Encoding = Encoding.UTF8;
csvWriter.WriteRecords(exportRecords);
}
Here are a couple CSV reader implementations:
http://www.codeproject.com/KB/database/CsvReader.aspx
http://www.heikniemi.fi/jhlib/ (just one part of the library; includes a CSV writer too)
I doubt there is a standard way to convert CSV to a DataTable or database 'automatically'; you'll have to write code to do that. How to do that is a separate question.
You'll create your DataTable in code, and (presuming a header row) can create columns based on the first line in the file. After that, it will simply be a matter of reading the file and creating new rows based on the data therein.
You could use something like this:
DataTable Tbl = new DataTable();
using (StreamReader sr = new StreamReader(path))
{
    // Build the columns from the header row.
    string headerRow = sr.ReadLine();
    string[] headers = headerRow.Split('\t'); // or ','
    foreach (string h in headers)
    {
        Tbl.Columns.Add(new DataColumn(h));
    }
    // Add one row per remaining line.
    while (!sr.EndOfStream)
    {
        string data = sr.ReadLine();
        string[] cells = data.Split('\t');
        DataRow row = Tbl.NewRow();
        for (int i = 0; i < cells.Length && i < Tbl.Columns.Count; i++)
        {
            row[i] = cells[i];
        }
        Tbl.Rows.Add(row);
    }
}
The above code has not been compiled, so it may have some errors, but it should get you on the right track.
You can read and write a CSV file with the code below; it may be helpful for you.
Pass the split character to the "serparationChar" parameter.
Example:
private DataTable dataTable = null;
private bool IsHeader = true;
private string headerLine = string.Empty;
private List<string> AllLines = new List<string>();
private StringBuilder sb = new StringBuilder();
private char seprateChar = ',';
public DataTable ReadCSV(string path, bool IsReadHeader, char serparationChar)
{
seprateChar = serparationChar;
IsHeader = IsReadHeader;
using (StreamReader sr = new StreamReader(path,Encoding.Default))
{
while (!sr.EndOfStream)
{
AllLines.Add( sr.ReadLine());
}
createTemplate(AllLines);
}
return dataTable;
}
public void WriteCSV(string path,DataTable dtable,char serparationChar)
{
AllLines = new List<string>();
seprateChar = serparationChar;
List<string> StableHeadrs = new List<string>();
int colCount = 0;
using (StreamWriter sw = new StreamWriter(path))
{
sb.Clear(); // reset the shared StringBuilder before building the header line
foreach (DataColumn col in dtable.Columns)
{
sb.Append(col.ColumnName);
if (dtable.Columns.Count - 1 > colCount)
sb.Append(seprateChar);
colCount++;
}
AllLines.Add(sb.ToString());
for (int i = 0; i < dtable.Rows.Count; i++)
{
sb.Clear();
for (int j = 0; j < dtable.Columns.Count; j++)
{
sb.Append(Convert.ToString(dtable.Rows[i][j]));
if (dtable.Columns.Count - 1 > j)
sb.Append(seprateChar);
}
AllLines.Add(sb.ToString());
}
foreach (string dataline in AllLines)
{
sw.WriteLine(dataline);
}
}
}
private DataTable createTemplate(List<string> lines)
{
List<string> headers = new List<string>();
dataTable = new DataTable();
if (lines.Count > 0)
{
string[] argHeaders = null;
for (int i = 0; i < lines.Count; i++)
{
if (i > 0)
{
DataRow newRow = dataTable.NewRow();
// others add to rows
string[] argLines = lines[i].Split(seprateChar);
for (int b = 0; b < argLines.Length; b++)
{
newRow[b] = argLines[b];
}
dataTable.Rows.Add(newRow);
}
else
{
// header add to columns
argHeaders = lines[0].Split(seprateChar);
foreach (string c in argHeaders)
{
DataColumn column = new DataColumn(c, typeof(string));
dataTable.Columns.Add(column);
}
}
}
}
return dataTable;
}
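A short usage sketch for the two methods above (the class name and file paths are illustrative):

// Assumes the fields and methods above live in a class such as CsvFileHelper.
var helper = new CsvFileHelper();
DataTable dt = helper.ReadCSV(@"C:\temp\input.csv", true, ',');
helper.WriteCSV(@"C:\temp\output.tsv", dt, '\t');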
I have found the best solution:
http://www.codeproject.com/Articles/415732/Reading-and-Writing-CSV-Files-in-Csharp
I just had to rewrite:
void ReadTest()
{
// Read sample data from CSV file
using (CsvFileReader reader = new CsvFileReader("ReadTest.csv"))
{
CsvRow row = new CsvRow();
while (reader.ReadRow(row))
{
foreach (string s in row)
{
Console.Write(s);
Console.Write(" ");
}
Console.WriteLine();
row = new CsvRow(); //this line added
}
}
}
Well, there is another library, Cinchoo ETL - an open-source one - for reading and writing CSV files.
Here are a couple of ways you can read CSV files. Given a sample file like this:
Id, Name
1, Tom
2, Mark
This is how you can use this library to read it:
using (var reader = new ChoCSVReader("emp.csv").WithFirstLineHeader())
{
foreach (dynamic item in reader)
{
Console.WriteLine(item.Id);
Console.WriteLine(item.Name);
}
}
If you have a POCO object defined to match up with the CSV file, like the one below:
public class Employee
{
public int Id { get; set; }
public string Name { get; set; }
}
You can parse the same file using this POCO class, as below:
using (var reader = new ChoCSVReader<Employee>("emp.csv").WithFirstLineHeader())
{
foreach (var item in reader)
{
Console.WriteLine(item.Id);
Console.WriteLine(item.Name);
}
}
Please check out articles at CodeProject on how to use it.
Disclaimer: I'm the author of this library