I have a requirement wherein I have to fill a DataTable from a sheet of a Microsoft Excel file.
The sheet may hold a lot of data, so the requirement is that when a foreach loop iterates over the DataTable that is supposed to hold the data from the Excel sheet, the table should be filled on demand.
Meaning: if there are 1,000,000 records in the sheet, the DataTable should fetch data in batches of 100, depending on the position of the foreach loop's current item.
Any pointer or suggestion will be appreciated.
I would suggest using OpenXML to parse and read your Excel data from the file.
This will also allow you to read out specific sections/regions from your workbook.
You will find more information and also an example at this link:
Microsoft Docs - Parse and read a large spreadsheet document (Open XML SDK)
This will be more efficient and easier to develop than using the official Microsoft Office Excel interop.
I am not near a PC with Visual Studio, so this code is untested and may have syntax errors until I can test it later.
It will still give you the main idea of what needs to be done.
private void ExcelDataPages(int firstRecord, int numberOfRecords)
{
    Excel.Application dataApp = new Excel.Application();
    Excel.Workbook dataWorkbook;
    int x = 0;
    dataApp.DisplayAlerts = false;
    dataApp.Visible = false;
    dataApp.AutomationSecurity = Microsoft.Office.Core.MsoAutomationSecurity.msoAutomationSecurityLow;
    dataWorkbook = dataApp.Workbooks.Open(@"C:\Test\YourWorkbook.xlsx");
    try
    {
        Excel.Worksheet dataSheet = (Excel.Worksheet)dataWorkbook.Sheets["Name of Sheet"];
        while (x < numberOfRecords)
        {
            Excel.Range currentRange = (Excel.Range)dataSheet.Rows[firstRecord + x]; //all the columns in the row
            foreach (Excel.Range r in currentRange.Cells)
            {
                // do what you need to with the data here
            }
            x++;
        }
    }
    catch (Exception ex)
    {
        // enter in error handling
    }
    // Depending on how quickly you will access the next batch of data, you may not want to
    // close the workbook here; keeping it open reduces load time on each call. That may mean
    // opening the workbook at a higher level in your class, or, if this is the main process
    // of the app, making it static so the garbage collector does not destroy the connection.
    dataWorkbook.Close(false);
    dataApp.Quit();
}
Give the following a try. It uses the NuGet package DocumentFormat.OpenXml; the code is from Using OpenXmlReader, but I modified it to add the data to a DataTable. Since you're reading data from the same Excel file multiple times, it's faster to open the Excel file once using an instance of SpreadsheetDocument and dispose of it when finished. Since the instance of SpreadsheetDocument needs to be disposed of before your application exits, IDisposable is used.
Where it says "ToDo", you'll need to replace the code that creates the DataTable columns with your own code to create the correct columns for your project.
I tested the code below with an Excel file containing approximately 15,000 rows. When reading 100 rows at a time, the first read took approximately 500 ms - 800 ms, whereas subsequent reads took approximately 100 ms - 400 ms.
Create a class (name: HelperOpenXml)
HelperOpenXml.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using DocumentFormat.OpenXml;
using DocumentFormat.OpenXml.Packaging;
using DocumentFormat.OpenXml.Spreadsheet;
using System.Data;
using System.Diagnostics;
namespace ExcelReadSpecifiedRowsUsingOpenXml
{
public class HelperOpenXml : IDisposable
{
public string Filename { get; private set; } = string.Empty;
public int RowCount { get; private set; } = 0;
private SpreadsheetDocument spreadsheetDocument = null;
private DataTable dt = null;
public HelperOpenXml(string filename)
{
this.Filename = filename;
}
public void Dispose()
{
if (spreadsheetDocument != null)
{
try
{
spreadsheetDocument.Dispose();
dt.Clear();
}
catch(Exception ex)
{
throw; //rethrow without resetting the stack trace
}
}
}
public DataTable GetRowsSax(int startRow, int endRow, bool firstRowIsHeader = false)
{
int startIndex = startRow;
int endIndex = endRow;
if (firstRowIsHeader)
{
//if first row is header, increment by 1
startIndex = startRow + 1;
endIndex = endRow + 1;
}
if (spreadsheetDocument == null)
{
//create new instance
spreadsheetDocument = SpreadsheetDocument.Open(Filename, false);
//create new instance
dt = new DataTable();
//ToDo: replace 'dt.Columns.Add(...)' below with your code to create the DataTable columns
//add columns to DataTable
dt.Columns.Add("A");
dt.Columns.Add("B");
dt.Columns.Add("C");
dt.Columns.Add("D");
dt.Columns.Add("E");
dt.Columns.Add("F");
dt.Columns.Add("G");
dt.Columns.Add("H");
dt.Columns.Add("I");
dt.Columns.Add("J");
dt.Columns.Add("K");
}
else
{
//remove existing data from DataTable
dt.Rows.Clear();
}
WorkbookPart workbookPart = spreadsheetDocument.WorkbookPart;
int numWorkSheetParts = 0;
foreach (WorksheetPart worksheetPart in workbookPart.WorksheetParts)
{
using (OpenXmlReader reader = OpenXmlReader.Create(worksheetPart))
{
int rowIndex = 0;
//use the reader to read the XML
while (reader.Read())
{
if (reader.ElementType == typeof(Row))
{
reader.ReadFirstChild();
List<string> cValues = new List<string>();
int colIndex = 0;
do
{
//only get data from desired rows
if ((rowIndex > 0 && rowIndex >= startIndex && rowIndex <= endIndex) ||
(rowIndex == 0 && !firstRowIsHeader && rowIndex >= startIndex && rowIndex <= endIndex))
{
if (reader.ElementType == typeof(Cell))
{
Cell c = (Cell)reader.LoadCurrentElement();
string cellRef = c.CellReference; //ex: A1, B1, ..., A2, B2
string cellValue = string.Empty;
//string/text data is stored in SharedString
if (c.DataType != null && c.DataType == CellValues.SharedString)
{
SharedStringItem ssi = workbookPart.SharedStringTablePart.SharedStringTable.Elements<SharedStringItem>().ElementAt(int.Parse(c.CellValue.InnerText));
cellValue = ssi.Text.Text;
}
else
{
cellValue = c.CellValue.InnerText;
}
//Debug.WriteLine("{0}: {1} ", c.CellReference, cellValue);
//add value to List which is used to add a row to the DataTable
cValues.Add(cellValue);
}
}
colIndex += 1; //increment
} while (reader.ReadNextSibling());
if (cValues.Count > 0)
{
//if List contains data, use it to add row to DataTable
dt.Rows.Add(cValues.ToArray());
}
rowIndex += 1; //increment
if (rowIndex > endIndex)
{
break; //exit loop
}
}
}
}
numWorkSheetParts += 1; //increment
}
DisplayDataTableData(dt); //display data in DataTable
return dt;
}
private void DisplayDataTableData(DataTable dt)
{
foreach (DataColumn dc in dt.Columns)
{
Debug.WriteLine("colName: " + dc.ColumnName);
}
foreach (DataRow r in dt.Rows)
{
Debug.WriteLine(r[0].ToString() + " " + r[1].ToString());
}
}
}
}
Usage:
private string excelFilename = @"C:\Temp\Test.xlsx";
private HelperOpenXml helperOpenXml = null;
...
private void GetData(int startIndex, int endIndex, bool firstRowIsHeader)
{
helperOpenXml.GetRowsSax(startIndex, endIndex, firstRowIsHeader);
}
Note: Make sure to call Dispose() (ex: helperOpenXml.Dispose();) before your application exits.
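If the helper's lifetime is short, a using block is an alternative to calling Dispose() yourself. A minimal, untested sketch (the path and the total row count are placeholders; batch size of 100 matches the original requirement):
using (var helper = new HelperOpenXml(@"C:\Temp\Test.xlsx"))
{
    //fetch 100 rows at a time until the desired range is covered
    for (int start = 1; start <= 1000; start += 100)
    {
        DataTable batch = helper.GetRowsSax(start, start + 99, true);
        //process 'batch' here before requesting the next page
    }
} //Dispose() runs automatically at the end of the using block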
Update:
OpenXML stores dates as the number of days since 01 Jan 1900. For dates prior to 01 Jan 1900, they are stored in SharedString. For more info see Reading a date from xlsx using open xml sdk
Here's a code snippet:
Cell c = (Cell)reader.LoadCurrentElement();
...
string cellValue = string.Empty;
...
cellValue = c.CellValue.InnerText;
double dateCellValue = 0;
Double.TryParse(cellValue, out dateCellValue);
DateTime date = DateTime.FromOADate(dateCellValue); //renamed to avoid clashing with the DataTable 'dt'
cellValue = date.ToString("yyyy/MM/dd");
Another simple alternative: take a look at the NuGet package ExcelDataReader, with additional information at
https://github.com/ExcelDataReader/ExcelDataReader
Usage example:
[Fact]
void Test_ExcelDataReader()
{
System.Text.Encoding.RegisterProvider(System.Text.CodePagesEncodingProvider.Instance);
var scriptPath = Path.GetDirectoryName(Util.CurrentQueryPath); // LinqPad script path
var filePath = $@"{scriptPath}\TestExcel.xlsx";
using (var stream = File.Open(filePath, FileMode.Open, FileAccess.Read))
{
// Auto-detect format, supports:
// - Binary Excel files (2.0-2003 format; *.xls)
// - OpenXml Excel files (2007 format; *.xlsx, *.xlsb)
using (var reader = ExcelDataReader.ExcelReaderFactory.CreateReader(stream))
{
var result = reader.AsDataSet();
// The result of each spreadsheet is in result.Tables
var t0 = result.Tables[0];
Assert.True(t0.Rows[0][0].Dump("R0C0").ToString()=="Hello", "Expected 'Hello'");
Assert.True(t0.Rows[0][1].Dump("R0C1").ToString()=="World!", "Expected 'World!'");
} // using
} // using
} // fact
Before you start reading, you need to register an encoding provider as follows:
System.Text.Encoding.RegisterProvider(
System.Text.CodePagesEncodingProvider.Instance);
The cells are addressed the following way:
var t0 = result.Tables[0]; // table 0 is the first worksheet
var cell = t0.Rows[0][0]; // on table t0, read cell row 0 column 0
And you can easily loop through the rows and columns in a for loop as follows:
for (int r = 0; r < t0.Rows.Count; r++)
{
var row = t0.Rows[r];
var columns = row.ItemArray;
for (int c = 0; c < columns.Length; c++)
{
var cell = columns[c];
cell.Dump();
}
}
I use this code with the EPPlus DLL (don't forget to add the reference), but you should check that it matches your requirements.
public DataTable ReadExcelDatatable(bool hasHeader = true)
{
using (var pck = new OfficeOpenXml.ExcelPackage())
{
using (var stream = File.OpenRead(this._fullPath))
{
pck.Load(stream);
}
var ws = pck.Workbook.Worksheets.First();
DataTable tbl = new DataTable();
foreach (var firstRowCell in ws.Cells[1, 1, 1, ws.Dimension.End.Column])
{
//table head
tbl.Columns.Add(hasHeader ? firstRowCell.Text : string.Format("Column {0}", firstRowCell.Start.Column));
}
var startRow = hasHeader ? 2 : 1;
for (int rowNum = startRow; rowNum <= ws.Dimension.End.Row; rowNum++)
{
var wsRow = ws.Cells[rowNum, 1, rowNum, ws.Dimension.End.Column];
DataRow row = tbl.Rows.Add();
foreach (var cell in wsRow)
{
row[cell.Start.Column - 1] = cell.Text;
}
}
return tbl;
}
}
I'm going to give you a different answer. If the performance of loading a million rows into a DataTable is bad, resort to using a driver to load the data: How to open a huge excel file efficiently
DataSet excelDataSet = new DataSet();
string filePath = @"c:\temp\BigBook.xlsx";
// For .xlsx use Provider=Microsoft.ACE.OLEDB.12.0; for .xls use Provider=Microsoft.Jet.OLEDB.4.0 with Extended Properties="Excel 8.0;HDR=YES;"
string connectionString = "Provider=Microsoft.ACE.OLEDB.12.0;Data Source='" + filePath + "';Extended Properties=\"Excel 12.0;HDR=YES;\"";
using (OleDbConnection conn = new OleDbConnection(connectionString))
{
conn.Open();
OleDbDataAdapter objDA = new System.Data.OleDb.OleDbDataAdapter
("select * from [Sheet1$]", conn);
objDA.Fill(excelDataSet);
//dataGridView1.DataSource = excelDataSet.Tables[0];
}
Next filter the DataSet's DataTable using a DataView. Using a DataView's RowFilter property you can specify subsets of rows based on their column values.
DataView prodView = new DataView(excelDataSet.Tables[0],
"UnitsInStock <= ReorderLevel",
"SupplierID, ProductName",
DataViewRowState.CurrentRows);
Ref: https://www.c-sharpcorner.com/article/dataview-in-C-Sharp/
Or you could use the DataTables' DefaultView RowFilter directly:
excelDataSet.Tables[0].DefaultView.RowFilter = "Amount >= 5000 and Amount <= 5999 and Name = 'StackOverflow'";
Related
I have an Excel sheet with four columns (Name, Gender, Email, Salary); I want to write a console application using the NPOI library to select all male data with the highest salary.
I use this code to read the file from Excel:
using System;
using System.IO;
using NPOI.HSSF.UserModel;
using NPOI.XSSF.UserModel;
using NPOI.SS.UserModel;
private static void procExcel(string fileName, string schoolPicDir){
try
{
IWorkbook workbook;
FileStream fs = new FileStream(fileName, FileMode.Open, FileAccess.Read);
if (fileName.IndexOf(".xlsx") > 0)
workbook = new XSSFWorkbook(fs);
else if (fileName.IndexOf(".xls") > 0)
workbook = new HSSFWorkbook(fs);
else
throw new ArgumentException("Unsupported file extension: " + fileName); //ensures 'workbook' is definitely assigned
//First sheet
ISheet sheet = workbook.GetSheetAt(0);
if (sheet != null)
{
int rowCount = sheet.LastRowNum; // This may not be valid row count.
// If first row is table head, i starts from 1
for (int i = 1; i <= rowCount; i++)
{
IRow curRow = sheet.GetRow(i);
// Works for consecutive data. Use continue otherwise
if (curRow == null)
{
// Valid row count
rowCount = i - 1;
break;
}
// Get data from the 4th column (4th cell of each row)
var cellValue = curRow.GetCell(3).StringCellValue.Trim();
Console.WriteLine(cellValue);
}
}
}
catch(Exception e)
{
Console.WriteLine(e.Message);
}
}
I have an .xlsx file stored on my desktop that my C# program reads from; it loads each worksheet into DataTables that my program uses. When a modification is made to a DataTable, I save to the .xlsx file by first loading the DataTable back into the worksheet, and then saving the ExcelPackage with the modified DataTable information.
The problem is, I sometimes need to overwrite a cell with a blank string, and after saving, the previous value is still there.
It will let me update a value to a space (" "), but I want to save the cell as empty, "", or null.
This is loading my DataTables from the .xlsx:
using (var pck = new OfficeOpenXml.ExcelPackage())
{
try
{
using (var stream = File.OpenRead(filePath + "/dataSet.xlsx"))
{
pck.Load(stream);
}
for (int i = 1; i < pck.Workbook.Worksheets.Count; i++)
{
var ws = pck.Workbook.Worksheets[i];
DataTable tbl = new DataTable();
foreach (var firstRowCell in ws.Cells[1, 1, 1, ws.Dimension.End.Column])
{
tbl.Columns.Add(firstRowCell.Text); //first row is always treated as the header here
}
var startRow = 2; //data starts after the header row
for (int rowNum = startRow; rowNum <= ws.Dimension.End.Row; rowNum++)
{
var wsRow = ws.Cells[rowNum, 1, rowNum, ws.Dimension.End.Column];
DataRow row = tbl.Rows.Add();
foreach (var cell in wsRow)
{
row[cell.Start.Column - 1] = cell.Text;
}
}
if (ws.Name == "customerDataTable") { _customerDataTable = tbl; }
else if (ws.Name == "vehicleDataTable") { _vehicleDataTable = tbl; }
}
}
catch (Exception ex)
{
MessageBox.Show(ex.Message);
}
}
This is saving the DataTable information into the worksheet, then to the .xlsx:
string tableName = "";
if (table == _customerDataTable) { tableName = "customerDataTable"; }
else if (table == _vehicleDataTable) { tableName = "vehicleDataTable"; }
FileInfo file = new FileInfo(filePath + "/dataSet.xlsx");
using (ExcelPackage excelPackage = new ExcelPackage(file))
{
ExcelWorkbook excelWorkBook = excelPackage.Workbook;
ExcelWorksheet excelWorksheet = excelWorkBook.Worksheets[tableName];
excelWorksheet.Cells.LoadFromDataTable(table, true);
excelPackage.Save();
}
I got it to work following VDWWD's post, using the following code:
for (int i = 0; i < table.Rows.Count; i++)
{
for (int j = 0; j < table.Columns.Count; j++)
{
excelWorksheet.Cells[i+2,j+1].Value = table.Rows[i][j];
}
}
//excelWorksheet.Cells.LoadFromDataTable(table, true);
I replaced the LoadFromDataTable() line with a nested for loop. Although it functionally works, I'm going to be using this method a lot throughout my program, and this solution seems a bit bulky compared to the EPPlus LoadFromDataTable method. I'm thinking there has to be a better way...
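One less bulky possibility (untested, and assuming EPPlus's ExcelRangeBase.Clear() behaves as documented): clear the worksheet's used range first so stale values can't survive the save, then keep the single LoadFromDataTable call:
if (excelWorksheet.Dimension != null)
{
    //remove old values so emptied DataTable cells don't leave stale data behind
    excelWorksheet.Cells[excelWorksheet.Dimension.Address].Clear();
}
excelWorksheet.Cells["A1"].LoadFromDataTable(table, true);
excelPackage.Save();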
I have a function that saves all records from a SQL table to an Excel worksheet using EPPlus.
If I export a small amount of data everything works fine, but with 200+ columns and 500,000+ rows I get an OutOfMemory exception.
I'd like to modify my code so it saves 50,000 records per file.
Here is my code that works for small data:
private Task SaveAsync(string tableName)
{
return Task.Run(() =>
{
try
{
using (var conn = new SqlConnection(_connectionString))
{
using (var cmd = new SqlCommand(string.Format(DataQuery, tableName), conn))
{
cmd.CommandType = CommandType.Text;
cmd.CommandTimeout = 360;
conn.Open();
using (SqlDataReader sdr = cmd.ExecuteReader())
{
var fileName = string.Format(TargetFile, tableName);
if (File.Exists(fileName))
{
File.Delete(fileName);
}
sdr.Read();
var numberOfRecordsInTable = sdr.GetInt32(0);
sdr.NextResult();
using (ExcelPackage pck = new ExcelPackage(new FileInfo(fileName)))
{
ExcelWorksheet ws = pck.Workbook.Worksheets.Add("Results");
int count = sdr.FieldCount;
int col = 1, row = 1;
for (int i = 0; i < count; i++)
{
ws.SetValue(row, col++, sdr.GetName(i));
}
row++;
col = 1;
while (sdr.Read())
{
for (int i = 0; i < count; i++)
{
var val = sdr.GetValue(i);
ws.SetValue(row, col++, val);
}
row++;
col = 1;
}
//autosize
ws.Cells[ws.Dimension.Address].AutoFitColumns();
//autofilter
ws.Cells[1, 1, 1, count].AutoFilter = true;
}
}
conn.Close();
}
}
}
catch (Exception e)
{
Debug.WriteLine("Error at: " + Thread.CurrentThread.ManagedThreadId);
Debug.WriteLine(e);
}
});
}
And here is my modified code that splits the records, 50,000 per file:
private Task SaveAsync2(string tableName)
{
return Task.Run(() =>
{
try
{
using (var conn = new SqlConnection(_connectionString))
{
using (var cmd = new SqlCommand(string.Format(DataQuery, tableName), conn))
{
cmd.CommandType = CommandType.Text;
cmd.CommandTimeout = 360;
conn.Open();
using (SqlDataReader sdr = cmd.ExecuteReader())
{
var fileName = string.Format(TargetFile, tableName,"");
if (File.Exists(fileName))
{
File.Delete(fileName);
}
sdr.Read();
var max = sdr.GetInt32(0);
int filesCount = 1;
if (max > 50000)
{
fileName = string.Format(TargetFile, tableName, filesCount);
}
sdr.NextResult();
ExcelPackage pck = new ExcelPackage(new FileInfo(fileName));
ExcelWorksheet ws = pck.Workbook.Worksheets.Add("RESULTS");
int count = sdr.FieldCount;
int col = 1, row = 1;
for (int i = 0; i < count; i++)
{
ws.SetValue(row, col++, sdr.GetName(i));
}
row++;
col = 1;
while (sdr.Read())
{
for (int i = 0; i < count; i++)
{
var val = sdr.GetValue(i);
ws.SetValue(row, col++, val);
}
row++;
col = 1;
if (row > 50000)
{
pck.Save();
filesCount++;
fileName = string.Format(TargetFile, tableName, filesCount);
pck = new ExcelPackage(new FileInfo(fileName));
ws = pck.Workbook.Worksheets.Add("RESULTS");
count = sdr.FieldCount;
col = 1;
row = 1;
for (int i = 0; i < count; i++)
{
ws.SetValue(row, col++, sdr.GetName(i));
}
row++;
col = 1;
}
}
//autosize
ws.Cells[ws.Dimension.Address].AutoFitColumns();
//autofilter
ws.Cells[1, 1, 1, count].AutoFilter = true;
pck.Save();
}
}
conn.Close();
}
}
catch (Exception e)
{
Debug.WriteLine("Error at: " + Thread.CurrentThread.ManagedThreadId);
Debug.WriteLine(e);
}
});
}
Basically this works fine, but in the first version of my code everything was inside a using statement, whereas in the second version I'm calling the same code twice.
How can I fix my code to remove the duplicate code and put everything inside a using statement?
Can I add the next set (50,000 records) as a new worksheet instead of creating a new file?
What would be the EPPlus limit when saving data to a file: rows x columns? I found information that EPPlus should handle more than a million rows, but not nearly as many columns as I have. I think I can export a million rows with a single column, but for 200+ columns, 50,000 rows is my limit. I'm wondering if there is a number (rows x columns) up to which my export will work fine. I want the export function to be universal, so when I pass a DataTable with 50 columns it will export, for example, 100,000 rows per file, and for 2 columns it will export half a million per file.
I've run up against memory limits with EPPlus in the past, and ended up generating multiple .xlsx files as a workaround (similar to your approach). Another alternative would be to change your compiler settings to target 64-bit only (if you can get by without supporting 32-bit platforms). As I recall, EPPlus is compiled for "Any CPU", so if you can change your code to target "x64" that would likely relax the memory limitations and allow you to generate a single .xlsx file. Targeting x64 likely would have worked in my case, but I didn't think of it until after the fact, so I never had an opportunity to test.
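For reference, targeting x64 is just a build setting; a hypothetical project-file fragment would look something like this (property names per the standard MSBuild schema):
<PropertyGroup>
  <PlatformTarget>x64</PlatformTarget>
</PropertyGroup>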
UPDATE:
I just ran a quick test using EPPlus 3.1.3 creating 500,000 rows, 70 columns each. My 32-bit application was able to generate about 119,000 rows before generating an out-of-memory exception. After switching the target to x64, it successfully generated all 500,000 rows, although it took forever. Creating the actual worksheet took just a few minutes, but ExcelPackage.SaveAs() took close to 20 minutes. RAM consumption was quite high as well (roughly 11GB of RAM). The resulting .xlsx is 220MB, which 32-bit Excel is not able to open (out of memory). The bottom line: Targeting x64 is probably not a viable solution; you'd be better off splitting the output into multiple .xlsx files.
I was tempted to delete this answer since it has turned out to be a dead end, but decided to leave it in case it helps someone else avoid this path in the future.
Unfortunately, there is no easy way to merge that much data with EPPlus in a single file. Basically, the entire file is loaded into memory when open: it's all or nothing. In theory, you could generate the XML files that an XLSX contains (they are zip files renamed) and insert the data manually, since that would have a smaller memory footprint, but that is no small feat.
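To see for yourself that an .xlsx really is a renamed zip, you can list its parts with System.IO.Compression (small illustrative sketch; the path is a placeholder):
using System;
using System.IO.Compression;

using (ZipArchive zip = ZipFile.OpenRead(@"c:\temp\BigBook.xlsx"))
{
    foreach (ZipArchiveEntry entry in zip.Entries)
    {
        //typical entries: xl/workbook.xml, xl/worksheets/sheet1.xml, xl/sharedStrings.xml
        Console.WriteLine(entry.FullName);
    }
}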
For your current code, you could always just call .Dispose() manually if you want to avoid the using statement. But I understand you wanting to avoid duplicate code. What about something like this (but watch the memory usage when copying all the object data):
const int max = 10;
var loop = 0;
using (var sdr = cmd.ExecuteReader())
{
var fieldcount = sdr.FieldCount;
var getfi = new Func<int, FileInfo>(i =>
{
var fi = new FileInfo(String.Format(@"c:\temp\Multi_Files{0}.xlsx", i));
if (fi.Exists) fi.Delete();
return fi;
});
var savefile = new Action<FileInfo, List<Object[]>>((info, rows) =>
{
using (var pck = new ExcelPackage(info))
{
var wb = pck.Workbook;
var ws = wb.Worksheets.Add("RESULTS");
for (var row = 0; row < rows.Count; row++)
for (var col = 0; col < fieldcount; col++)
ws.SetValue(row + 1, col + 1, rows[row][col]);
pck.Save();
}
});
var rowlist = new List<Object[]>();
while (sdr.Read())
{
var rowdata = new Object[sdr.FieldCount];
sdr.GetValues(rowdata);
rowlist.Add(rowdata);
if (rowlist.Count == max)
{
savefile(getfi(++loop), rowlist);
rowlist.Clear();
}
}
if (rowlist.Count > 0)
savefile(getfi(++loop), rowlist);
}
Since you are creating a new excel file (correct me if I'm wrong), you can simply write an XML file with some specific contents. Excel supports .xml files if they contain the correct stuff.
You can simply create the contents of the XML file in-memory, and afterwards write this contents to an .XML file. You do not need the EPPlus package, therefore you are bypassing the limitations of the EPPlus package.
Of course, you have to figure out manually what you need to write in the .XML file. If you are going to use formatting and formulas, it might be complex.
See here:
https://www.google.nl/search?q=excel+xml+format
https://technet.microsoft.com/en-gb/magazine/2006.01.blogtales.aspx
http://blogs.msdn.com/b/brian_jones/archive/2005/06/27/433152.aspx
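For orientation, a minimal SpreadsheetML (Excel 2003 XML) document looks roughly like this; Excel opens it directly, though this skeleton is from memory and untested:
<?xml version="1.0"?>
<Workbook xmlns="urn:schemas-microsoft-com:office:spreadsheet"
          xmlns:ss="urn:schemas-microsoft-com:office:spreadsheet">
  <Worksheet ss:Name="Sheet1">
    <Table>
      <Row>
        <Cell><Data ss:Type="String">Name</Data></Cell>
        <Cell><Data ss:Type="Number">42</Data></Cell>
      </Row>
    </Table>
  </Worksheet>
</Workbook>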
A simple solution without any tricks (not tested, but the intention should be clear):
using (var conn = new SqlConnection(_connectionString))
{
int filesCount = 1;
int col = 1, row = 1;
string fileName = String.Empty;
int count = 0;
ExcelPackage pck = null;
ExcelWorksheet ws = null;
using (var cmd = new SqlCommand(string.Format(DataQuery, tableName), conn))
{
cmd.CommandType = CommandType.Text;
cmd.CommandTimeout = 360;
conn.Open();
using (SqlDataReader sdr = cmd.ExecuteReader())
{
while (sdr.Read())
{
if (row == 1)
{
fileName = string.Format(TargetFile, tableName, filesCount);
if (File.Exists(fileName))
{
File.Delete(fileName);
}
pck = new ExcelPackage(new FileInfo(fileName));
ws = pck.Workbook.Worksheets.Add("RESULTS");
}
count = sdr.FieldCount;
for (int i = 0; i < count; i++)
{
var val = sdr.GetValue(i);
ws.SetValue(row, col++, val);
}
row++;
col = 1;
if (row >= 50000)
{
ws.Cells[ws.Dimension.Address].AutoFitColumns();
ws.Cells[1, 1, 1, count].AutoFilter = true;
pck.Save();
row = 1;
filesCount++;
}
}
}
if (row > 1)
{
ws.Cells[ws.Dimension.Address].AutoFitColumns();
ws.Cells[1, 1, 1, count].AutoFilter = true;
pck.Save();
}
}
}
I am exporting SQL data to Excel. The code I am currently using is:
DataTable dt = new DataTable();
// Create sql connection string
string conString = "Data Source=DELL\\SQLSERVER1;Trusted_Connection=True;DATABASE=Zelen;CONNECTION RESET=FALSE";
SqlConnection sqlCon = new SqlConnection(conString);
sqlCon.Open();
SqlDataAdapter da = new SqlDataAdapter("select LocalSKU,ItemName, QOH,Price,Discontinued,CAST(Barcode As varchar(25)) As Barcode,Integer2,Integer3,ISNULL(SalePrice,0.0000)AS SalePrice,SaleOn,ISNULL(Price2,0.0000)AS Price2 from dbo.Inventory", sqlCon);
System.Data.DataTable dtMainSQLData = new System.Data.DataTable();
da.Fill(dtMainSQLData);
DataColumnCollection dcCollection = dtMainSQLData.Columns;
// Export Data into EXCEL Sheet
Microsoft.Office.Interop.Excel.ApplicationClass ExcelApp = new Microsoft.Office.Interop.Excel.ApplicationClass();
ExcelApp.Application.Workbooks.Add(Type.Missing);
int i = 1;
int j = 1;
int s = 1;
//header row
foreach (DataColumn col in dtMainSQLData.Columns)
{
ExcelApp.Cells[i, j] = col.ColumnName;
j++;
ExcelApp.Rows.AutoFit();
ExcelApp.Columns.AutoFit();
}
i++;
//data rows
foreach (DataRow row in dtMainSQLData.Rows)
{
for (int k = 1; k < dtMainSQLData.Columns.Count + 1; k++)
{
ExcelApp.Cells[i, k] = "'" + row[k - 1].ToString();
}
i++;
s++;
Console.Write(s);
Console.Write("\n\r");
ExcelApp.Columns.AutoFit();
ExcelApp.Rows.AutoFit();
}
var b = Environment.CurrentDirectory + @"\Sheet1.xlsx";
ExcelApp.ActiveWorkbook.SaveCopyAs(b);
ExcelApp.ActiveWorkbook.Saved = true;
ExcelApp.Quit();
Console.WriteLine(".xlsx file Exported succssessfully.");
There are 70,000 rows in my SQL database. I am running this script in a console application.
It takes more than an hour to export to the Excel file.
How can I make this export faster?
Examples would be appreciated.
Option 1:
See this answer. Use a library called ClosedXML to write the data to Excel.
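A minimal ClosedXML sketch (untested; ClosedXML's Worksheets.Add has an overload that accepts a DataTable, so the whole table is written in one call — 'dtMainSQLData' is the table from your code above):
using ClosedXML.Excel;

using (var workbook = new XLWorkbook())
{
    //writes headers plus all rows from the DataTable in a single call
    workbook.Worksheets.Add(dtMainSQLData, "Export");
    workbook.SaveAs("Sheet1.xlsx");
}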
Option 2:
Get a range big enough for all of the data and set its value to a 2-dimensional array. This works very fast without referencing another library. I tried it with 70,000 records.
// Get an excel instance
Microsoft.Office.Interop.Excel.Application excel = new Microsoft.Office.Interop.Excel.Application();
// Get a workbook
Workbook wb = excel.Workbooks.Add();
// Get a worksheet
Worksheet ws = wb.Worksheets.Add();
ws.Name = "Test Export";
// Add column names to the first row
int col = 1;
foreach (DataColumn c in table.Columns) {
ws.Cells[1, col] = c.ColumnName;
col++;
}
// Create a 2D array with the data from the table
int i = 0;
string[,] data = new string[table.Rows.Count, table.Columns.Count];
foreach (DataRow row in table.Rows) {
int j = 0;
foreach (DataColumn c in table.Columns) {
data[i,j] = row[c].ToString();
j++;
}
i++;
}
// Set the range value to the 2D array
ws.Range[ws.Cells[2, 1], ws.Cells[table.Rows.Count + 1, table.Columns.Count]].Value = data;
// Auto fit columns and rows, show excel, save.. etc
excel.Columns.AutoFit();
excel.Rows.AutoFit();
excel.Visible = true;
Edit: This version exported a million records; on my machine it took about a minute. This example uses Excel interop and breaks the rows into chunks of 100,000.
// Start a stopwatch to time the process
System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
sw.Start();
// Check if there are rows to process
if (table != null && table.Rows.Count > 0) {
// Determine the number of chunks
int chunkSize = 100000;
double chunkCountD = (double)table.Rows.Count / (double)chunkSize;
int chunkCount = table.Rows.Count / chunkSize;
chunkCount = chunkCountD > chunkCount ? chunkCount + 1 : chunkCount;
// Instantiate excel
Microsoft.Office.Interop.Excel.Application excel = new Microsoft.Office.Interop.Excel.Application();
// Get a workbook
Workbook wb = excel.Workbooks.Add();
// Get a worksheet
Worksheet ws = wb.Worksheets.Add();
ws.Name = "Test Export";
// Add column names to excel
int col = 1;
foreach (DataColumn c in table.Columns) {
ws.Cells[1, col] = c.ColumnName;
col++;
}
// Build 2D array
int i = 0;
string[,] data = new string[table.Rows.Count, table.Columns.Count];
foreach (DataRow row in table.Rows) {
int j = 0;
foreach (DataColumn c in table.Columns) {
data[i, j] = row[c].ToString();
j++;
}
i++;
}
int processed = 0;
int data2DLength = data.GetLength(1);
for (int chunk = 1; chunk <= chunkCount; chunk++) {
if (table.Rows.Count - processed < chunkSize) chunkSize = table.Rows.Count - processed;
string[,] chunkData = new string[chunkSize, data2DLength];
int l = 0;
for (int k = processed; k < chunkSize + processed; k++) {
for (int m = 0; m < data2DLength; m++) {
chunkData[l,m] = table.Rows[k][m].ToString();
}
l++;
}
// Set the range value to the chunk 2d array
ws.Range[ws.Cells[2 + processed, 1], ws.Cells[processed + chunkSize + 1, data2DLength]].Value = chunkData;
processed += chunkSize;
}
// Auto fit columns and rows, show excel, save.. etc
excel.Columns.AutoFit();
excel.Rows.AutoFit();
excel.Visible = true;
}
// Stop the stopwatch and display the seconds elapsed
sw.Stop();
MessageBox.Show(sw.Elapsed.TotalSeconds.ToString());
If you save your data in CSV format you can load that into Excel. Here is some code I have modified from The Code Project site: http://www.codeproject.com/Tips/665519/Writing-a-DataTable-to-a-CSV-file
public class Program
{
static void Main(string[] args)
{
Stopwatch stopwatch = new Stopwatch();
stopwatch.Start();
DataTable dt = new DataTable();
// Create Connection object
using (SqlConnection conn = new SqlConnection(@"<Your Connection String>"))
{
// Create Command object
conn.Open();
using (SqlCommand cmd = new SqlCommand("SELECT * FROM <Your Table>", conn))
{
using (SqlDataReader reader = cmd.ExecuteReader())
{
try
{
dt.Load(reader);
using (StreamWriter writer = new StreamWriter("C:\\Temp\\dump.csv"))
{
DataConvert.ToCSV(dt, writer, false);
}
}
catch (Exception)
{
throw;
}
}
}
}
// Stop timing
stopwatch.Stop();
// Write result
Console.WriteLine("Time elapsed: {0}",
stopwatch.Elapsed);
Console.ReadKey();
}
}
public static class DataConvert
{
public static void ToCSV(DataTable sourceTable, TextWriter writer, bool includeHeaders)
{
if (includeHeaders)
{
List<string> headerValues = new List<string>();
foreach (DataColumn column in sourceTable.Columns)
{
headerValues.Add(QuoteValue(column.ColumnName));
}
writer.WriteLine(String.Join(",", headerValues.ToArray()));
}
string[] items = null;
foreach (DataRow row in sourceTable.Rows)
{
items = row.ItemArray.Select(o => QuoteValue(o.ToString())).ToArray();
writer.WriteLine(String.Join(",", items));
}
writer.Flush();
}
private static string QuoteValue(string value)
{
return String.Concat("\"", value.Replace("\"", "\"\""), "\"");
}
}
On my PC this took 30 seconds to process 1 million records...
You can try this function after setting your data in a DataTable:
Public Shared Sub ExportDataSetToExcel(ByVal ds As DataTable, ByVal filename As String)
Dim response As HttpResponse = HttpContext.Current.Response
response.Clear()
response.Buffer = True
response.Charset = ""
response.ContentType = "application/vnd.ms-excel"
Using sw As New StringWriter()
Using htw As New HtmlTextWriter(sw)
Dim dg As New DataGrid()
dg.DataSource = ds
dg.DataBind()
dg.RenderControl(htw)
response.Charset = "UTF-8"
response.ContentEncoding = System.Text.Encoding.UTF8
response.BinaryWrite(System.Text.Encoding.UTF8.GetPreamble())
response.Output.Write(sw.ToString())
response.[End]()
End Using
End Using
End Sub
I prefer Microsoft Open XML SDK's Open XML Writer. Open XML is the format all the new office files are in.
Export a large data query (60k+ rows) to Excel
Vincent Tan has a nice article on the topic.
http://polymathprogrammer.com/2012/08/06/how-to-properly-use-openxmlwriter-to-write-large-excel-files/
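The core pattern from that article, roughly (an untested sketch assuming the DocumentFormat.OpenXml usings shown earlier in this thread; 'path' is a placeholder, and number cells are used to avoid the shared-string table). OpenXmlWriter streams elements to disk instead of building the whole DOM in memory:
using (SpreadsheetDocument doc = SpreadsheetDocument.Create(path, SpreadsheetDocumentType.Workbook))
{
    WorkbookPart wbPart = doc.AddWorkbookPart();
    WorksheetPart wsPart = wbPart.AddNewPart<WorksheetPart>();
    using (OpenXmlWriter writer = OpenXmlWriter.Create(wsPart))
    {
        writer.WriteStartElement(new Worksheet());
        writer.WriteStartElement(new SheetData());
        for (int r = 1; r <= 60000; r++)
        {
            writer.WriteStartElement(new Row());
            writer.WriteElement(new Cell { CellValue = new CellValue(r.ToString()), DataType = CellValues.Number });
            writer.WriteEndElement(); //Row
        }
        writer.WriteEndElement(); //SheetData
        writer.WriteEndElement(); //Worksheet
    }
    //wire the streamed worksheet into the workbook
    wbPart.Workbook = new Workbook(new Sheets(new Sheet { Id = wbPart.GetIdOfPart(wsPart), SheetId = 1, Name = "Sheet1" }));
    wbPart.Workbook.Save();
}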
I know that there are different ways to read an Excel file:
Interop
OLE DB
Open XML SDK
Compatibility is not a concern because the program will be executed in a controlled environment.
My requirements:
Read a file into a DataTable / custom entities (I don't know how to add dynamic properties/fields to an object; column names will vary between Excel files)
Use the DataTable/custom entities to perform some operations using its data.
Update the DataTable with the results of the operations.
Write it back to the Excel file.
Which would be simplest?
Also, if possible, advise me on custom entities (adding properties/fields to an object dynamically).
Take a look at Linq-to-Excel. It's pretty neat.
var book = new LinqToExcel.ExcelQueryFactory(@"File.xlsx");
var query =
from row in book.Worksheet("Stock Entry")
let item = new
{
Code = row["Code"].Cast<string>(),
Supplier = row["Supplier"].Cast<string>(),
Ref = row["Ref"].Cast<string>(),
}
where item.Supplier == "Walmart"
select item;
It also allows for strongly-typed row access too.
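A rough sketch of that strongly-typed access (properties are matched to column headers by name, with AddMapping available for mismatches; the class and mapping below are illustrative assumptions, not from the original answer):
public class StockEntry
{
    public string Code { get; set; }
    public string Supplier { get; set; }
    public string Ref { get; set; }
}

var book = new LinqToExcel.ExcelQueryFactory(@"File.xlsx");
book.AddMapping<StockEntry>(x => x.Ref, "Ref"); //optional explicit column mapping
var walmartItems = from row in book.Worksheet<StockEntry>("Stock Entry")
                   where row.Supplier == "Walmart"
                   select row;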
I realize this question was asked nearly 7 years ago but it's still a top Google search result for certain keywords regarding importing excel data with C#, so I wanted to provide an alternative based on some recent tech developments.
Importing Excel data has become such a common task to my everyday duties, that I've streamlined the process and documented the method on my blog: best way to read excel file in c#.
I use NPOI because it can read/write Excel files without Microsoft Office installed and it doesn't use COM+ or any interops. That means it can work in the cloud!
But the real magic comes from pairing up with NPOI Mapper from Donny Tian because it allows me to map the Excel columns to properties in my C# classes without writing any code. It's beautiful.
Here is the basic idea:
I create a .net class that matches/maps the Excel columns I'm interested in:
class CustomExcelFormat
{
[Column("District")]
public int District { get; set; }
[Column("DM")]
public string FullName { get; set; }
[Column("Email Address")]
public string EmailAddress { get; set; }
[Column("Username")]
public string Username { get; set; }
public string FirstName
{
get
{
return Username.Split('.')[0];
}
}
public string LastName
{
get
{
return Username.Split('.')[1];
}
}
}
Notice, it allows me to map based on column name if I want to!
Then when I process the excel file all I need to do is something like this:
public void Execute(string localPath, int sheetIndex)
{
IWorkbook workbook;
using (FileStream file = new FileStream(localPath, FileMode.Open, FileAccess.Read))
{
workbook = WorkbookFactory.Create(file);
}
var importer = new Mapper(workbook);
var items = importer.Take<CustomExcelFormat>(sheetIndex);
foreach(var item in items)
{
var row = item.Value;
if (string.IsNullOrEmpty(row.EmailAddress))
continue;
UpdateUser(row);
}
DataContext.SaveChanges();
}
Now, admittedly, my code does not modify the Excel file itself. I am instead saving the data to a database using Entity Framework (that's why you see "UpdateUser" and "SaveChanges" in my example). But there is already a good discussion on SO about how to save/modify a file using NPOI.
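For what it's worth, writing a modified workbook back out with NPOI is essentially a one-liner once you have the IWorkbook (untested sketch; the path is a placeholder):
using (var fs = new FileStream(@"C:\Temp\output.xlsx", FileMode.Create, FileAccess.Write))
{
    workbook.Write(fs); //serializes the in-memory workbook back to disk
}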
Using an OLE DB query, it's quite simple (e.g. sheetName is "Sheet1"):
DataTable LoadWorksheetInDataTable(string fileName, string sheetName)
{
DataTable sheetData = new DataTable();
using (OleDbConnection conn = this.returnConnection(fileName))
{
conn.Open();
// retrieve the data using data adapter
OleDbDataAdapter sheetAdapter = new OleDbDataAdapter("select * from [" + sheetName + "$]", conn);
sheetAdapter.Fill(sheetData);
conn.Close();
}
return sheetData;
}
private OleDbConnection returnConnection(string fileName)
{
return new OleDbConnection("Provider=Microsoft.Jet.OLEDB.4.0;Data Source=" + fileName + "; Jet OLEDB:Engine Type=5;Extended Properties=\"Excel 8.0;\"");
}
For newer Excel versions:
return new OleDbConnection("Provider=Microsoft.ACE.OLEDB.12.0;Data Source=" + fileName + ";Extended Properties=Excel 12.0;");
You can also use Excel Data Reader, an open-source project on CodePlex. It works really well to export data from Excel sheets.
The sample code given on the link specified:
FileStream stream = File.Open(filePath, FileMode.Open, FileAccess.Read);
//1. Reading from a binary Excel file ('97-2003 format; *.xls)
IExcelDataReader excelReader = ExcelReaderFactory.CreateBinaryReader(stream);
//...
//2. Reading from a OpenXml Excel file (2007 format; *.xlsx)
IExcelDataReader excelReader = ExcelReaderFactory.CreateOpenXmlReader(stream);
//...
//3. DataSet - The result of each spreadsheet will be created in the result.Tables
DataSet result = excelReader.AsDataSet();
//...
//4. DataSet - Create column names from first row
excelReader.IsFirstRowAsColumnNames = true;
DataSet result = excelReader.AsDataSet();
//5. Data Reader methods
while (excelReader.Read())
{
//excelReader.GetInt32(0);
}
//6. Free resources (IExcelDataReader is IDisposable)
excelReader.Close();
Reference: How do I import from Excel to a DataSet using Microsoft.Office.Interop.Excel?
Try this free approach: https://freenetexcel.codeplex.com
Workbook workbook = new Workbook();
workbook.LoadFromFile(@"..\..\parts.xls", ExcelVersion.Version97to2003);
//Initialize worksheet
Worksheet sheet = workbook.Worksheets[0];
DataTable dataTable = sheet.ExportDataTable();
If you can restrict it to just *.xlsx files (the Office Open XML format), then probably the most popular library would be EPPlus.
Bonus is, there are no other dependencies. Just install using nuget:
Install-Package EPPlus
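A minimal read sketch with EPPlus (untested; the path and cell addresses are placeholders, and the worksheet indexer is 1-based in EPPlus 4 and earlier):
using System.IO;
using OfficeOpenXml;

using (var package = new ExcelPackage(new FileInfo(@"C:\Temp\Test.xlsx")))
{
    ExcelWorksheet ws = package.Workbook.Worksheets[1];
    string formatted = ws.Cells[1, 1].Text; //the value as Excel displays it
    object raw = ws.Cells[2, 1].Value;      //the underlying value
}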
Try the Aspose.Cells library (not free, but the trial is enough for reading); it is quite good.
Install-Package Aspose.Cells
There is sample code:
using Aspose.Cells;
using System;
namespace ExcelReader
{
class Program
{
static void Main(string[] args)
{
// Replace path for your file
readXLS(@"C:\MyExcelFile.xls"); // or "*.xlsx"
Console.ReadKey();
}
public static void readXLS(string PathToMyExcel)
{
//Open your template file.
Workbook wb = new Workbook(PathToMyExcel);
//Get the first worksheet.
Worksheet worksheet = wb.Worksheets[0];
//Get cells
Cells cells = worksheet.Cells;
// Get row and column count
int rowCount = cells.MaxDataRow;
int columnCount = cells.MaxDataColumn;
// Current cell value
string strCell = "";
Console.WriteLine(String.Format("rowCount={0}, columnCount={1}", rowCount, columnCount));
for (int row = 0; row <= rowCount; row++) // Numeration starts from 0 to MaxDataRow
{
for (int column = 0; column <= columnCount; column++) // Numeration starts from 0 to MaxDataColumn
{
strCell = "";
strCell = Convert.ToString(cells[row, column].Value);
if (String.IsNullOrEmpty(strCell))
{
continue;
}
else
{
// Do your stuff here
Console.WriteLine(strCell);
}
}
}
}
}
}
Read from Excel, modify, and write back:
/// <summary>
/// Reads an Excel file and converts it into a DataSet, with each sheet as a table of the DataSet
/// </summary>
/// <param name="filename"></param>
/// <param name="headers">If set to true the first row will be considered as headers</param>
/// <returns></returns>
public DataSet Import(string filename, bool headers = true)
{
var _xl = new Excel.Application();
var wb = _xl.Workbooks.Open(filename);
var sheets = wb.Sheets;
DataSet dataSet = null;
if (sheets != null && sheets.Count != 0)
{
dataSet = new DataSet();
foreach (var item in sheets)
{
var sheet = (Excel.Worksheet)item;
DataTable dt = null;
if (sheet != null)
{
dt = new DataTable();
var ColumnCount = ((Excel.Range)sheet.UsedRange.Rows[1, Type.Missing]).Columns.Count;
var rowCount = ((Excel.Range)sheet.UsedRange.Columns[1, Type.Missing]).Rows.Count;
for (int j = 0; j < ColumnCount; j++)
{
var cell = (Excel.Range)sheet.Cells[1, j + 1];
var column = new DataColumn(headers ? cell.Value : string.Empty);
dt.Columns.Add(column);
}
for (int i = 0; i < rowCount; i++)
{
var r = dt.NewRow();
for (int j = 0; j < ColumnCount; j++)
{
var cell = (Excel.Range)sheet.Cells[i + 1 + (headers ? 1 : 0), j + 1];
r[j] = cell.Value;
}
dt.Rows.Add(r);
}
}
dataSet.Tables.Add(dt);
}
}
_xl.Quit();
return dataSet;
}
public string Export(DataTable dt, bool headers = false)
{
var wb = _xl.Workbooks.Add(); //assumes '_xl' is an Excel.Application field on the class
var sheet = (Excel.Worksheet)wb.ActiveSheet;
//process columns
for (int i = 0; i < dt.Columns.Count; i++)
{
var col = dt.Columns[i];
//added columns to the top of sheet
var currentCell = (Excel.Range)sheet.Cells[1, i + 1];
currentCell.Value = col.ToString();
currentCell.Font.Bold = true;
//process rows
for (int j = 0; j < dt.Rows.Count; j++)
{
var row = dt.Rows[j];
//added rows to sheet
var cell = (Excel.Range)sheet.Cells[j + 1 + 1, i + 1];
cell.Value = row[i];
}
currentCell.EntireColumn.AutoFit();
}
var fileName="{somepath/somefile.xlsx}";
wb.SaveCopyAs(fileName);
_xl.Quit();
return fileName;
}
I used Office's NuGet Package: DocumentFormat.OpenXml and pieced together the code from that component's doc site.
With the helper code below, it was similar in complexity to my other CSV file format parsing in that project...
public static async Task ImportXLSX(Stream stream, string sheetName)
{
// This was necessary for my Blazor project, which used a BrowserFileStream object
MemoryStream ms = new MemoryStream();
await stream.CopyToAsync(ms);
using (var document = SpreadsheetDocument.Open(ms, false))
{
// Retrieve a reference to the workbook part.
WorkbookPart wbPart = document.WorkbookPart;
// Find the sheet with the supplied name, and then use that
// Sheet object to retrieve a reference to the first worksheet.
Sheet theSheet = wbPart?.Workbook.Descendants<Sheet>().Where(s => s?.Name == sheetName).FirstOrDefault();
// Throw an exception if there is no sheet.
if (theSheet == null)
{
throw new ArgumentException("sheetName");
}
WorksheetPart wsPart = (WorksheetPart)(wbPart.GetPartById(theSheet.Id));
// For shared strings, look up the value in the
// shared strings table.
var stringTable =
wbPart.GetPartsOfType<SharedStringTablePart>()
.FirstOrDefault();
// I needed to grab 4 cells from each row
// Starting at row 11, until the cell in column A is blank
int row = 11;
while (true) {
var accountNameCell = GetCell(wsPart, "A" + row.ToString());
var accountName = GetValue(accountNameCell, stringTable);
if (string.IsNullOrEmpty(accountName)) {
break;
}
var investmentNameCell = GetCell(wsPart, "B" + row.ToString());
var investmentName = GetValue(investmentNameCell, stringTable);
var symbolCell = GetCell(wsPart, "D" + row.ToString());
var symbol = GetValue(symbolCell, stringTable);
var marketValue = GetCell(wsPart, "J" + row.ToString()).InnerText;
// DO STUFF with data
row++;
}
}
}
private static string? GetValue(Cell cell, SharedStringTablePart stringTable) {
try {
return stringTable.SharedStringTable.ElementAt(int.Parse(cell.InnerText)).InnerText;
} catch (Exception) {
return null;
}
}
private static Cell GetCell(WorksheetPart wsPart, string cellReference) {
return wsPart.Worksheet.Descendants<Cell>().Where(c => c.CellReference.Value == cellReference)?.FirstOrDefault();
}