I'm attempting to take a list of Contacts that are retrieved from an OleDB query, add them to a List and then load the List into a DataTable. When I count the number of items in the list it results in the correct number (27000).
However, when I count the number of rows in the DataTable, it results in 0. After doing this I want to write the DataTable to CSV using FileHelpers, however the CSV file is empty.
This is the code I am using:
// Read every contact row from the DBF via OleDb into a List<Contact>,
// then copy the list into a DataTable (intended for CSV export via FileHelpers).
var listOfContacts = new List<Contact>();
using (OleDbConnection dbfCon = new OleDbConnection(dbfConstr))
{
    dbfCon.Open();
    // '@' verbatim string (garbled to '#' in the original post) lets the SQL span lines.
    var dbfCmd = new OleDbCommand(@"SELECT ct_id, ct_cmpid, ct_empid,
ct_pplid, ct_cntid, ct_pplnm, ct_date, ct_time, ct_type, ct_doneby, ct_desc
FROM contacts", dbfCon);
    using (var myReader = dbfCmd.ExecuteReader())
    {
        while (myReader.Read())
        {
            // Map the reader's ordinal columns onto a strongly-typed Contact.
            var newContact = new Contact()
            {
                ContactID = Convert.ToInt32(myReader[0]),
                CompanyID = Convert.ToInt32(myReader[1]),
                EmployeeID = Convert.ToInt32(myReader[2]),
                PersonID = Convert.ToInt32(myReader[3]),
                ContractID = Convert.ToInt32(myReader[4]),
                PersonName = myReader[5].ToString(),
                ContactDate = Convert.ToDateTime(myReader[6]),
                ContactTime = Convert.ToDateTime(myReader[7]),
                TypeOfContact = myReader[8].ToString(),
                ContactMadeBy = myReader[9].ToString(),
                ContactDescription = myReader[10].ToString(),
            };
            listOfContacts.Add(newContact);
        }
    }
    // Untyped (string) columns are fine here since everything is headed for CSV.
    DataTable dTable = new DataTable();
    dTable.Columns.Add("ContactID");
    dTable.Columns.Add("CompanyID");
    dTable.Columns.Add("EmployeeID");
    dTable.Columns.Add("PersonID");
    dTable.Columns.Add("ContractID");
    dTable.Columns.Add("PersonName");
    dTable.Columns.Add("ContactDate");
    dTable.Columns.Add("ContactTime");
    dTable.Columns.Add("TypeOfContact");
    dTable.Columns.Add("ContactMadeBy");
    dTable.Columns.Add("ContactDescription");
    MessageBox.Show(listOfContacts.Count.ToString());
    foreach (var contact in listOfContacts)
    {
        var newRow = dTable.NewRow();
        newRow["ContactID"] = contact.ContactID;
        newRow["CompanyID"] = contact.CompanyID;
        newRow["EmployeeID"] = contact.EmployeeID;
        newRow["PersonID"] = contact.PersonID;
        newRow["ContractID"] = contact.ContractID;
        newRow["PersonName"] = contact.PersonName;
        newRow["ContactDate"] = contact.ContactDate;
        newRow["ContactTime"] = contact.ContactTime;
        newRow["TypeOfContact"] = contact.TypeOfContact;
        newRow["ContactMadeBy"] = contact.ContactMadeBy;
        newRow["ContactDescription"] = contact.ContactDescription;
        // BUG (the subject of this question): newRow is never attached to the
        // table — dTable.Rows.Add(newRow) is missing here, so Rows.Count stays 0.
    }
    MessageBox.Show(dTable.Rows.Count.ToString());
You can see the two MessageBox calls that display these counts — am I loading the data into the DataTable incorrectly?
You have to add the new row to the DataTable:
foreach (var contact in listOfContacts)
{
    // DataRowCollection.Add(object[]) builds the row AND attaches it to the
    // table in one call — the step the question's code was missing. Values are
    // listed in the exact order the columns were declared above.
    dTable.Rows.Add(
        contact.ContactID,
        contact.CompanyID,
        contact.EmployeeID,
        contact.PersonID,
        contact.ContractID,
        contact.PersonName,
        contact.ContactDate,
        contact.ContactTime,
        contact.TypeOfContact,
        contact.ContactMadeBy,
        contact.ContactDescription);
}
In your foreach loop, add this at the end:
dTable.Rows.Add(newRow);
Related
I have two groups like below; they contain different data. Based on both I need to create an XML file.
How can I write a for-loop over both groups and generate a single XML file?
// Group the order rows by the full set of order attributes (one group per DocNum
// in practice, since every other field is functionally dependent on it).
var groups = checkFile.AsEnumerable().GroupBy(x => new { DocNum = x.Field<int>("orderid"), Type = x.Field<string>("Type"), ProdName = x.Field<string>("ProdName"), Status = x.Field<string>("Status"), productno = x.Field<string>("productno"), uom = x.Field<string>("uom"), customer = x.Field<string>("customer"), remark = x.Field<string>("remark"), U_JobNumber = x.Field<string>("U_JobNumber"), U_SalesPerson = x.Field<string>("U_SalesPerson"), U_POnum = x.Field<string>("U_POnum"), U_JobType = x.Field<string>("U_JobType"), PlannedQty = x.Field<decimal>("PlannedQty"), OriginNum = x.Field<int?>("OriginNum"), orderdate = x.Field<DateTime>("orderdate"), duedate = x.Field<DateTime>("duedate"), DocTotal = x.Field<decimal>("DocTotal") });
// NOTE(review): groups2 is built here but never read inside the loop below —
// correlating it with `groups` is exactly what the question asks how to do.
var groups2 = checkFile2.AsEnumerable().GroupBy(x => new { DocNum = x.Field<int>("DocNum") });
// Write one XML file per group. NOTE(review): the document currently contains
// only an empty <Root/> element; the group data still has to be written inside it.
foreach (var group in groups)
{
    var stringwriter = new StringWriter();
    using (var xmlWriter = XmlWriter.Create(stringwriter, new XmlWriterSettings { Indent = true }))
    {
        xmlWriter.WriteStartDocument();
        xmlWriter.WriteStartElement("Root");
        xmlWriter.WriteEndElement();
    }
    var xml = stringwriter.ToString();
    XmlDocument docSave = new XmlDocument();
    docSave.LoadXml(xml);
    // The '#' before SystemSettings in the original post was a transcription
    // artifact of C#'s '@' verbatim-identifier prefix; no prefix is needed.
    docSave.Save(System.IO.Path.Combine(SystemSettings.ImportBankStatementPendingFolderPath, "DocNum -" + group.Key.DocNum + ".xml"));
    count++;
}
Try the following:
DataTable checkFile = new DataTable();
// One group per distinct combination of the order's attributes.
var orderGroups =
    from row in checkFile.AsEnumerable()
    group row by new
    {
        DocNum = row.Field<int>("orderid"),
        Type = row.Field<string>("Type"),
        ProdName = row.Field<string>("ProdName"),
        Status = row.Field<string>("Status"),
        productno = row.Field<string>("productno"),
        uom = row.Field<string>("uom"),
        customer = row.Field<string>("customer"),
        remark = row.Field<string>("remark"),
        U_JobNumber = row.Field<string>("U_JobNumber"),
        U_SalesPerson = row.Field<string>("U_SalesPerson"),
        U_POnum = row.Field<string>("U_POnum"),
        U_JobType = row.Field<string>("U_JobType"),
        PlannedQty = row.Field<decimal>("PlannedQty"),
        OriginNum = row.Field<int?>("OriginNum"),
        orderdate = row.Field<DateTime>("orderdate"),
        duedate = row.Field<DateTime>("duedate"),
        DocTotal = row.Field<decimal>("DocTotal")
    };
DataTable checkFile2 = new DataTable();
// For each order group, pull the matching rows of the second table by DocNum —
// this is how the two data sets are correlated inside a single loop.
foreach (var orderGroup in orderGroups)
{
    List<DataRow> matchingRows = checkFile2.AsEnumerable()
        .Where(row => orderGroup.Key.DocNum == row.Field<int>("DocNum"))
        .ToList();
}
This is the strangest thing I've ever seen, but hopefully someone else has because I am clueless. I have the following code:
// Pull the grid's backing DataTable and project each row into an InvoiceItem.
DataTable dt = (DataTable)dataGridView1.DataSource;
List<InvoiceItem> itemList = new List<InvoiceItem>();
// NOTE(review): listSize and listIndex are never used in this fragment.
int listSize = 30;
int listIndex = 0;
try
{
// Convert.ToDouble will throw FormatException on non-numeric cell text;
// ServiceDate/TxnDate are parsed only when the cell is non-empty.
itemList = (from DataRow dr in dt.Rows
select new InvoiceItem()
{
CustomerRef = dr["CustomerRef"].ToString(),
Description = dr["Description"].ToString(),
ItemRef = dr["ItemRef"].ToString(),
Rate = Convert.ToDouble(dr["Rate"].ToString()),
Quantity = Convert.ToDouble(dr["Quantity"].ToString()),
PONumber = dr["PONumber"].ToString(),
UnitOfMeasure = dr["UnitOfMeasure"].ToString(),
RefNumber = dr["RefNumber"].ToString(),
Total = Convert.ToDouble(dr["Total"].ToString()),
Address1 = dr["Address1"].ToString(),
Address2 = dr["Address2"].ToString(),
Address3 = dr["Address3"].ToString(),
Address4 = dr["Address4"].ToString(),
City = dr["City"].ToString(),
State = dr["State"].ToString(),
PostalCode = dr["PostalCode"].ToString(),
ServiceDate = string.IsNullOrEmpty(dr["ServiceDate"].ToString()) ? (DateTime?)null : DateTime.Parse(dr["ServiceDate"].ToString()),
TxnDate = string.IsNullOrEmpty(dr["TxnDate"].ToString()) ? DateTime.Now : DateTime.Parse(dr["TxnDate"].ToString()),
Note = dr["Note"].ToString()
}).ToList();
// Fetch the item names already present in QuickBooks.
List<string> list = new List<string>();
list = loadItems();
// Collect the imported items that QuickBooks doesn't know about yet.
List<InvoiceItem> createNewItemsList = new List<InvoiceItem>();
foreach (var importing in itemList)
{
// NOTE(review): the server-side NullReferenceException reported for this line
// most likely means loadItems() returned a list containing null entries
// (l is null → l.Contains throws) — TODO confirm by filtering nulls out of
// the parsed QuickBooks response.
var matchingvalues = list.Where(l => l.Contains(importing.ItemRef));
//If there is no match in Quickbooks already...
if (matchingvalues.Count() < 1)
{
createNewItemsList.Add(new InvoiceItem
{
ItemRef = importing.ItemRef,
UnitOfMeasure = importing.UnitOfMeasure
});
}
}
Here is the Code for loadItems():
private List<string> loadItems()
{
    // Connect to QuickBooks, run an item query, and return the parsed item
    // identifiers as a list of strings. The session is always closed before
    // returning.
    connectToQB();
    var itemCount = getCount("ItemQueryRq");
    IMsgSetResponse response = sessionManager.processRequestFromQB(BuildItemQuery());
    var items = parseItemQueryRs(response, itemCount);
    disconnectFromQB();
    return new List<string>(items);
}
Here is a view of the error:
here shows list count:
When I run this code on my desktop, if matchingvalues.Count() is 0 it executes the code correctly. However, when I run the exact same code in debug on the server, that line of code errors out with "Object reference not set to an instance of an object." Can anybody explain why this might happen and whether there is any workaround for it?
How do i insert a blank row into the 2nd row?
// Build a batch update that inserts ONE blank row as the second row of the sheet.
BatchUpdateSpreadsheetRequest requestBody = new BatchUpdateSpreadsheetRequest();
requestBody.Requests = new List<Request>();
Request r = new Request();
requestBody.Requests.Add(r);
r.InsertDimension = new InsertDimensionRequest();
var dr = new DimensionRange();
dr.SheetId = 0;
dr.Dimension = "ROW";
// The range is zero-based and half-open [StartIndex, EndIndex), so 1..2 inserts
// exactly one row before the current second row. (The original 0..3 inserted
// three blank rows at the very top of the sheet.)
dr.StartIndex = 1;
dr.EndIndex = 2;
r.InsertDimension.Range = dr;
// Don't copy formatting from the row above the insertion point.
r.InsertDimension.InheritFromBefore = false;
SpreadsheetsResource.BatchUpdateRequest bur =
    service.Spreadsheets.BatchUpdate(requestBody, spreadsheetId);
bur.Execute();
With StartIndex = 0 and EndIndex = 3 this request actually inserts three blank rows at the top of the sheet, because the range is zero-based and half-open; to insert a single blank row as the second row, use StartIndex = 1 and EndIndex = 2.
I'm referring to the page https://developers.google.com/sheets/api/samples/rowcolumn
I am using CsvHelper lib to read CSV file and I can successfully read the file with the lib. However I cannot use SQL condition to filter values. How can I do that without using SQL Server. I am really stuck on it.
It was very easy with the Pandas and Pandasql libraries in Python, but it is proving much harder in C#.
My Code:
public static void Main(string[] args)
{
    // Read a CSV file record-by-record with CsvHelper and print the first field
    // of every row. (The question: how to filter these rows with a SQL-like
    // WHERE condition without a SQL Server — see the answers below.)
    // '@' verbatim path string — garbled to '#' in the original post.
    var fileInfo = new FileInfo(@"filePath");
    using (TextReader reader = fileInfo.OpenText())
    using (var csvReader = new CsvReader(reader))
    {
        csvReader.Configuration.Delimiter = ",";
        csvReader.Configuration.HasHeaderRecord = false;
        csvReader.Configuration.IgnoreQuotes = true;
        csvReader.Configuration.TrimFields = true;
        csvReader.Configuration.WillThrowOnMissingField = false;
        while (csvReader.Read())
        {
            var myStrinVar = csvReader.GetField<string>(0);
            Console.Write(myStrinVar); //SELECT * FROM table...
        }
    }
}
I would suggest using LINQ to filter your results.
https://msdn.microsoft.com/en-us/library/bb397906.aspx
Say you have some class MyClass that you can serialize the lines in your file into.
For example:
/// <summary>
/// Typed representation of one CSV line; add one property per CSV column so
/// CsvHelper's GetRecords&lt;MyClass&gt;() can materialize the rows for LINQ filtering.
/// </summary>
public class MyClass
{
/// <summary>Example column used by the filter below.</summary>
public int ID { get; set; }
}
// Materialize every CSV record, then keep only those whose ID is at least 10.
var records = csv.GetRecords<MyClass>().ToList();
var filtered = from r in records
               where r.ID >= 10
               select r;
That example is a bit contrived but you can use any boolean expression you like in the where clause.
I know this is too late for the OP, but the issue with the accepted answer is that you have to read the entire result set into memory, which may not be tenable for large files. You can also extend the code below to fetch the top N rows without reading the entire CSV, stopping as soon as enough matches are found early in the file.
public static void Main(string[] args)
{
    // Stream the CSV row-by-row, staging each record in a one-row DataTable so
    // a DataView.RowFilter (SQL-like WHERE syntax) can be applied without ever
    // holding the whole file in memory.
    // '@' verbatim path string — garbled to '#' in the original post.
    var fileInfo = new FileInfo(@"filePath");
    var where = ""; // Code to set up where clause part of query goes here
    using (TextReader reader = fileInfo.OpenText())
    using (var csvReader = new CsvReader(reader))
    {
        csvReader.Configuration.Delimiter = ",";
        csvReader.Configuration.HasHeaderRecord = false;
        csvReader.Configuration.IgnoreQuotes = true;
        csvReader.Configuration.TrimFields = true;
        csvReader.Configuration.WillThrowOnMissingField = false;
        DataTable dt = null;
        while (csvReader.Read())
        {
            // Use the first row to initialize the columns.
            if (dt == null)
            {
                dt = new DataTable();
                for (var i = 0; i < csvReader.FieldCount; i++)
                {
                    var fieldType = csvReader.GetFieldType(i);
                    DataColumn dc;
                    if (fieldType.IsNullableType())
                    {
                        // DataColumn can't hold Nullable<T>; store the underlying
                        // type and let DBNull represent missing values instead.
                        dc = new DataColumn(csvReader.GetName(i), Nullable.GetUnderlyingType(fieldType));
                        dc.AllowDBNull = true;
                    }
                    else
                    {
                        // BUG FIX: the original referenced 'data' here — a variable
                        // not declared until later in the loop; the reader is the
                        // correct source of the field's type.
                        dc = new DataColumn(csvReader.GetName(i), csvReader.GetFieldType(i));
                    }
                    dt.Columns.Add(dc);
                }
            }
            // Map the current record into the single staging row.
            var newRow = dt.Rows.Add();
            foreach (DataColumn col in dt.Columns)
            {
                newRow[col.ColumnName] = csvReader[col.ColumnName];
            }
            // Create a temporary DataView and filter it with the where clause.
            DataView dv = new DataView(dt);
            dv.RowFilter = where;
            var data = dv.Count > 0 ? dv[0] : null;
            if (data != null)
            {
                // Row in here matches your where clause.
                // Code to read this row or do something with it.
            }
            // Empty the staging table before the next record.
            dt.Rows.Clear();
        }
    }
}
Let’s say the source MS Access database has a table called MyTable1, and MyTable1 has a composite primary key (a combination of two separate fields, Phone and City). When I copy it using the following code, City does not become part of the composite key in the target table. How do I fix this?
// Copy the source table's schema (columns + composite primary key) into a new
// ADOX table on the target catalog.
ADOX.Table sourceTable = default(ADOX.Table);
sourceTable = sourceCat.Tables[tableName.Trim()];
ADOX.Table newTable = new ADOX.Table();
newTable.ParentCatalog = targetCat;
tempNewtableName = sourceCat.Tables[tableName.Trim()].Name;
newTable.Name = tempNewtableName;
ADOX.Column newCol = default(ADOX.Column);
DataTable primaryKeyDT = new DataTable();
primaryKeyDT.Columns.Add("FieldName");
primaryKeyDT.Columns.Add("Type");
foreach (ADOX.Index idx1 in sourceCat.Tables[tableName].Indexes)
{
    if (idx1.PrimaryKey == true)
    {
        // BUG FIX: the original read only idx1.Columns[0], silently dropping
        // every primary-key column after the first (e.g. City in Phone+City).
        foreach (ADOX.Column pkCol in idx1.Columns)
        {
            primaryKeyDT.Rows.Add(pkCol.Name, idx1.Name);
        }
    }
}
foreach (ADOX.Column SourceCol in sourceTable.Columns)
{
    // Clone each column definition onto the new table.
    newCol = new ADOX.Column();
    newCol.Type = SourceCol.Type;
    newCol.DefinedSize = SourceCol.DefinedSize;
    newCol.ParentCatalog = targetCat;
    newCol.Precision = SourceCol.Precision;
    newCol.Attributes = SourceCol.Attributes;
    newCol.Name = SourceCol.Name;
    newCol.NumericScale = SourceCol.NumericScale;
    newTable.Columns.Append(newCol);
}
// BUG FIX: build ONE primary-key index containing every key column, after all
// columns exist. The original appended a separate single-column PK index per
// matching column, so the composite key was never recreated.
if (primaryKeyDT.Rows.Count > 0)
{
    idx = new Index();
    idx.Name = "idx_primary";
    idx.PrimaryKey = true;
    foreach (DataRow pkRow in primaryKeyDT.Rows)
    {
        idx.Columns.Append(pkRow["FieldName"].ToString());
    }
    newTable.Indexes.Append(idx);
}
targetCat.Tables.Append(newTable);
You should iterate through the primary-key index's columns and append each key field to the idx.Columns collection, so the whole composite key is recreated as a single index.