I am developing a web application in which I have to import data into SQL Server from given Excel files, using C# and ASP.NET MVC. For this purpose I followed this article, so I used ExcelDataReader to read the Excel files. Furthermore, I used SqlBulkCopy to insert the data into the database. The following is my code.
The Create method:
var bData = getBillData();
var connString = ConfigurationManager.ConnectionStrings["WASABill"].ConnectionString;
DataTable table = new DataTable();
using (var reader = ObjectReader.Create(bData))
{
table.Load(reader);
}
using (SqlBulkCopy bcp = new SqlBulkCopy(connString))
{
bcp.ColumnMappings.Add("AccountNo", "AccountNo");
bcp.ColumnMappings.Add("BillNo", "BillNo");
bcp.ColumnMappings.Add("Category", "Category");
bcp.ColumnMappings.Add("Billing_Period", "Billing_Period");
bcp.ColumnMappings.Add("Name", "Name");
bcp.ColumnMappings.Add("Address", "Address");
bcp.ColumnMappings.Add("Issue_Date", "Issue_Date");
bcp.ColumnMappings.Add("Due_Date", "Due_Date");
bcp.ColumnMappings.Add("Water_Bill", "Water_Bill");
bcp.ColumnMappings.Add("Sewerage_Bill", "Sewerage_Bill");
bcp.ColumnMappings.Add("Aquifer_Charges", "Aquifer_Charges");
bcp.ColumnMappings.Add("Current_Amount", "Current_Amount");
bcp.ColumnMappings.Add("Arrears", "Arrears");
bcp.ColumnMappings.Add("Service_Charges", "Service_Charges");
bcp.ColumnMappings.Add("Payable_within_DueDate", "Payable_within_DueDate");
bcp.ColumnMappings.Add("Surcharge", "Surcharge");
bcp.ColumnMappings.Add("Payable_after_DueDate", "Payable_after_DueDate");
bcp.ColumnMappings.Add("Payment_History_1", "Payment_History_1");
bcp.ColumnMappings.Add("Paid_1", "Paid_1");
bcp.ColumnMappings.Add("Payment_History_2", "Payment_History_2");
bcp.ColumnMappings.Add("Paid_2", "Paid_2");
bcp.ColumnMappings.Add("Payment_History_3", "Payment_History_3");
bcp.ColumnMappings.Add("Paid_3", "Paid_3");
bcp.ColumnMappings.Add("Area", "Area");
bcp.ColumnMappings.Add("Water_Rate", "Water_Rate");
bcp.ColumnMappings.Add("Sewerage_Rate", "Sewerage_Rate");
bcp.ColumnMappings.Add("Discharge_Basis", "Discharge_Basis");
bcp.ColumnMappings.Add("Pump_Size", "Pump_Size");
bcp.ColumnMappings.Add("Ferrule_Size", "Ferrule_Size");
bcp.ColumnMappings.Add("Meter_Type", "Meter_Type");
bcp.ColumnMappings.Add("Meter_Status", "Meter_Status");
bcp.ColumnMappings.Add("Last_Readin", "Last_Readin");
bcp.ColumnMappings.Add("Current_Reading", "Current_Reading");
bcp.ColumnMappings.Add("Water_Aquiffer_Charges", "Water_Aquiffer_Charges");
bcp.DestinationTableName = "WASA_Bill_Detail";
bcp.WriteToServer(table);
}
var rowCount = table.Rows.Count; //Number of rows in data table
//if (ModelState.IsValid)
//{
// db.WASA_Bill_Detail.Add(wASA_Bill_Detail);
// db.SaveChanges();
// return RedirectToAction("Index");
//}
TempData["RowCount"] = rowCount;
return RedirectToAction("Index");
The method that reads the Excel file and returns the data as a list:
public IEnumerable<WASA_Bill_Detail> getBillData()
{
List<WASA_Bill_Detail> billDetaileList = new List<WASA_Bill_Detail>();
//string path = TempData["FilePath"].ToString();//@"E:\W317.xlsx";
string path = TempData["FilePath"].ToString();
string excelpath = Server.MapPath(path);
if(path!=null)
{
var excelData = new ExcelData(excelpath);
var billRecords = excelData.getData("Sheet1");
foreach (var row in billRecords)
{
var billDetail = new WASA_Bill_Detail()
{
AccountNo = row["ACCOUNT#"].ToString(),
BillNo = row["BILLNO"].ToString(),
Category = row["CATEGORY"].ToString(),
Billing_Period = row["BILLING_PERIOD"].ToString(),
Name = row["NAME"].ToString(),
Address = row["ADDRESS"].ToString(),
Issue_Date = row["ISSUE_DATE"].ToString(),
Due_Date = row["DUE_DATE"].ToString(),
Water_Bill = row["WATER_BILL"].ToString(),
Sewerage_Bill = row["SEWERAGE BILL"].ToString(),
Aquifer_Charges = row["AQUIFER"].ToString(),
Current_Amount = row["CURRENT AMOUNT"].ToString(),
Arrears = row["ARREARS"].ToString(),
Service_Charges = row["SERVICE CHARGES"].ToString(),
Payable_within_DueDate = row["PAYABLE WITHIN DUEDATE"].ToString(),
Surcharge = row["SURCHARGE"].ToString(),
Payable_after_DueDate = row["AFTER DUE DATE"].ToString(),
Payment_History_1 = row["PAY HISTORY 1"].ToString(),
Paid_1 = row["PAID 1"].ToString(),
Payment_History_2 = row["PAY HISOTRY 2"].ToString(),
Paid_2 = row["PAID 2"].ToString(),
Payment_History_3 = row["PAY HISOTRY 3"].ToString(),
Paid_3 = row["PAID 3"].ToString(),
Area = row["AREA"].ToString(),
Water_Rate = row["WATER RATE"].ToString(),
Sewerage_Rate = row["SEWER RATE"].ToString(),
Discharge_Basis = row["DISCHAGE"].ToString(),
Pump_Size = row["PUMP SIZE"].ToString(),
Ferrule_Size = row["FERRULE SIZE"].ToString(),
Meter_Type = row["METER TYPE"].ToString(),
Meter_Status = row["METER STATUS"].ToString(),
Last_Readin = row["LAST READING"].ToString(),
Current_Reading = row["CURRENT READING"].ToString(),
Water_Aquiffer_Charges = row["AQUIFER CHARGES"].ToString(),
};
billDetaileList.Add(billDetail);
}
}
return billDetaileList;
}
Everything works fine on my development machine: the file is uploaded properly and then inserted into the database using SqlBulkCopy. But when I publish this to the hosting server, a NullReferenceException occurs at
WASAWeb.Controllers.AdminControllers.WASA_Bill_DetailController.getBillData() +128
I could not understand this, since it works 100% fine on my development machine, and I have checked that the file is properly uploaded to the server. Any help with this?
You can use this:
private string GetStringValue(object obj)
{
string str = null;
if(obj != null)
str = obj.ToString().Trim();
return str;
}
Call it like this:
......
AccountNo = GetStringValue(row["ACCOUNT#"])
......
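Applied to getBillData, the start of the object initializer would then look like this (a sketch showing only the first few properties; the remaining assignments follow the same pattern):
var billDetail = new WASA_Bill_Detail()
{
    // GetStringValue returns null instead of throwing when a cell comes back empty
    AccountNo = GetStringValue(row["ACCOUNT#"]),
    BillNo = GetStringValue(row["BILLNO"]),
    Category = GetStringValue(row["CATEGORY"]),
    // ... and so on for the remaining columns
};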
Related
I want to read the table data from an RfcTable in C#. I am able to get the table structure, but I am not getting the data in the table. Below is the code for that:
RfcDestinationManager.RegisterDestinationConfiguration(config__);
RfcDestination prdData = RfcDestinationManager.GetDestination("Vim Analytics");
RfcRepository repos = prdData.Repository;
IRfcFunction InvoiceVIMAny = repos.CreateFunction("BAPI NAME");
InvoiceVIMAny.SetValue("I_INVOICE_NO", InvoiceNumber);
InvoiceVIMAny.SetValue("I_VENDOR_ID", VendorID);
InvoiceVIMAny.Invoke(prdData);
if (InvoiceVIMAny.ElementCount > 0 && InvoiceVIMAny.ElementCount != null)
{
IRfcTable rfcTable = InvoiceVIMAny.GetTable(0);
foreach (IRfcStructure dataRow in rfcTable)
{
invstatusResp.E_STATUS = dataRow.GetValue("E_STATUS").ToString();
invstatusResp.E_DOC_ID = dataRow.GetValue("E_DOC_ID").ToString();
invstatusResp.E_PO_NUMBER = dataRow.GetValue("E_PO_NUMBER").ToString();
invstatusResp.E_INVOICE = dataRow.GetValue("E_INVOICE").ToString();
invstatusResp.E_REASON = dataRow.GetValue("E_REASON").ToString();
invstatusResp.E_DOC_DATE = dataRow.GetValue("E_DOC_DATE").ToString();
invstatusResp.E_POSTING_DATE = dataRow.GetValue("E_POSTING_DATE").ToString();
invstatusResp.E_ENTRY_DATE = dataRow.GetValue("E_ENTRY_DATE").ToString();
invstatusResp.E_DUE_DATE = dataRow.GetValue("E_DUE_DATE").ToString();
invstatusResp.E_GROSS_AMT = dataRow.GetValue("E_GROSS_AMT").ToString();
invstatusResp.E_COMMENTS = dataRow.GetValue("E_COMMENTS").ToString();
invstatusResp.E_CUR_ROLE = dataRow.GetValue("E_CUR_ROLE").ToString();
listinvstatusResp.Add(invstatusResp);
}
}
RfcDestinationManager.UnregisterDestinationConfiguration(config__);
I have this function that opens a .TXT file with some products and inserts them line by line into the SQLite DB. The process works, but the problem is that the file contains 2000+ lines, and because of that the process takes several hours to finish. I wonder if there is a way to make it a bit faster.
Here is the function:
private void carrega_produtos()
{
var assembly = typeof(sincroniza_page).GetTypeInfo().Assembly;
foreach (var res in assembly.GetManifestResourceNames())
{
if (res.Contains("produtos.txt"))
{
Stream stream = assembly.GetManifestResourceStream(res);
var st = res.Count();
using (var reader = new StreamReader(stream))
{
string linha;
acesso_banco_produtos banco = new acesso_banco_produtos();
while ((linha = reader.ReadLine()) != null)
{
List<string> lista = linha.Split(new char[] { '§' }).ToList();
var cod = int.Parse(lista.ElementAt(0));
var nome_prod = lista.ElementAt(1);
var cod_grupo = lista.ElementAt(2);
var nm_grupo = lista.ElementAt(3);
var ind_ativo = lista.ElementAt(4);
var val_custo_unit = lista.ElementAt(5);
var val_custo = lista.ElementAt(6);
var perc_imposto = lista.ElementAt(7);
var unidade_med = lista.ElementAt(8);
var qtd_mes_1 = lista.ElementAt(9);
var qtd_mes_2 = lista.ElementAt(10);
var qtd_mes_3 = lista.ElementAt(11);
var qtd_mes_6 = lista.ElementAt(12);
var qtd_mes_12 = lista.ElementAt(13);
var data = lista.ElementAt(14);
var bd = new banco_produtos()
{
cod_produto = cod,
nm_produto = nome_prod,
cod_grupo = cod_grupo,
nm_grupo = nm_grupo,
ind_ativo = ind_ativo,
val_custo_unitario = Double.Parse(val_custo_unit),
val_lista_preco = val_custo,
perc_impostos = perc_imposto,
unidade_medida = unidade_med,
qtde_vendida_mes_1 = qtd_mes_1,
qtde_vendida_mes_2 = qtd_mes_2,
qtde_vendida_mes_3 = qtd_mes_3,
qtde_vendida_mes_6 = qtd_mes_6,
qtde_vendida_mes_12 = qtd_mes_12
};
//here i look in the DB if already exists the new product
var procura = banco.get_produto(cod);
if (procura == null)
{
// here is inserted to the db
banco.inserir_produto(bd);
}
}
valor += 1;
}
}
}
}
I'm not sure what is inside your method that inserts data into the DB, but the most common issue with SQLite and massive inserts is that SQLite by default wraps every insert in its own transaction, which creates significant overhead. A good practice for such cases is to use a single transaction for all the inserts, which should significantly improve the performance; see the example below.
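A minimal sketch of the idea, assuming acesso_banco_produtos writes through a SQLite.SQLiteConnection that it exposes (called conn here; the property name is illustrative), and that produtos is the List<banco_produtos> already parsed from the file:
// One transaction for the whole batch instead of one per insert.
conn.RunInTransaction(() =>
{
    foreach (var p in produtos)
    {
        // same existence check as before, now inside the single transaction
        if (banco.get_produto(p.cod_produto) == null)
            banco.inserir_produto(p);
    }
});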
I did what @Dmytro said and used the InsertOrIgnore method. It improved a lot using that method. Thank you for the help.
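For reference, plain sqlite-net can express the same idea with an extra clause on the bulk insert (a sketch, reusing conn and produtos from the sketch above and assuming cod_produto is declared as the primary key so that "OR IGNORE" can skip existing rows):
// InsertAll runs in a single transaction by default;
// "OR IGNORE" silently skips rows whose primary key is already present.
conn.InsertAll(produtos, "OR IGNORE");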
I've put together a CSV importer which I assume works, though I get the error below. How do I allow this column to be null, so that when the record is added to the table the ID is set automatically? I've tried:
csv.Configuration.WillThrowOnMissingFields = false;
but it doesn't recognise that setting. This is the error I get when attempting the upload:
CsvHelper.ValidationException: 'Header matching ['ID'] names at index 0 was not found. If you are expecting some headers to be missing and want to ignore this validation, set the configuration HeaderValidated to null. You can also change the functionality to do something else, like logging the issue.'
[HttpPost]
[ActionName("CreateBulk")]
public ActionResult CreateBulkUpload()
{
object db;
var file = Request.Files["attachmentcsv"];
using (var csv = new CsvReader(new StreamReader(file.InputStream), true))
{
var records = csv.GetRecords<Client>().ToList();
foreach (var item in records)
{
var strip = item.homePage.Replace("https://www.", "").Replace("http://www.", "")
.Replace("https://", "").Replace("http://", "").Replace("www.", "");
string[] URLtests =
{"https://www." + strip, "http://www." + strip, "https://" + strip, "http://" + strip};
string[] Metric = MajesticFunctions.MajesticChecker(URLtests);
var userId = User.Identity.GetHashCode();
var UserTableID = 1;
var newclient = new Client
{
clientN = item.clientN,
homePage = Metric[0],
clientEmail = item.clientEmail,
monthlyQuota = item.monthlyQuota,
TrustFlow = Int32.Parse(Metric[1]),
CitationFlow = Int32.Parse(Metric[2]),
RI = Int32.Parse(Metric[3]),
MJTopicsID = item.MJTopicsID,
UserTableID = UserTableID
};
ViewBag.newdomain = newclient;
return RedirectToAction("Index");
}
}
return RedirectToAction("Index");
}
Did you try the suggestion mentioned in the error message? Like this:
csv.Configuration.HeaderValidated = null;
The developer made some breaking changes this year, so the accepted answer will no longer work.
Instead, you have to create a configuration object in advance and inject it in the constructor:
var config = new CsvConfiguration(CultureInfo.InvariantCulture)
{
HeaderValidated = null
};
using (var reader = new StreamReader(file))
using (var csv = new CsvReader(reader, config))
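and then read the records inside the using block as usual, for example:
{
    var records = csv.GetRecords<Client>().ToList();
    // ... process records ...
}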
Make sure to include both these lines:
csv.Configuration.HeaderValidated = null;
csv.Configuration.MissingFieldFound = null;
I have created a database with SQLite in my Windows app, and now I want to fetch the data from it. This is the code with which I inserted the data into the database:
var dbpath = Path.Combine(Windows.Storage.ApplicationData.Current.LocalFolder.Path, "scrapbook.sqlite");
using (var db = new SQLite.SQLiteConnection(dbpath))
{
// Create the tables if they don't exist
var bg = listAlbumContainer[0] as AlbumContainer;
var albumbg = bg.BackgroundImageName;
var sk = listAlbumContainer[0].listAlbumContainer;
var _audio = listAlbumContainer[0].listAlbumContainer5;
var _video = listAlbumContainer[0].listAlbumContainer3;
var wa = listAlbumContainer[0].listAlbumContainer4;
var ci = listAlbumContainer[0].listAlbumContainer2;
var gi = listAlbumContainer[0].listAlbumContainer1;
string snodesticker = Serializeanddeserializwhelper.Serialize(sk);
string nodegi = Serializeanddeserializwhelper.Serialize(gi);
string nodeci = Serializeanddeserializwhelper.Serialize(ci);
string nodewa = Serializeanddeserializwhelper.Serialize(wa);
string nodevideo = Serializeanddeserializwhelper.Serialize(_video);
string nodeaudio = Serializeanddeserializwhelper.Serialize(_audio);
var TittledataInsertCheck = db.Table<Title>().Where(x => x.ALBUM_TITLE.Contains(nametxt.Text)).FirstOrDefault();
if (TittledataInsertCheck == null)
{
db.Insert(new Title()
{
ALBUM_TITLE = nametxt.Text
});
var TittledataInsert = db.Table<Title>().Where(x => x.ALBUM_TITLE.Contains(nametxt.Text)).FirstOrDefault();
if (TittledataInsert != null)
{
db.Insert(new PAGE()
{
PAGE_BACKGROUND = albumbg,
TITLE_ID = TittledataInsert.ID
});
}
}
else
{
await new MessageDialog("Tittle Already Found Please change tittle").ShowAsync();
return;
}
var PagedataInsert = db.Table<PAGE>().Where(x => x.PAGE_BACKGROUND.Contains(albumbg)).FirstOrDefault();
if (PagedataInsert != null)
{
db.Insert(new CONTENT
{
PAGE_ID = PagedataInsert.ID,
STICKERS = snodesticker,
AUDIO = nodeaudio,
VIDEO = nodevideo,
GALLERY_IMAGES = nodegi,
CAMERA_IMAGES = nodeci,
WOARD_ART = nodewa
});
}
db.Commit();
db.Dispose();
db.Close();
var line = new MessageDialog("Records Inserted");
await line.ShowAsync();
}
Is this the right way to insert the data? I have a canvas on which I set a background image, and on this background there are some images, video and audio.
I am trying to import data from my POS system into QuickBooks to create General Journal entries. When I add some static (hard-coded) data, everything is saved successfully. But when I add data from my app, QB accepts it through the dataService, yet when I sync, the data does not come through in QuickBooks. Could you please help me with this problem? Here is my code:
//main code to add General Journal Entry to QB
var header = GenerateReportHeader(model);
var creditInfo = GenerateJournalEntryLines(model.CreditInformation, PostingTypeEnum.Credit);
var debitInfo = GenerateJournalEntryLines(model.DebitInformation, PostingTypeEnum.Debit);
var allLines = creditInfo.Concat(debitInfo).ToArray();
var result = new JournalEntry();
result.Header = header;
result.Line = allLines;
dataService.Add(result);
//Add Header
private JournalEntryHeader GenerateReportHeader(GeneralJournalModel model)
{
var result = new JournalEntryHeader
{
TxnDate = new DateTime(2013,7,1),
Status = "Payable",
Adjustment = false,
TxnDateSpecified = true
};
if (!String.IsNullOrEmpty(model.EntryNo))
{
result.DocNumber = model.EntryNo;
}
return result;
}
//Add Line
private JournalEntryLine[] GenerateJournalEntryLines(List<GeneralJournalEntryModel> model, PostingTypeEnum postType)
{
var result = new JournalEntryLine[model.Count];
for (int i = 0; i < model.Count; i++)
{
var journalEntryLine = model[i];
var account = GetAccountByNumber(journalEntryLine.AccountNumber);
var toAdd = new JournalEntryLine
{
AccountId = account.Id,
AccountType = account.Type,
AccountName = account.Name,
Amount = Convert.ToDecimal(journalEntryLine.Amount),
AmountSpecified = true,
PostingType = postType,
AccountTypeSpecified = true,
Id = new IdType(),
PostingTypeSpecified = true
};
if (journalEntryLine.EntityId != null)
{
toAdd.EntityId = GetEntityId(journalEntryLine.EntityType, journalEntryLine.EntityId);
toAdd.EntityType = GetEntityType(journalEntryLine.EntityType);
toAdd.EntityName = GetEntityName(journalEntryLine.EntityType, journalEntryLine.EntityId);
toAdd.EntityTypeSpecified = true;
}
result[i] = toAdd;
}
return result;
}
Did you check the SyncStatus to find out why it didn't come through?
This should be where you start:
https://developer.intuit.com/docs/0025_quickbooksapi/0050_data_services/v2/0500_quickbooks_windows/0600_object_reference/syncstatus
It will give you more detail about specifically why the data failed to sync over.