I have two tables (qf_schoolQuestion, qf_schoolOption). The qf_schoolOption table has a foreign key, questionId, which is the primary key of the qf_schoolQuestion table.
TABLES:
qf_schoolQuestion | qf_schoolOption
------------------|-----------------
questionId        | schoolOptionId
questionTitle     | questionId
etc. columns      | firstChoice
                  | secondChoice
                  | thirdChoice
                  | fourthChoice
I want to bulk insert the Excel data into both tables. The problem is that my code inserts records into both tables, and the inserts into qf_schoolQuestion are all correct, but when it inserts the records into the other table, qf_schoolOption, a problem occurs...
The problem: suppose we have two records in the Excel sheet. The insert into qf_schoolQuestion correctly produces two records with two generated primary keys, but the insert into qf_schoolOption puts 4 records into SQL Server.
Suppose the ids generated by the qf_schoolQuestion table are '2044' and '2045'; then qf_schoolOption gets both choice rows from the Excel sheet inserted for id '2044', and both records again for id '2045'.
HttpPost:
public JsonResult SchoolQuesImport(HttpPostedFileBase fileUpload)
{
List<string> data = new List<string>();
if (fileUpload != null)
{
if (fileUpload.ContentType == "application/vnd.ms-excel" || fileUpload.ContentType == "application/octet-stream" || fileUpload.ContentType == "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
{
string filename = fileUpload.FileName;
string targetpath = Server.MapPath("~/UploadContent/");
fileUpload.SaveAs(targetpath + filename);
string pathToExcelFile = targetpath + filename;
var connectionString = "";
if (filename.EndsWith(".xls"))
{
connectionString = string.Format("Provider=Microsoft.Jet.OLEDB.4.0; data source={0}; Extended Properties=Excel 8.0; HDR=YES", pathToExcelFile);
}
else if (filename.EndsWith(".xlsx"))
{
connectionString = string.Format("Provider=Microsoft.ACE.OLEDB.12.0;Data Source={0};Extended Properties=\"Excel 12.0 Xml;HDR=YES;IMEX=1\";", pathToExcelFile);
}
//"Xml;HDR=YES;IMEX=1\";
var adapter = new OleDbDataAdapter("SELECT * FROM [Sheet1$]", connectionString);
var ds = new DataSet();
adapter.Fill(ds, "ExcelTable");
DataTable dtable = ds.Tables["ExcelTable"];
string sheetName = "Sheet1";
var excelFile = new ExcelQueryFactory(pathToExcelFile);
var artistAlbums = from a in excelFile.Worksheet<qf_schoolQuestion>(sheetName) select a;
var parentartistalbum = from b in excelFile.Worksheet<qf_schoolOption>(sheetName) select b;
foreach (var a in artistAlbums)
{
try
{
if (a.questionTitle != "" && a.questionLevel != "" && a.questionLang != "")
{
qf_schoolQuestion sq = new qf_schoolQuestion();
#region School Question
//sq.questionId = a.questionId;
sq.examTypeId = a.examTypeId;
sq.examCategoryId = a.examCategoryId;
sq.examSubCategoryId = a.examSubCategoryId;
sq.examSubjectId = a.examSubjectId;
sq.questionLang = a.questionLang;
sq.questionLevel = a.questionLevel;
sq.questionRefBook = a.questionRefBook;
sq.questionTitle = a.questionTitle;
sq.questionYear = a.questionYear;
sq.createdDate = DateTime.Now;
sq.createdBy = User.Identity.Name;
#endregion
context.qf_schoolQuestion.Add(sq);
context.SaveChanges();
foreach (var b in parentartistalbum)
{
try
{
if (b.firstChoice != "")
{
qf_schoolOption so = new qf_schoolOption();
#region School Options
so.questionId = sq.questionId;
so.firstChoice = b.firstChoice;
so.secondChoice = b.secondChoice;
so.thirdChoice = b.thirdChoice;
so.fourthChoice = b.fourthChoice;
#endregion
context.qf_schoolOption.Add(so);
context.SaveChanges();
}
else
{
data.Add("<ul>");
if (b.firstChoice == "" || b.secondChoice == null) data.Add("<li>Choices are required.</li>");
data.Add("</ul>");
data.ToArray();
return Json(data, JsonRequestBehavior.AllowGet);
}
}
catch (DbEntityValidationException ex)
{
foreach (var entityValidationErrors in ex.EntityValidationErrors)
{
foreach (var validationError in entityValidationErrors.ValidationErrors)
{
Response.Write("Property: " + validationError.PropertyName + " Error: " + validationError.ErrorMessage);
}
}
}
}
}
else
{
data.Add("<ul>");
if (a.questionLang == "" || a.questionLang == null) data.Add("<li>Question Language is required.</li>");
data.Add("</ul>");
data.ToArray();
return Json(data, JsonRequestBehavior.AllowGet);
}
}
catch (DbEntityValidationException ex)
{
foreach (var entityValidationErrors in ex.EntityValidationErrors)
{
foreach (var validationError in entityValidationErrors.ValidationErrors)
{
Response.Write("Property: " + validationError.PropertyName + " Error: " + validationError.ErrorMessage);
}
}
}
}
//deleting excel file from folder
if ((System.IO.File.Exists(pathToExcelFile)))
{
System.IO.File.Delete(pathToExcelFile);
}
return Json("success", JsonRequestBehavior.AllowGet);
}
else
{
//alert message for invalid file format
data.Add("Only Excel file format is allowed");
data.ToArray();
return Json(data, JsonRequestBehavior.AllowGet);
}
}
else
{
if (fileUpload == null) data.Add("Please choose Excel file");
data.ToArray();
return Json(data, JsonRequestBehavior.AllowGet);
}
}
Honestly the description of the issue is not very clear, but I'll try to propose a solution: the duplication comes from the nested loop. For every question row in artistAlbums you iterate over every option row in parentartistalbum, so with two Excel rows each option row gets inserted twice, once under each generated questionId. Pair each question row with its own option row instead; equivalently, first insert all records into qf_schoolQuestion and then insert each record's matching row into qf_schoolOption. This way you avoid the record duplication.
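As a minimal sketch of that pairing, assuming both LINQ-to-Excel projections read the same worksheet, so row i of artistAlbums lines up with row i of parentartistalbum (validation and error handling omitted):
var questions = artistAlbums.ToList();   // materialize both projections once
var options = parentartistalbum.ToList();
for (int i = 0; i < questions.Count; i++)
{
    var sq = new qf_schoolQuestion
    {
        questionTitle = questions[i].questionTitle,
        questionLevel = questions[i].questionLevel,
        questionLang = questions[i].questionLang,
        createdDate = DateTime.Now
        // ... remaining columns as in the original code
    };
    context.qf_schoolQuestion.Add(sq);
    context.SaveChanges();               // generates sq.questionId
    context.qf_schoolOption.Add(new qf_schoolOption
    {
        questionId = sq.questionId,      // FK to the question just inserted
        firstChoice = options[i].firstChoice,
        secondChoice = options[i].secondChoice,
        thirdChoice = options[i].thirdChoice,
        fourthChoice = options[i].fourthChoice
    });
}
context.SaveChanges();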
I have to do a lot of SQL inserts without using stored procedures.
For big classes the insert strings get huge, so I was thinking of building a generalized insert function that handles it when passed an object. What I've written below works, but it's not ideal because (1) I have to specify all possible data types and (2) I have to convert all values back to strings to build the insert string; I'd rather be using parameters with values.
I just want a function where I pass in a class object and the function inserts the object's values into the database (given that all the column names in the table match the property names of the object).
Any ideas would be greatly appreciated, thanks.
public static IEnumerable<KeyValuePair<string, T>> PropertiesOfType<T>(object obj)
{
return from p in obj.GetType().GetProperties()
where p.PropertyType == typeof(T)
select new KeyValuePair<string, T>(p.Name, (T)p.GetValue(obj, null));
}
public string InsertString(string _table, object _class)
{
Dictionary<string, string> returnDict = new Dictionary<string, string>();
StringBuilder sb = new StringBuilder();
foreach (var property in PropertiesOfType<DateTime>(_class))
returnDict.Add(property.Key, property.Value.ToString("yyyy-MM-dd HH:mm:ss"));
foreach (var property in PropertiesOfType<string>(_class))
returnDict.Add(property.Key, property.Value);
foreach (var property in PropertiesOfType<int>(_class))
{
if (property.Key == "Key")
continue;
returnDict.Add(property.Key, property.Value.ToString());
}
foreach (var property in PropertiesOfType<bool>(_class))
{
if (property.Value)
returnDict.Add(property.Key, "1");
else
returnDict.Add(property.Key, "0");
}
foreach (var property in PropertiesOfType<decimal>(_class))
returnDict.Add(property.Key, property.Value.ToString());
foreach (var property in PropertiesOfType<long>(_class))
returnDict.Add(property.Key, property.Value.ToString());
if (returnDict.Count == 1)
{
sb.Append(string.Format("INSERT INTO [{0}] ({1}) VALUES ('{2}')", _table, returnDict.ElementAt(0).Key, returnDict.ElementAt(0).Value));
}
else
{
for (int i = 0; i < returnDict.Count; i++)
{
if (i == 0)
sb.Append(string.Format("INSERT INTO [{0}] ({1}, ", _table, returnDict.ElementAt(i).Key));
else if (i == returnDict.Count - 1)
sb.Append(string.Format("{0}) ", returnDict.ElementAt(i).Key));
else
sb.Append(string.Format("{0}, ", returnDict.ElementAt(i).Key));
}
for (int i = 0; i < returnDict.Count; i++)
{
if (i == 0)
sb.Append(string.Format("VALUES ('{0}', ", returnDict.ElementAt(i).Value));
else if (i == returnDict.Count - 1)
sb.Append(string.Format("'{0}')", returnDict.ElementAt(i).Value));
else
sb.Append(string.Format("'{0}', ", returnDict.ElementAt(i).Value));
}
}
return sb.ToString();
}
string query = InsertString(_table, _obj);
I've managed to find a way to do this that I'm pretty happy about that doesn't require any external libraries or frameworks.
Based on @HardikParmar's suggestion, I built a new process that converts a class object into a DataTable, which stores all the relevant data types as columns.
Then I add a row to the structured DataTable using the class object.
Now you have a DataTable with one row of values.
Then I create a parameterized insert statement, and in my command text I add the values to the parameters.
Almost clean, always room for improvement.
//this function creates the datatable from a specified class type, you may exclude properties such as primary keys
public DataTable ClassToDataTable<T>(List<string> _excludeList = null)
{
Type classType = typeof(T);
List<PropertyInfo> propertyList = classType.GetProperties().ToList();
DataTable result = new DataTable(classType.Name);
foreach (PropertyInfo prop in propertyList)
{
if (_excludeList != null && _excludeList.Contains(prop.Name))
    continue;
// DataColumn cannot hold Nullable<T> types; unwrap to the underlying type
result.Columns.Add(prop.Name, Nullable.GetUnderlyingType(prop.PropertyType) ?? prop.PropertyType);
}
return result;
}
//add data to the table
public void AddRow(ref DataTable table, object data)
{
Type classType = data.GetType();
string className = classType.Name;
if (!table.TableName.Equals(className))
{
throw new Exception("DataTableConverter.AddRow: " +
"TableName not equal to className.");
}
DataRow row = table.NewRow();
foreach (PropertyInfo prop in classType.GetProperties())
{
    // Copy only properties that have a matching column (excluded ones were never added);
    // a DataRow value cannot be set to null, so substitute DBNull.Value
    if (table.Columns.Contains(prop.Name))
        row[prop.Name] = prop.GetValue(data, null) ?? DBNull.Value;
}
table.Rows.Add(row);
}
//creates the insert string
public string MakeInsertParamString(string _tableName, DataTable _dt, string _condition=null)
{
StringBuilder sb = new StringBuilder();
sb.Append(string.Format("INSERT INTO [{0}] (", _tableName));
for (int i = 0; i < _dt.Columns.Count; i++)
{
sb.Append(string.Format("{0}", _dt.Columns[i].ColumnName));
if (i < _dt.Columns.Count - 1)
sb.Append(", ");
}
sb.Append(") VALUES (");
for (int i = 0; i < _dt.Columns.Count; i++)
{
sb.Append(string.Format("#{0}", _dt.Columns[i].ColumnName));
if (i < _dt.Columns.Count - 1)
sb.Append(", ");
}
sb.Append(")");
// Note: a WHERE clause is not valid on a plain INSERT statement; leave _condition null for inserts
if (!string.IsNullOrEmpty(_condition))
    sb.Append(" WHERE " + _condition);
return sb.ToString();
}
//inserts into the database
public string InsertUsingDataRow(string _tableName, DataTable _dt, string _condition = null)
{
try
{
using (SQLiteConnection conn = new SQLiteConnection(_dbPath))
{
string query = MakeInsertParamString(_tableName, _dt, _condition);
using (SQLiteCommand cmd = new SQLiteCommand(query, conn))
{
    foreach (DataColumn col in _dt.Columns)
    {
        var objectValue = _dt.Rows[0][col.ColumnName];
        // System.Data.SQLite expects the @ prefix on parameter names
        cmd.Parameters.AddWithValue("@" + col.ColumnName, objectValue);
    }
    conn.Open();
    cmd.ExecuteNonQuery();
}
}
//return MakeInsertParamString(_tableName, _dt, _condition);
return "Success";
}
catch (Exception ex) { return ex.Message; }
}
I know this question has been asked many times, but I have the following piece of code that I am trying to use to default null values. Can someone please help me? I tried this code, but instead of giving me "NO DATA" for the null values, it doesn't display anything. Not sure where I am going wrong.
More detail: this code does not replace null values with the "NO DATA" string. What is wrong here? What do I need to change in order for it to display "NO DATA"?
protected override void Execute(NativeActivityContext context)
{
DataSet dataset = GetDataSet.Get(context);
foreach (DataTable dt in dataset.Tables)
{
foreach (DataRow row in dataset.Tables[0].Rows)
{
if (row["USER_COMMENT"] is System.DBNull)
{
ConvertNullToEmptyString(dt);
Console.WriteLine("In if");
}
else
{
Console.WriteLine("out if");
}
}
}
TransformResult.Set(context, dataset);
}
private static string ConvertNullToEmptyString(DataTable element)
{
if (element.Rows[0]["USER_COMMENT"] == DBNull.Value || element.Rows[0]["USER_COMMENT"] == null)
{
return "NO DATA";
}
else
{
return element.Rows[0]["USER_COMMENT"].ToString();
}
}
Here is what I would do:
string stringToCompare = row["USER_COMMENT"].ToString(); // inside the row loop
if (string.IsNullOrWhiteSpace(stringToCompare))
{
    stringToCompare = "NO DATA";
}
There's no need for an extra function; just assign "NO DATA" inside the loop, like below:
foreach (DataRow row in dataset.Tables[0].Rows)
{
if (row["USER_COMMENT"] is System.DBNull)
{
row["USER_COMMENT"] = "NO DATA";
Console.WriteLine("In if");
}
else
{
Console.WriteLine("out if");
}
}
A couple things that might help:
You probably want to change this:
foreach (DataTable dt in dataset.Tables)
{
foreach (DataRow row in dataset.Tables[0].Rows)
{
...
to this:
foreach (DataTable dt in dataset.Tables)
{
foreach (DataRow row in dt.Rows)
{
...
Otherwise you will only ever query the first table in your loop.
Also, I'd use String.IsNullOrEmpty() to interrogate the data.
If you don't need a second function to do it, try something like this:
protected override void Execute(NativeActivityContext context) {
DataSet dataset = GetDataSet.Get(context);
foreach(DataTable dt in dataset.Tables) {
foreach(DataRow row in dt.Rows) {
row["USER_COMMENT"] = String.IsNullOrEmpty(row["USER_COMMENT"].ToString()) ? "NO DATA" : row["USER_COMMENT"];
}
}
TransformResult.Set(context, dataset);
}
But, with a second function to convert, it would look something like this:
protected override void Execute(NativeActivityContext context) {
DataSet dataset = GetDataSet.Get(context);
foreach(DataTable dt in dataset.Tables) {
foreach(DataRow row in dt.Rows) {
row["USER_COMMENT"] = ConvertNullToEmptyString(row["USER_COMMENT"]);
}
}
TransformResult.Set(context, dataset);
}
private static object ConvertNullToEmptyString(object element) {
if(String.IsNullOrEmpty(element.ToString())) {
return "NO DATA";
} else {
return element;
}
}
Use empty string instead of null
if(row["USER_COMMENT"] == string.Empty)
Did you try
private static string ConvertNullToEmptyString(DataTable element)
{
// Convert.ToString maps both null and DBNull to an empty string
if (string.IsNullOrEmpty(Convert.ToString(element.Rows[0]["USER_COMMENT"])))
{
return "NO DATA";
}
else
{
return element.Rows[0]["USER_COMMENT"].ToString();
}
}
I am using SharePoint Server 2013. I am trying to show list data in a DataGridView in a Windows Forms application (a client application). I obtained the ListItemCollection object for the specific list. How can I map that object to the DataSource of the DataGridView?
I can't find any specific way to obtain a DataTable object from the ListItemCollection object. Because the list used to obtain the data is selected from a drop-down list, there are no predetermined columns for the DataTable object. Thanks in advance. :)
Try this one:
ListItemCollection items = GetListItemCollections(); //Get list item collection
DataTable dt = new DataTable();
foreach (var field in items[0].FieldValues.Keys)
{
dt.Columns.Add(field);
}
foreach (var item in items)
{
DataRow dr = dt.NewRow();
foreach (var obj in item.FieldValues)
{
if (obj.Value != null)
{
string type = obj.Value.GetType().FullName;
if (type == "Microsoft.SharePoint.Client.FieldLookupValue")
{
dr[obj.Key] = ((FieldLookupValue)obj.Value).LookupValue;
}
else if (type == "Microsoft.SharePoint.Client.FieldUserValue")
{
dr[obj.Key] = ((FieldUserValue)obj.Value).LookupValue;
}
else
{
dr[obj.Key] = obj.Value;
}
}
else
{
dr[obj.Key] = DBNull.Value; // a DataRow value cannot be set to null; use DBNull.Value
}
}
dt.Rows.Add(dr);
}
ResetDataGridView(); //Clear contents of datagridview
dataGridView1.DataSource = dt;
Hope this helps..
Thanks
The function below will return a DataTable.
internal DataTable GetDataTableFromListItemCollection()
{
DataTable dtGetReqForm = new DataTable();
using (var clientContext = new ClientContext("sharepoint host url"))
{
try
{
Microsoft.SharePoint.Client.List spList = clientContext.Web.Lists.GetByTitle("ReqList");
clientContext.Load(spList);
clientContext.ExecuteQuery();
if (spList != null && spList.ItemCount > 0)
{
Microsoft.SharePoint.Client.CamlQuery camlQuery = new CamlQuery();
camlQuery.ViewXml =
#"<View>" +
"<Query> " +
"<Where>" +
"<And>" +
"<IsNotNull><FieldRef Name='ID' /></IsNotNull>" +
"<Eq><FieldRef Name='ReqNo' /><Value Type='Text'>123</Value></Eq>" +
"</And>" +
"</Where>" +
"</Query> " +
"<ViewFields>" +
"<FieldRef Name='ID' />" +
"</ViewFields>" +
"</View>";
SPClient.ListItemCollection listItems = spList.GetItems(camlQuery);
clientContext.Load(listItems);
clientContext.ExecuteQuery();
if (listItems != null && listItems.Count > 0)
{
foreach (var field in listItems[0].FieldValues.Keys)
{
dtGetReqForm.Columns.Add(field);
}
foreach (var item in listItems)
{
DataRow dr = dtGetReqForm.NewRow();
foreach (var obj in item.FieldValues)
{
if (obj.Value != null)
{
string key = obj.Key;
string type = obj.Value.GetType().FullName;
if (type == "Microsoft.SharePoint.Client.FieldLookupValue")
{
dr[obj.Key] = ((FieldLookupValue)obj.Value).LookupValue;
}
else if (type == "Microsoft.SharePoint.Client.FieldUserValue")
{
dr[obj.Key] = ((FieldUserValue)obj.Value).LookupValue;
}
else if (type == "Microsoft.SharePoint.Client.FieldUserValue[]")
{
FieldUserValue[] multValue = (FieldUserValue[])obj.Value;
foreach (FieldUserValue fieldUserValue in multValue)
{
dr[obj.Key] += (fieldUserValue).LookupValue;
}
}
else if (type == "System.DateTime")
{
if (obj.Value.ToString().Length > 0)
{
var date = obj.Value.ToString().Split(' ');
if (date[0].Length > 0)
{
dr[obj.Key] = date[0];
}
}
}
else
{
dr[obj.Key] = obj.Value;
}
}
else
{
dr[obj.Key] = DBNull.Value; // a DataRow value cannot be set to null; use DBNull.Value
}
}
dtGetReqForm.Rows.Add(dr);
}
}
}
}
catch (Exception ex)
{
    // Don't swallow exceptions silently; at minimum, log them
    System.Diagnostics.Debug.WriteLine(ex.Message);
}
finally
{
if (clientContext != null)
clientContext.Dispose();
}
}
return dtGetReqForm;
}
//Once you have the DataTable, you can set it as the DataSource.
//DataGridView1 is the control's name.
DataGridView1.DataSource = GetDataTableFromListItemCollection();
I'm using the Save() method to insert or update records, but I would like to make it perform a bulk insert and bulk update with only one database hit. How do I do this?
In my case, I took advantage of the database.Execute() method.
I created a Sql object that had the first part of my insert:
var sql = new Sql("insert into myTable(Name, Age, Gender) values");
for (int i = 0; i < pocos.Count ; ++i)
{
var p = pocos[i];
sql.Append("(#0, #1, #2)", p.Name, p.Age , p.Gender);
if(i != pocos.Count -1)
sql.Append(",");
}
Database.Execute(sql);
I tried two different methods for inserting a large quantity of rows faster than the default Insert (which is pretty slow when you have a lot of rows).
1) Building up a List<T> of the POCOs first and then inserting them all at once within a loop (and in a transaction):
using (var tr = PetaPocoDb.GetTransaction())
{
foreach (var record in listOfRecords)
{
PetaPocoDb.Insert(record);
}
tr.Complete();
}
2) SqlBulkCopy a DataTable:
var bulkCopy = new SqlBulkCopy(connectionString, SqlBulkCopyOptions.TableLock);
bulkCopy.DestinationTableName = "SomeTable";
bulkCopy.WriteToServer(dt);
To get my List<T> to a DataTable I used Marc Gravell's "Convert generic List/Enumerable to DataTable?" function, which worked ootb for me (after I rearranged the POCO properties to be in the exact same order as the table fields in the db).
The SqlBulkCopy was fastest, 50% or so faster than the transactions method in the (quick) perf tests I did with ~1000 rows.
Hth
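In case the linked helper isn't handy, here is a minimal reflection-based sketch of the List<T>-to-DataTable conversion (a simplified stand-in, not Marc Gravell's exact code):
// Column order follows property order, which SqlBulkCopy matches positionally
// when no explicit ColumnMappings are added.
static DataTable ToDataTable<T>(IEnumerable<T> rows)
{
    var props = typeof(T).GetProperties();
    var dt = new DataTable(typeof(T).Name);
    foreach (var p in props)
        dt.Columns.Add(p.Name, Nullable.GetUnderlyingType(p.PropertyType) ?? p.PropertyType);
    foreach (var row in rows)
        dt.Rows.Add(props.Select(p => p.GetValue(row, null) ?? (object)DBNull.Value).ToArray());
    return dt;
}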
Inserting in one SQL query is much faster.
Here is a custom method for the PetaPoco.Database class that adds the ability to do a bulk insert of any collection:
public void BulkInsertRecords<T>(IEnumerable<T> collection)
{
try
{
OpenSharedConnection();
using (var cmd = CreateCommand(_sharedConnection, ""))
{
var pd = Database.PocoData.ForType(typeof(T));
var tableName = EscapeTableName(pd.TableInfo.TableName);
string cols = string.Join(", ", (from c in pd.QueryColumns select tableName + "." + EscapeSqlIdentifier(c)).ToArray());
var pocoValues = new List<string>();
var index = 0;
foreach (var poco in collection)
{
var values = new List<string>();
foreach (var i in pd.Columns)
{
values.Add(string.Format("{0}{1}", _paramPrefix, index++));
AddParam(cmd, i.Value.GetValue(poco), _paramPrefix);
}
pocoValues.Add("(" + string.Join(",", values.ToArray()) + ")");
}
var sql = string.Format("INSERT INTO {0} ({1}) VALUES {2}", tableName, cols, string.Join(", ", pocoValues));
cmd.CommandText = sql;
cmd.ExecuteNonQuery();
}
}
finally
{
CloseSharedConnection();
}
}
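Usage is then a one-liner (db being your Database subclass exposing the method, customers a hypothetical collection). Note that on SQL Server this single multi-row INSERT is still subject to the 2,100-parameter limit discussed in the answers below:
db.BulkInsertRecords(customers);   // one INSERT statement, one round trip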
Here is an updated version of Steve Jansen's answer (below) that splits the collection into chunks so that each insert stays under the 2,100-parameter maximum.
I commented out the following code, as it produces duplicates in the database...
//using (var reader = cmd.ExecuteReader())
//{
// while (reader.Read())
// {
// inserted.Add(reader[0]);
// }
//}
Updated Code
/// <summary>
/// Performs an SQL Insert against a collection of pocos
/// </summary>
/// <param name="pocos">A collection of POCO objects that specifies the column values to be inserted. Assumes that every POCO is of the same type.</param>
/// <returns>An array of the auto allocated primary key of the new record, or null for non-auto-increment tables</returns>
public object BulkInsert(IEnumerable<object> pocos)
{
Sql sql;
IList<PocoColumn> columns = new List<PocoColumn>();
IList<object> parameters;
IList<object> inserted;
PocoData pd;
Type primaryKeyType;
object template;
string commandText;
string tableName;
string primaryKeyName;
bool autoIncrement;
int maxBulkInsert;
if (null == pocos)
{
return new object[] { };
}
template = pocos.First<object>();
if (null == template)
{
return null;
}
pd = PocoData.ForType(template.GetType());
tableName = pd.TableInfo.TableName;
primaryKeyName = pd.TableInfo.PrimaryKey;
autoIncrement = pd.TableInfo.AutoIncrement;
//Calculate the maximum chunk size
maxBulkInsert = 2100 / pd.Columns.Count;
IEnumerable<object> pacosToInsert = pocos.Take(maxBulkInsert);
IEnumerable<object> pacosremaining = pocos.Skip(maxBulkInsert);
try
{
OpenSharedConnection();
try
{
var names = new List<string>();
var values = new List<string>();
var index = 0;
foreach (var i in pd.Columns)
{
// Don't insert result columns
if (i.Value.ResultColumn)
continue;
// Don't insert the primary key (except under oracle where we need bring in the next sequence value)
if (autoIncrement && primaryKeyName != null && string.Compare(i.Key, primaryKeyName, true) == 0)
{
primaryKeyType = i.Value.PropertyInfo.PropertyType;
// Setup auto increment expression
string autoIncExpression = _dbType.GetAutoIncrementExpression(pd.TableInfo);
if (autoIncExpression != null)
{
names.Add(i.Key);
values.Add(autoIncExpression);
}
continue;
}
names.Add(_dbType.EscapeSqlIdentifier(i.Key));
values.Add(string.Format("{0}{1}", _paramPrefix, index++));
columns.Add(i.Value);
}
string outputClause = String.Empty;
if (autoIncrement)
{
outputClause = _dbType.GetInsertOutputClause(primaryKeyName);
}
commandText = string.Format("INSERT INTO {0} ({1}){2} VALUES",
_dbType.EscapeTableName(tableName),
string.Join(",", names.ToArray()),
outputClause
);
sql = new Sql(commandText);
parameters = new List<object>();
string valuesText = string.Concat("(", string.Join(",", values.ToArray()), ")");
bool isFirstPoco = true;
var parameterCounter = 0;
foreach (object poco in pacosToInsert)
{
parameterCounter++;
parameters.Clear();
foreach (PocoColumn column in columns)
{
parameters.Add(column.GetValue(poco));
}
sql.Append(valuesText, parameters.ToArray<object>());
if (isFirstPoco && pocos.Count() > 1)
{
valuesText = "," + valuesText;
isFirstPoco = false;
}
}
inserted = new List<object>();
using (var cmd = CreateCommand(_sharedConnection, sql.SQL, sql.Arguments))
{
if (!autoIncrement)
{
DoPreExecute(cmd);
cmd.ExecuteNonQuery();
OnExecutedCommand(cmd);
PocoColumn pkColumn;
if (primaryKeyName != null && pd.Columns.TryGetValue(primaryKeyName, out pkColumn))
{
foreach (object poco in pocos)
{
inserted.Add(pkColumn.GetValue(poco));
}
}
return inserted.ToArray<object>();
}
object id = _dbType.ExecuteInsert(this, cmd, primaryKeyName);
if (pacosremaining.Any())
{
return BulkInsert(pacosremaining);
}
return id;
//using (var reader = cmd.ExecuteReader())
//{
// while (reader.Read())
// {
// inserted.Add(reader[0]);
// }
//}
//object[] primaryKeys = inserted.ToArray<object>();
//// Assign the ID back to the primary key property
//if (primaryKeyName != null)
//{
// PocoColumn pc;
// if (pd.Columns.TryGetValue(primaryKeyName, out pc))
// {
// index = 0;
// foreach (object poco in pocos)
// {
// pc.SetValue(poco, pc.ChangeType(primaryKeys[index]));
// index++;
// }
// }
//}
//return primaryKeys;
}
}
finally
{
CloseSharedConnection();
}
}
catch (Exception x)
{
if (OnException(x))
throw;
return null;
}
}
Below is a BulkInsert method for PetaPoco that expands on taylonr's very clever idea of using the SQL technique of inserting multiple rows via INSERT INTO tab(col1, col2) OUTPUT inserted.[ID] VALUES (@0, @1), (@2, @3), (@4, @5), ..., (@n-1, @n).
It also returns the auto-increment (identity) values of inserted records, which I don't believe happens in IvoTops' implementation.
NOTE: SQL Server 2012 (and below) has a limit of 2,100 parameters per query. (This is likely the source of the stack overflow exception referenced by Zelid's comment). You will need to manually split your batches up based on the number of columns that are not decorated as Ignore or Result. For example, a POCO with 21 columns should be sent in batch sizes of 99, or (2100 - 1) / 21. I may refactor this to dynamically split batches based on this limit for SQL Server; however, you will always see the best results by managing the batch size external to this method.
This method showed an approximate 50% gain in execution time over my previous technique of using a shared connection in a single transaction for all inserts.
This is one area where Massive really shines - Massive has a Save(params object[] things) that builds an array of IDbCommands, and executes each one on a shared connection. It works out of the box, and doesn't run into parameter limits.
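As a hedged sketch of managing the batch size externally (BulkInsertBatched is a hypothetical wrapper, not part of PetaPoco): chunk the collection so each statement stays under the parameter cap, then delegate each chunk to the BulkInsert method below.
public object[] BulkInsertBatched(IEnumerable<object> pocos, int mappedColumnCount)
{
    // e.g. 99 rows per chunk for a POCO with 21 mapped (non-Ignore, non-Result) columns
    int batchSize = (2100 - 1) / mappedColumnCount;
    var keys = new List<object>();
    foreach (var chunk in pocos.Select((p, i) => new { p, i })
                               .GroupBy(x => x.i / batchSize, x => x.p))
    {
        var ids = BulkInsert(chunk);   // the method below
        if (ids != null)
            keys.AddRange(ids);
    }
    return keys.ToArray();
}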
/// <summary>
/// Performs an SQL Insert against a collection of pocos
/// </summary>
/// <param name="pocos">A collection of POCO objects that specifies the column values to be inserted. Assumes that every POCO is of the same type.</param>
/// <returns>An array of the auto allocated primary key of the new record, or null for non-auto-increment tables</returns>
/// <remarks>
/// NOTE: As of SQL Server 2012, there is a limit of 2100 parameters per query. This limitation does not seem to apply on other platforms, so
/// this method will allow more than 2100 parameters. See http://msdn.microsoft.com/en-us/library/ms143432.aspx
/// The name of the table, it's primary key and whether it's an auto-allocated primary key are retrieved from the attributes of the first POCO in the collection
/// </remarks>
public object[] BulkInsert(IEnumerable<object> pocos)
{
Sql sql;
IList<PocoColumn> columns = new List<PocoColumn>();
IList<object> parameters;
IList<object> inserted;
PocoData pd;
Type primaryKeyType;
object template;
string commandText;
string tableName;
string primaryKeyName;
bool autoIncrement;
if (null == pocos)
return new object[] {};
template = pocos.First<object>();
if (null == template)
return null;
pd = PocoData.ForType(template.GetType());
tableName = pd.TableInfo.TableName;
primaryKeyName = pd.TableInfo.PrimaryKey;
autoIncrement = pd.TableInfo.AutoIncrement;
try
{
OpenSharedConnection();
try
{
var names = new List<string>();
var values = new List<string>();
var index = 0;
foreach (var i in pd.Columns)
{
// Don't insert result columns
if (i.Value.ResultColumn)
continue;
// Don't insert the primary key (except under oracle where we need bring in the next sequence value)
if (autoIncrement && primaryKeyName != null && string.Compare(i.Key, primaryKeyName, true) == 0)
{
primaryKeyType = i.Value.PropertyInfo.PropertyType;
// Setup auto increment expression
string autoIncExpression = _dbType.GetAutoIncrementExpression(pd.TableInfo);
if (autoIncExpression != null)
{
names.Add(i.Key);
values.Add(autoIncExpression);
}
continue;
}
names.Add(_dbType.EscapeSqlIdentifier(i.Key));
values.Add(string.Format("{0}{1}", _paramPrefix, index++));
columns.Add(i.Value);
}
string outputClause = String.Empty;
if (autoIncrement)
{
outputClause = _dbType.GetInsertOutputClause(primaryKeyName);
}
commandText = string.Format("INSERT INTO {0} ({1}){2} VALUES",
_dbType.EscapeTableName(tableName),
string.Join(",", names.ToArray()),
outputClause
);
sql = new Sql(commandText);
parameters = new List<object>();
string valuesText = string.Concat("(", string.Join(",", values.ToArray()), ")");
bool isFirstPoco = true;
foreach (object poco in pocos)
{
parameters.Clear();
foreach (PocoColumn column in columns)
{
parameters.Add(column.GetValue(poco));
}
sql.Append(valuesText, parameters.ToArray<object>());
if (isFirstPoco)
{
valuesText = "," + valuesText;
isFirstPoco = false;
}
}
inserted = new List<object>();
using (var cmd = CreateCommand(_sharedConnection, sql.SQL, sql.Arguments))
{
if (!autoIncrement)
{
DoPreExecute(cmd);
cmd.ExecuteNonQuery();
OnExecutedCommand(cmd);
PocoColumn pkColumn;
if (primaryKeyName != null && pd.Columns.TryGetValue(primaryKeyName, out pkColumn))
{
foreach (object poco in pocos)
{
inserted.Add(pkColumn.GetValue(poco));
}
}
return inserted.ToArray<object>();
}
// BUG: the following line reportedly causes duplicate inserts; need to confirm
//object id = _dbType.ExecuteInsert(this, cmd, primaryKeyName);
using(var reader = cmd.ExecuteReader())
{
while (reader.Read())
{
inserted.Add(reader[0]);
}
}
object[] primaryKeys = inserted.ToArray<object>();
// Assign the ID back to the primary key property
if (primaryKeyName != null)
{
PocoColumn pc;
if (pd.Columns.TryGetValue(primaryKeyName, out pc))
{
index = 0;
foreach(object poco in pocos)
{
pc.SetValue(poco, pc.ChangeType(primaryKeys[index]));
index++;
}
}
}
return primaryKeys;
}
}
finally
{
CloseSharedConnection();
}
}
catch (Exception x)
{
if (OnException(x))
throw;
return null;
}
}
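A usage sketch (db being a Database subclass exposing the method): the returned array lines up with the input order, so keys can be correlated back to the pocos.
object[] ids = db.BulkInsert(newCustomers);   // ids[i] belongs to newCustomers[i]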
Here is the code for BulkInsert that you can add to v5.01 of PetaPoco.cs.
You can paste it somewhere close to the regular insert, at line 1098.
You give it an IEnumerable of POCOs and it will send them to the database
in batches of x at a time. The code is 90% from the regular insert.
I don't have a performance comparison; let me know :)
/// <summary>
/// Bulk inserts multiple rows to SQL
/// </summary>
/// <param name="tableName">The name of the table to insert into</param>
/// <param name="primaryKeyName">The name of the primary key column of the table</param>
/// <param name="autoIncrement">True if the primary key is automatically allocated by the DB</param>
/// <param name="pocos">The POCO objects that specifies the column values to be inserted</param>
/// <param name="batchSize">The number of POCOS to be grouped together for each database rounddtrip</param>
public void BulkInsert(string tableName, string primaryKeyName, bool autoIncrement, IEnumerable<object> pocos, int batchSize = 25)
{
try
{
OpenSharedConnection();
try
{
using (var cmd = CreateCommand(_sharedConnection, ""))
{
var pd = PocoData.ForObject(pocos.First(), primaryKeyName);
// Create list of columnnames only once
var names = new List<string>();
foreach (var i in pd.Columns)
{
// Don't insert result columns
if (i.Value.ResultColumn)
continue;
// Don't insert the primary key (except under oracle where we need bring in the next sequence value)
if (autoIncrement && primaryKeyName != null && string.Compare(i.Key, primaryKeyName, true) == 0)
{
// Setup auto increment expression
string autoIncExpression = _dbType.GetAutoIncrementExpression(pd.TableInfo);
if (autoIncExpression != null)
{
names.Add(i.Key);
}
continue;
}
names.Add(_dbType.EscapeSqlIdentifier(i.Key));
}
var namesArray = names.ToArray();
var values = new List<string>();
int count = 0;
do
{
cmd.CommandText = "";
cmd.Parameters.Clear();
var index = 0;
foreach (var poco in pocos.Skip(count).Take(batchSize))
{
values.Clear();
foreach (var i in pd.Columns)
{
// Don't insert result columns
if (i.Value.ResultColumn) continue;
// Don't insert the primary key (except under oracle where we need bring in the next sequence value)
if (autoIncrement && primaryKeyName != null && string.Compare(i.Key, primaryKeyName, true) == 0)
{
// Setup auto increment expression
string autoIncExpression = _dbType.GetAutoIncrementExpression(pd.TableInfo);
if (autoIncExpression != null)
{
values.Add(autoIncExpression);
}
continue;
}
values.Add(string.Format("{0}{1}", _paramPrefix, index++));
AddParam(cmd, i.Value.GetValue(poco), i.Value.PropertyInfo);
}
string outputClause = String.Empty;
if (autoIncrement)
{
outputClause = _dbType.GetInsertOutputClause(primaryKeyName);
}
cmd.CommandText += string.Format("INSERT INTO {0} ({1}){2} VALUES ({3})", _dbType.EscapeTableName(tableName),
string.Join(",", namesArray), outputClause, string.Join(",", values.ToArray()));
}
// Are we done?
if (cmd.CommandText == "") break;
count += batchSize;
DoPreExecute(cmd);
cmd.ExecuteNonQuery();
OnExecutedCommand(cmd);
}
while (true);
}
}
finally
{
CloseSharedConnection();
}
}
catch (Exception x)
{
if (OnException(x))
throw;
}
}
/// <summary>
/// Performs a SQL Bulk Insert
/// </summary>
/// <param name="pocos">The POCO objects that specifies the column values to be inserted</param>
/// <param name="batchSize">The number of POCOS to be grouped together for each database rounddtrip</param>
public void BulkInsert(IEnumerable<object> pocos, int batchSize = 25)
{
if (!pocos.Any()) return;
var pd = PocoData.ForType(pocos.First().GetType());
BulkInsert(pd.TableInfo.TableName, pd.TableInfo.PrimaryKey, pd.TableInfo.AutoIncrement, pocos, batchSize);
}
And along the same lines, if you want BulkUpdate:
public void BulkUpdate<T>(string tableName, string primaryKeyName, IEnumerable<T> pocos, int batchSize = 25)
{
try
{
object primaryKeyValue = null;
OpenSharedConnection();
try
{
using (var cmd = CreateCommand(_sharedConnection, ""))
{
var pd = PocoData.ForObject(pocos.First(), primaryKeyName);
int count = 0;
do
{
cmd.CommandText = "";
cmd.Parameters.Clear();
var index = 0;
var cmdText = new StringBuilder();
foreach (var poco in pocos.Skip(count).Take(batchSize))
{
var sb = new StringBuilder();
var colIdx = 0;
foreach (var i in pd.Columns)
{
// Don't update the primary key, but grab the value if we don't have it
if (string.Compare(i.Key, primaryKeyName, true) == 0)
{
primaryKeyValue = i.Value.GetValue(poco);
continue;
}
// Dont update result only columns
if (i.Value.ResultColumn)
continue;
// Build the sql
if (colIdx > 0)
sb.Append(", ");
sb.AppendFormat("{0} = {1}{2}", _dbType.EscapeSqlIdentifier(i.Key), _paramPrefix,
index++);
// Store the parameter in the command
AddParam(cmd, i.Value.GetValue(poco), i.Value.PropertyInfo);
colIdx++;
}
// Find the property info for the primary key
PropertyInfo pkpi = null;
if (primaryKeyName != null)
{
pkpi = pd.Columns[primaryKeyName].PropertyInfo;
}
cmdText.Append(string.Format("UPDATE {0} SET {1} WHERE {2} = {3}{4};\n",
_dbType.EscapeTableName(tableName), sb.ToString(),
_dbType.EscapeSqlIdentifier(primaryKeyName), _paramPrefix,
index++));
AddParam(cmd, primaryKeyValue, pkpi);
}
if (cmdText.Length == 0) break;
if (_providerName.IndexOf("oracle", StringComparison.OrdinalIgnoreCase) >= 0)
{
cmdText.Insert(0, "BEGIN\n");
cmdText.Append("\n END;");
}
DoPreExecute(cmd);
cmd.CommandText = cmdText.ToString();
count += batchSize;
cmd.ExecuteNonQuery();
OnExecutedCommand(cmd);
} while (true);
}
}
finally
{
CloseSharedConnection();
}
}
catch (Exception x)
{
if (OnException(x))
throw;
}
}
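A quick usage sketch (db is your PetaPoco.Database subclass carrying these methods; Order is a hypothetical POCO):
db.BulkInsert(orders, batchSize: 50);                    // INSERTs in chunks of 50
db.BulkUpdate("Orders", "OrderId", changedOrders, 50);   // UPDATEs in chunks of 50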
Here's a nice 2018 update using FastMember from NuGet:
private static async Task SqlBulkCopyPocoAsync<T>(PetaPoco.Database db, IEnumerable<T> data)
{
var pd = PocoData.ForType(typeof(T), db.DefaultMapper);
using (var bcp = new SqlBulkCopy(db.ConnectionString))
using (var reader = ObjectReader.Create(data))
{
// set up a mapping from the property names to the column names
var propNames = typeof(T).GetProperties().Where(p => Attribute.IsDefined(p, typeof(ResultColumnAttribute)) == false).Select(propertyInfo => propertyInfo.Name).ToArray();
foreach (var propName in propNames)
{
bcp.ColumnMappings.Add(propName, "[" + pd.GetColumnName(propName) + "]");
}
bcp.DestinationTableName = pd.TableInfo.TableName;
await bcp.WriteToServerAsync(reader).ConfigureAwait(false);
}
}
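Usage is then a single awaited call (db being an open PetaPoco.Database, customers a List<Customer>; both names hypothetical):
await SqlBulkCopyPocoAsync(db, customers);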
You can just do a foreach on your records.
foreach (var record in records) {
db.Save(record);
}