To insert a huge amount of data into a database, I collect all the information into a list and convert that list into a DataTable. I then insert it into the database via SqlBulkCopy.
I pass my generated list LiMyList, which contains all the bulk data I want to insert, to my bulk insertion operation:
InsertData(LiMyList, "MyTable");
where InsertData is:
public static void InsertData<T>(List<T> list, string TableName)
{
    DataTable dt = ConvertToDataTable(list);
    using (SqlBulkCopy bulkcopy = new SqlBulkCopy(ConfigurationManager.ConnectionStrings["SchoolSoulDataEntitiesForReport"].ConnectionString))
    {
        bulkcopy.BulkCopyTimeout = 660;
        bulkcopy.DestinationTableName = TableName;
        bulkcopy.WriteToServer(dt);
    }
}
public static DataTable ConvertToDataTable<T>(IList<T> data)
{
    PropertyDescriptorCollection properties = TypeDescriptor.GetProperties(typeof(T));
    DataTable table = new DataTable();
    foreach (PropertyDescriptor prop in properties)
        table.Columns.Add(prop.Name, Nullable.GetUnderlyingType(prop.PropertyType) ?? prop.PropertyType);
    foreach (T item in data)
    {
        DataRow row = table.NewRow();
        foreach (PropertyDescriptor prop in properties)
            row[prop.Name] = prop.GetValue(item) ?? DBNull.Value;
        table.Rows.Add(row);
    }
    return table;
}
Now I want to do an update operation. Is there any way, just as inserting is done by SqlBulkCopy, to bulk-update data in the database from C#/.NET?
What I've done before is perform a bulk insert from the data into a temp table, and then use a command or stored procedure to update the destination table by joining it with the temp table. The temp table is an extra step, but the bulk insert plus a single set-based update can give a large performance gain when the number of rows is big, compared with updating the data row by row.
Example:
public static void UpdateData<T>(List<T> list, string TableName)
{
    DataTable dt = ConvertToDataTable(list);
    using (SqlConnection conn = new SqlConnection(ConfigurationManager.ConnectionStrings["SchoolSoulDataEntitiesForReport"].ConnectionString))
    {
        using (SqlCommand command = new SqlCommand("", conn))
        {
            try
            {
                conn.Open();
                // Create the temp table on the database
                command.CommandText = "CREATE TABLE #TmpTable(...)";
                command.ExecuteNonQuery();
                // Bulk insert into the temp table
                using (SqlBulkCopy bulkcopy = new SqlBulkCopy(conn))
                {
                    bulkcopy.BulkCopyTimeout = 660;
                    bulkcopy.DestinationTableName = "#TmpTable";
                    bulkcopy.WriteToServer(dt);
                }
                // Update the destination table, then drop the temp table
                command.CommandTimeout = 300;
                command.CommandText = "UPDATE T SET ... FROM " + TableName + " T INNER JOIN #TmpTable Temp ON ...; DROP TABLE #TmpTable;";
                command.ExecuteNonQuery();
            }
            catch (Exception ex)
            {
                // Handle exception properly
            }
            finally
            {
                conn.Close();
            }
        }
    }
}
Notice that a single connection is used to perform the whole operation, in order to be able to use the temp table in each step, because the scope of the temp table is per connection.
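For illustration only, with a hypothetical Person(Id, Name) table the two statements might look like this (the real columns come from your own schema):
CREATE TABLE #TmpTable (Id INT NOT NULL, Name NVARCHAR(100) NULL);
-- ... bulk copy into #TmpTable happens between these two statements ...
UPDATE T SET T.Name = Temp.Name
FROM Person T
INNER JOIN #TmpTable Temp ON T.Id = Temp.Id;
DROP TABLE #TmpTable;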
In my personal experience, the best way to handle this situation is using a stored procedure with a Table-Valued Parameter and a User-Defined Table Type. Just set up the type with the columns of the data table, and pass in said data table as a parameter of the SQL command.
Within the stored procedure, you can either join directly on some unique key (if all the rows you are updating exist), or, if you might run into a situation where you have to do both updates and inserts, use the SQL MERGE command to handle both as applicable.
Microsoft provides both a syntax reference and an article with examples for MERGE.
For the .NET piece, it's a simple matter of setting the parameter type to SqlDbType.Structured and setting the value of said parameter to the DataTable that contains the records you want to update.
This method provides the benefit of both clarity and ease of maintenance. While there may be ways that offer performance improvements (such as dropping the data into a temporary table and then iterating over that table), I think they're outweighed by the simplicity of letting .NET and SQL handle transferring the table and updating the records. K.I.S.S.
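A minimal sketch of the whole approach, assuming a hypothetical dbo.Person(Id, Name) table; the type, procedure, and column names are placeholders:
CREATE TYPE dbo.PersonTableType AS TABLE (Id INT NOT NULL, Name NVARCHAR(100) NULL);
GO
CREATE PROCEDURE dbo.UpsertPeople (@People dbo.PersonTableType READONLY)
AS
MERGE dbo.Person AS target
USING @People AS source ON target.Id = source.Id
WHEN MATCHED THEN UPDATE SET target.Name = source.Name
WHEN NOT MATCHED THEN INSERT (Id, Name) VALUES (source.Id, source.Name);
GO
And the .NET side, again as a sketch:
using (var conn = new SqlConnection(connectionString))
using (var cmd = new SqlCommand("dbo.UpsertPeople", conn) { CommandType = CommandType.StoredProcedure })
{
    // dataTable's columns must match the user-defined table type
    SqlParameter p = cmd.Parameters.AddWithValue("@People", dataTable);
    p.SqlDbType = SqlDbType.Structured;
    p.TypeName = "dbo.PersonTableType";
    conn.Open();
    cmd.ExecuteNonQuery();
}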
Bulk Update:
Step 1: Put the data you want to update, along with the primary key, in a list.
Step 2: Pass this list and the connection string to the BulkUpdate method, as shown below.
Example:
// Method for bulk-updating the data
public static void BulkUpdateData<T>(List<T> list, string connectionString)
{
    DataTable dt = ConvertToDataTable(list);
    using (SqlConnection conn = new SqlConnection(connectionString))
    {
        using (SqlCommand command = new SqlCommand("CREATE TABLE #TmpTable([PrimaryKey],[ColumnToUpdate])", conn))
        {
            try
            {
                conn.Open();
                command.ExecuteNonQuery();
                using (SqlBulkCopy bulkcopy = new SqlBulkCopy(conn))
                {
                    bulkcopy.BulkCopyTimeout = 6600;
                    bulkcopy.DestinationTableName = "#TmpTable";
                    bulkcopy.WriteToServer(dt);
                }
                command.CommandTimeout = 3000;
                command.CommandText = "UPDATE P SET P.[ColumnToUpdate] = T.[ColumnToUpdate] FROM [TableName you want to update] AS P INNER JOIN #TmpTable AS T ON P.[PrimaryKey] = T.[PrimaryKey]; DROP TABLE #TmpTable;";
                command.ExecuteNonQuery();
            }
            catch (Exception ex)
            {
                // Handle exception properly
            }
            finally
            {
                conn.Close();
            }
        }
    }
}
Step 3: Put the ConvertToDataTable method as shown below.
Example:
public static DataTable ConvertToDataTable<T>(IList<T> data)
{
    PropertyDescriptorCollection properties = TypeDescriptor.GetProperties(typeof(T));
    DataTable table = new DataTable();
    foreach (PropertyDescriptor prop in properties)
        table.Columns.Add(prop.Name, Nullable.GetUnderlyingType(prop.PropertyType) ?? prop.PropertyType);
    foreach (T item in data)
    {
        DataRow row = table.NewRow();
        foreach (PropertyDescriptor prop in properties)
            row[prop.Name] = prop.GetValue(item) ?? DBNull.Value;
        table.Rows.Add(row);
    }
    return table;
}
Note: wherever square brackets [] appear, substitute your own values.
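For example, with a hypothetical Employee(EmployeeId, Salary) table, the two statements would become:
CREATE TABLE #TmpTable([EmployeeId] INT, [Salary] DECIMAL(18,2))
UPDATE P SET P.[Salary] = T.[Salary] FROM [Employee] AS P INNER JOIN #TmpTable AS T ON P.[EmployeeId] = T.[EmployeeId]; DROP TABLE #TmpTable;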
Try out SqlBulkTools, available on NuGet.
Disclaimer: I'm the author of this library.
var bulk = new BulkOperations();
var records = GetRecordsToUpdate();
using (TransactionScope trans = new TransactionScope())
{
    using (SqlConnection conn = new SqlConnection(ConfigurationManager
        .ConnectionStrings["SqlBulkToolsTest"].ConnectionString))
    {
        bulk.Setup<MyTable>()
            .ForCollection(records)
            .WithTable("MyTable")
            .AddColumn(x => x.SomeColumn1)
            .AddColumn(x => x.SomeColumn2)
            .BulkUpdate()
            .MatchTargetOn(x => x.Identifier)
            .Commit(conn);
    }
    trans.Complete();
}
Only 'SomeColumn1' and 'SomeColumn2' will be updated. More examples can be found here
I would insert new values in a temporary table and then do a MERGE against the destination table, something like this:
MERGE [DestTable] AS D
USING #SourceTable S
ON D.ID = S.ID
WHEN MATCHED THEN
UPDATE SET ...
WHEN NOT MATCHED
THEN INSERT (...)
VALUES (...);
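Filled in for a hypothetical DestTable(ID, Name), that would be:
MERGE [DestTable] AS D
USING #SourceTable S
ON D.ID = S.ID
WHEN MATCHED THEN
UPDATE SET D.Name = S.Name
WHEN NOT MATCHED
THEN INSERT (ID, Name)
VALUES (S.ID, S.Name);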
You could try to build a single query that contains all the data, using a CASE expression. It could look like this:
update your_table
set some_column = case when id = 1 then 'value of 1'
when id = 5 then 'value of 5'
when id = 7 then 'value of 7'
when id = 9 then 'value of 9'
end
where id in (1,5,7,9)
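If you build that statement in C#, it's safer to generate parameters than to concatenate the values in. A sketch, using the table and column names from the example above and assuming conn is an open SqlConnection:
var updates = new Dictionary<int, string> { { 1, "value of 1" }, { 5, "value of 5" }, { 7, "value of 7" }, { 9, "value of 9" } };
var sql = new StringBuilder("update your_table set some_column = case id ");
using (var cmd = new SqlCommand { Connection = conn })
{
    int i = 0;
    foreach (var pair in updates)
    {
        sql.Append($"when @id{i} then @value{i} ");
        cmd.Parameters.AddWithValue($"@id{i}", pair.Key);
        cmd.Parameters.AddWithValue($"@value{i}", pair.Value);
        i++;
    }
    sql.Append("end where id in (" + string.Join(",", updates.Keys) + ")"); // keys are ints, so safe to inline
    cmd.CommandText = sql.ToString();
    cmd.ExecuteNonQuery();
}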
I'd go for a TempTable approach because that way you aren't locking anything. But if your logic needs to be only in the front end and you need to use bulk copy, I'd try a delete/insert approach, inside the same SqlTransaction to ensure integrity, which would be something like this:
// ...
dt = ConvertToDataTable(list);
using (SqlConnection cnx = new SqlConnection(myConnectionString))
{
    cnx.Open();
    using (SqlTransaction tran = cnx.BeginTransaction())
    {
        DeleteData(cnx, tran, list);
        using (SqlBulkCopy bulkcopy = new SqlBulkCopy(cnx, SqlBulkCopyOptions.Default, tran))
        {
            bulkcopy.BulkCopyTimeout = 660;
            bulkcopy.DestinationTableName = TableName;
            bulkcopy.WriteToServer(dt);
        }
        tran.Commit();
    }
}
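DeleteData isn't shown above; a minimal sketch of what it might look like, assuming a hypothetical item type with an Id key and a MyTable destination:
private static void DeleteData(SqlConnection cnx, SqlTransaction tran, List<MyItem> list)
{
    // Remove the rows that are about to be re-inserted, inside the same transaction
    using (SqlCommand cmd = new SqlCommand("DELETE FROM MyTable WHERE Id = @Id", cnx, tran))
    {
        cmd.Parameters.Add("@Id", SqlDbType.Int);
        foreach (var item in list)
        {
            cmd.Parameters["@Id"].Value = item.Id;
            cmd.ExecuteNonQuery();
        }
    }
}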
Complete answer. Disclaimer: arrow code; this is mine, built from research, and published in SqlRapper. It uses custom attributes over properties to determine whether a key is primary. Yes, super complicated. Yes, super reusable. Yes, it needs to be refactored. Yes, it is a NuGet package. No, the documentation isn't great on GitHub, but it exists. Will it work for everything? Probably not. Will it work for simple stuff? Oh yeah.
How easy is it to use after setup?
public class Log
{
    [PrimaryKey]
    public int? LogId { get; set; }
    public int ApplicationId { get; set; }
    [DefaultKey]
    public DateTime? Date { get; set; }
    public string Message { get; set; }
}
var logs = new List<Log>() { log1, log2 };
success = db.BulkUpdateData(logs);
Here's how it works:
public class PrimaryKeyAttribute : Attribute
{
}
private static bool IsPrimaryKey(object[] attributes)
{
    bool skip = false;
    foreach (var attr in attributes)
    {
        if (attr.GetType() == typeof(PrimaryKeyAttribute))
        {
            skip = true;
        }
    }
    return skip;
}
private string GetSqlDataType(Type type, bool isPrimary = false)
{
    var sqlType = new StringBuilder();
    var isNullable = false;
    if (Nullable.GetUnderlyingType(type) != null)
    {
        isNullable = true;
        type = Nullable.GetUnderlyingType(type);
    }
    switch (Type.GetTypeCode(type))
    {
        case TypeCode.String:
            isNullable = true;
            sqlType.Append("nvarchar(MAX)");
            break;
        case TypeCode.Int32:
        case TypeCode.Int64:
        case TypeCode.Int16:
            sqlType.Append("int");
            break;
        case TypeCode.Boolean:
            sqlType.Append("bit");
            break;
        case TypeCode.DateTime:
            sqlType.Append("datetime");
            break;
        case TypeCode.Decimal:
        case TypeCode.Double:
            sqlType.Append("decimal");
            break;
    }
    if (!isNullable || isPrimary)
    {
        sqlType.Append(" NOT NULL");
    }
    return sqlType.ToString();
}
/// <summary>
/// SqlBulkCopy is allegedly protected from Sql Injection.
/// Updates a list of simple sql objects that mock tables.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="rows">A list of rows to insert</param>
/// <param name="tableName">a Table name if your class isn't your table name minus s.</param>
/// <returns>bool success</returns>
public bool BulkUpdateData<T>(List<T> rows, string tableName = null)
{
    var template = rows.FirstOrDefault();
    string tn = tableName ?? template.GetType().Name + "s";
    int updated = 0;
    using (SqlConnection con = new SqlConnection(ConnectionString))
    {
        using (SqlCommand command = new SqlCommand("", con))
        {
            using (SqlBulkCopy sbc = new SqlBulkCopy(con))
            {
                var dt = new DataTable();
                var columns = template.GetType().GetProperties();
                var colNames = new List<string>();
                string keyName = "";
                var setStatement = new StringBuilder();
                int rowNum = 0;
                foreach (var row in rows)
                {
                    dt.Rows.Add();
                    int colNum = 0;
                    foreach (var col in columns)
                    {
                        var attributes = row.GetType().GetProperty(col.Name).GetCustomAttributes(false);
                        bool isPrimary = IsPrimaryKey(attributes);
                        var value = row.GetType().GetProperty(col.Name).GetValue(row);
                        if (rowNum == 0)
                        {
                            colNames.Add($"{col.Name} {GetSqlDataType(col.PropertyType, isPrimary)}");
                            dt.Columns.Add(new DataColumn(col.Name, Nullable.GetUnderlyingType(col.PropertyType) ?? col.PropertyType));
                            if (!isPrimary)
                            {
                                setStatement.Append($" ME.{col.Name} = T.{col.Name},");
                            }
                        }
                        if (isPrimary)
                        {
                            keyName = col.Name;
                            if (value == null)
                            {
                                throw new Exception("Trying to update a row whose primary key is null; use insert instead.");
                            }
                        }
                        dt.Rows[rowNum][colNum] = value ?? DBNull.Value;
                        colNum++;
                    }
                    rowNum++;
                }
                setStatement.Length--; // trim the trailing comma
                try
                {
                    con.Open();
                    command.CommandText = $"CREATE TABLE [dbo].[#TmpTable]({String.Join(",", colNames)})";
                    //command.CommandTimeout = CmdTimeOut;
                    command.ExecuteNonQuery();
                    sbc.DestinationTableName = "[dbo].[#TmpTable]";
                    sbc.BulkCopyTimeout = CmdTimeOut * 3;
                    sbc.WriteToServer(dt);
                    sbc.Close();
                    command.CommandTimeout = CmdTimeOut * 3;
                    command.CommandText = $"UPDATE ME SET {setStatement} FROM {tn} as ME INNER JOIN #TmpTable AS T on ME.{keyName} = T.{keyName}; DROP TABLE #TmpTable;";
                    updated = command.ExecuteNonQuery();
                }
                catch (Exception ex)
                {
                    if (con.State != ConnectionState.Closed)
                    {
                        sbc.Close();
                        con.Close();
                    }
                    //well logging to sql might not work... we could try... but no.
                    //So Lets write to a local file.
                    _logger.Log($"Failed to Bulk Update to Sql: {rows.ToCSV()}", ex);
                    throw; // rethrow without resetting the stack trace
                }
            }
        }
    }
    return updated > 0;
}
Related
I am trying to implement ADO.NET code which executes a SQL query with multiple parameters. It looks like the SQL parameter limit is 2100, and the query does not accept more than that. How can my code below be made to work past this limitation?
I have found it difficult to understand the implementations in online articles about sending the query in subsets or chunks.
This is my code:
using (Connection = new SqlConnection(CS))
{
Connection.Open();
string query = "SELECT FamilyID, FullName, Alias FROM TABLE (nolock) WHERE FamilyID IN ({0})";
var stringBuiler = new StringBuilder();
var familyIds = new List<string>();
string line;
while ((line = TextFileReader.ReadLine()) != null)
{
line = line.Trim();
if (!familyIds.Contains(line) && !string.IsNullOrEmpty(line))
{
familyIds.Add(line);
}
}
var sqlCommand = new SqlCommand
{
Connection = Connection,
CommandType = CommandType.Text
};
var index = 0; // Reset the index
var idParameterList = new List<string>();
foreach (var familyId in familyIds)
{
var paramName = "@familyId" + index;
sqlCommand.Parameters.AddWithValue(paramName, familyId);
idParameterList.Add(paramName);
index++;
}
sqlCommand.CommandText = String.Format(query, string.Join(",", idParameterList));
var dt = new DataTable();
using (SqlDataReader sqlReader = sqlCommand.ExecuteReader())
{
dt.Load(sqlReader);
}
try
{
if (dt.Rows.Count > 0)
{
OutputdataGridView.DataSource = dt;
OutputdataGridView.ColumnHeadersDefaultCellStyle.Font = new Font(DataGridView.DefaultFont, FontStyle.Bold);
OutputdataGridView.Columns[0].AutoSizeMode = DataGridViewAutoSizeColumnMode.AllCells;
Gridviewdisplaylabel.Text = "Total no of rows: " + this.OutputdataGridView.Rows.Count.ToString();
}
else if (dt.Rows.Count == 0)
{
MessageBox.Show("Data returned blank!!!");
}
}
catch (Exception Ex)
{
if (Connection != null)
{
Connection.Close();
}
MessageBox.Show(Ex.Message);
}
}
Having a WHERE IN clause with 2100, or even 100, parameters is generally not good coding practice. You might want to consider putting those values into a separate bona fide table, e.g.
families (ID int PK, ...)
Then, you may rewrite your query as:
SELECT FamilyID, FullName, Alias
FROM TABLE (nolock)
WHERE FamilyID IN (SELECT ID FROM families);
You could also express the above using an EXISTS clause or a join, but all three approaches might just optimize to a very similar query plan anyway.
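The EXISTS form of the same query would be, for example:
SELECT t.FamilyID, t.FullName, t.Alias
FROM TABLE t (nolock)
WHERE EXISTS (SELECT 1 FROM families f WHERE f.ID = t.FamilyID);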
You can just add a table load call every 2000 parameters in your code:
var index = 0; // Reset the index
var idParameterList = new List<string>();
var dt = new DataTable();
foreach (var familyId in familyIds) {
var paramName = "@familyId" + index;
sqlCommand.Parameters.AddWithValue(paramName, familyId);
idParameterList.Add(paramName);
index++;
if (index > 2000) {
sqlCommand.CommandText = String.Format(query, string.Join(",", idParameterList));
using (SqlDataReader sqlReader = sqlCommand.ExecuteReader())
dt.Load(sqlReader);
sqlCommand.Parameters.Clear();
idParameterList.Clear();
index = 0;
}
}
if (index > 0) {
sqlCommand.CommandText = String.Format(query, string.Join(",", idParameterList));
using (SqlDataReader sqlReader = sqlCommand.ExecuteReader())
dt.Load(sqlReader);
}
For dynamic sql like this, I generally recommend using a Table-Valued Parameter.
It does require a bit of setup: you have to create a user-defined Type in the DB to hold the values, but that is a fairly trivial operation:
CREATE TYPE PrimaryKeyType AS TABLE ( VALUE INT NOT NULL );
We generally use these in conjunction with stored procedures:
CREATE PROCEDURE dbo.getFamily(@PrimaryKeys PrimaryKeyType READONLY)
AS
SELECT FamilyID, FullName, Alias
FROM TABLE (nolock) INNER JOIN @PrimaryKeys pk ON TABLE.FamilyID = pk.Value
GO
However, you can also use inline SQL if you prefer.
Assigning the values to the stored proc or inline parameter is fairly straightforward, but there is one gotcha (more later):
public static void AssignValuesToPKTableTypeParameter(DbParameter parameter, ICollection<int> primaryKeys)
{
    // Exceptions are handled by the caller
    var sqlParameter = parameter as SqlParameter;
    if (sqlParameter != null && sqlParameter.SqlDbType == SqlDbType.Structured)
    {
        // The type name may look like DatabaseName.dbo.PrimaryKeyType,
        // so remove the database name if it is present
        var parts = sqlParameter.TypeName.Split('.');
        if (parts.Length == 3)
        {
            sqlParameter.TypeName = parts[1] + "." + parts[2];
        }
    }
    if (primaryKeys == null)
    {
        primaryKeys = new List<int>();
    }
    var table = new DataTable();
    table.Columns.Add("Value", typeof(int));
    foreach (var wPrimaryKey in primaryKeys)
    {
        table.Rows.Add(wPrimaryKey);
    }
    parameter.Value = table;
}
The thing to watch out for here is the naming of the parameter. See the code in the method above that removes the database name to resolve this issue.
If you have dynamic SQL, you can generate a correct parameter using the following method:
public static SqlParameter CreateTableValuedParameter(string typeName, string parameterName)
{
    // Exceptions are handled by the caller
    var oParameter = new SqlParameter();
    oParameter.ParameterName = parameterName;
    oParameter.SqlDbType = SqlDbType.Structured;
    oParameter.TypeName = typeName;
    return oParameter;
}
Where typeName is the name of your type in the DB.
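Putting the two helpers together, a hypothetical end-to-end call (assuming the family IDs have already been parsed into an ICollection<int> named familyIdInts) might look like:
using (var conn = new SqlConnection(connectionString))
using (var cmd = new SqlCommand("dbo.getFamily", conn))
{
    cmd.CommandType = CommandType.StoredProcedure;
    var param = CreateTableValuedParameter("dbo.PrimaryKeyType", "@PrimaryKeys");
    AssignValuesToPKTableTypeParameter(param, familyIdInts);
    cmd.Parameters.Add(param);
    conn.Open();
    var dt = new DataTable();
    using (var reader = cmd.ExecuteReader())
    {
        dt.Load(reader);
    }
}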
I'm trying to use SqlBulkCopy as a way of doing multiple INSERTs at once, but for some reason I'm getting a unique constraint violation when running WriteToServer(DataTable). The odd thing about this SqlException is that the constraint it names does not match the column the duplicate value is actually being sent to (details below).
My table schema:
CREATE TABLE Product (
    ID INT IDENTITY (1, 1) PRIMARY KEY,
    Name NVARCHAR(450) UNIQUE NOT NULL, -- Unique constraint being violated
    BulkInsertID NCHAR(6) -- Column the duplicate value is being sent to
);
The only reason I can think of as to why this is happening is that I mixed up the column names when assigning them inside the DataColumns, but I have checked them multiple times and cannot find any issue with them.
Minimal, Complete and Verifiable Example:
class Program
{
private static SqlConnection connection;
private static string connectionURL = "Server=ASUS-X750JA\\DIRECTORY;Database=directory;Integrated Security=True;";
private static Random _random = new Random();
public static SqlConnection openConnection()
{
connection = new SqlConnection(connectionURL);
connection.Open();
Console.WriteLine("Opened connection to DB");
return connection;
}
public static void closeConnection()
{
connection.Close();
Console.WriteLine("Closed connection to DB");
}
static void Main(string[] args)
{
List<string> productNames = new List<string>();
productNames.Add("Diamond");
productNames.Add("Gold");
productNames.Add("Silver");
productNames.Add("Platinum");
productNames.Add("Pearl");
addProducts(productNames);
}
private static void addProducts(List<string> productNames)
{
const string tableName = "Product";
DataTable table = new DataTable(tableName);
string bulkInsertID;
do
{
bulkInsertID = generateID();
} while (isDuplicateBulkInsertID(tableName, bulkInsertID));
DataColumn nameColumn = new DataColumn("Name");
nameColumn.Unique = true;
nameColumn.AllowDBNull = false;
DataColumn bulkInsertIDColumn = new DataColumn("BulkInsertID");
bulkInsertIDColumn.Unique = false;
bulkInsertIDColumn.AllowDBNull = true;
table.Columns.Add(nameColumn);
table.Columns.Add(bulkInsertIDColumn);
foreach (string productName in productNames)
{
DataRow row = table.NewRow();
row[nameColumn] = productName;
row[bulkInsertIDColumn] = bulkInsertID;
table.Rows.Add(row);
}
using (SqlConnection connection = openConnection())
{
using (SqlBulkCopy bulkCopy = new SqlBulkCopy(connection))
{
bulkCopy.DestinationTableName = table.TableName;
bulkCopy.WriteToServer(table);
}
}
}
/// <summary>
/// Generates random 6-character string but it's not like GUID so may need to check for duplicates
/// </summary>
/// <returns></returns>
public static string generateID()
{
char[] _base62chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz".ToCharArray();
int length = 6;
var sb = new StringBuilder(length);
for (int i = 0; i < length; i++)
sb.Append(_base62chars[_random.Next(62)]);
return sb.ToString();
}
public static bool isDuplicateBulkInsertID(string tableName, string bulkInsertID)
{
string query = string.Format("SELECT BulkInsertID FROM {0} WHERE BulkInsertID = @bulkinsertid", tableName);
SqlCommand command = new SqlCommand(query, openConnection());
SqlParameter bulkInsertIDParam = new SqlParameter("@bulkinsertid", SqlDbType.NChar, bulkInsertID.Length);
bulkInsertIDParam.Value = bulkInsertID;
command.Parameters.Add(bulkInsertIDParam);
command.Prepare();
Task<SqlDataReader> asyncTask = command.ExecuteReaderAsync();
SqlDataReader reader = asyncTask.Result;
bool isDuplicate = reader.HasRows;
closeConnection();
return isDuplicate;
}
}
The unique constraint shown in the screenshot belongs to the Name column but the duplicate key value is being sent to the BulkInsertID column and I don't know why the error is being thrown.
EDIT: I just changed my schema to use uniqueidentifier for the BulkInsertID column and changed row[bulkInsertIDColumn] = bulkInsertID to row[bulkInsertIDColumn] = Guid.NewGuid().ToString(). When I reran my code, the generated GUID ended up in the Name column. So I can conclude it's not a server issue but a problem in the program.
Because you have an identity column and no column mappings, bulk insert is trying to insert nameColumn into ID (and ignoring it because the column is an identity column) and bulkInsertIDColumn into Name. Just add the following to your insert to tell it to write to the correct columns.
using (SqlBulkCopy bulkCopy = new SqlBulkCopy(connection))
{
    bulkCopy.ColumnMappings.Add("Name", "Name"); //NEW
    bulkCopy.ColumnMappings.Add("BulkInsertID", "BulkInsertID"); //NEW
    bulkCopy.DestinationTableName = table.TableName;
    bulkCopy.WriteToServer(table);
}
The other option is to add an ID column to the table and just not put any values in it.
DataColumn idColumn = new DataColumn("ID");
DataColumn nameColumn = new DataColumn("Name");
//nameColumn.Unique = true; //SqlBulkCopy does not care about these settings.
//nameColumn.AllowDBNull = false;
DataColumn bulkInsertIDColumn = new DataColumn("BulkInsertID");
//bulkInsertIDColumn.Unique = false;
//bulkInsertIDColumn.AllowDBNull = true;
table.Columns.Add(idColumn);
table.Columns.Add(nameColumn);
table.Columns.Add(bulkInsertIDColumn);
foreach (string productName in productNames)
{
    DataRow row = table.NewRow();
    //We don't do anything with row[idColumn]
    row[nameColumn] = productName;
    row[bulkInsertIDColumn] = bulkInsertID;
    table.Rows.Add(row);
}
Looks like it's throwing the UNIQUE constraint violation for column BulkInsertID, though from the posted table schema I don't see it marked with that constraint, and in your code I see you have bulkInsertIDColumn.Unique = false;. Are you sure you are not setting it to true anywhere else?
BTW, it looks to me like it's throwing that exception because you are creating a new instance of Random() in a loop, as in the code block below:
do
{
bulkInsertID = generateID(); //calling method generateID
} while (isDuplicateBulkInsertID(tableName, bulkInsertID));
Whereas in generateID() you are creating a new instance of the Random class every time:
public static string generateID()
{
........
Random _random = new Random(); // creating new instance every time
I have been struggling to get the right C# code for reading the values returned by a PRAGMA table_info query.
Since my edit with extra code was rejected on that post, I made this question for other people who would otherwise waste hours looking for a quick solution.
Assuming you want a DataTable with the list of the fields of your table:
using (var con = new SQLiteConnection(preparedConnectionString))
{
    using (var cmd = new SQLiteCommand("PRAGMA table_info(" + tableName + ");"))
    {
        var table = new DataTable();
        cmd.Connection = con;
        cmd.Connection.Open();
        SQLiteDataAdapter adp = null;
        try
        {
            adp = new SQLiteDataAdapter(cmd);
            adp.Fill(table);
            con.Close();
            return table;
        }
        catch (Exception ex)
        { }
    }
}
The returned result has one row per column, with these fields:
cid: id of the column
name: the name of the column
type: the type of the column
notnull: 1 if the column cannot contain NULL values, 0 otherwise
dflt_value: the default value
pk: 1 if the column participates in the primary key, 0 otherwise
If you want only the column names in a List<string> you can use (you have to include System.Data.DataSetExtensions):
return table.AsEnumerable().Select(r=>r["name"].ToString()).ToList();
EDIT: Or you can avoid the DataSetExtensions reference using this code:
using (var con = new SQLiteConnection(preparedConnectionString))
{
    using (var cmd = new SQLiteCommand("PRAGMA table_info(" + tableName + ");"))
    {
        var table = new DataTable();
        cmd.Connection = con;
        cmd.Connection.Open();
        SQLiteDataAdapter adp = null;
        try
        {
            adp = new SQLiteDataAdapter(cmd);
            adp.Fill(table);
            con.Close();
            var res = new List<string>();
            for (int i = 0; i < table.Rows.Count; i++)
                res.Add(table.Rows[i]["name"].ToString());
            return res;
        }
        catch (Exception ex) { }
    }
}
return new List<string>();
There are a lot of PRAGMA statements that you can use in SQLite; have a look at the link.
About the using statement: it's very simple; it is used to make sure that disposable objects are disposed of, whatever happens in your code: see this link or this reference
Code:
DB = new SQLiteConnection(@"Data Source=" + DBFileName);
DB.Open();
SQLiteCommand command = new SQLiteCommand("PRAGMA table_info('tracks')", DB);
DataTable dataTable = new DataTable();
SQLiteDataAdapter dataAdapter = new SQLiteDataAdapter(command);
dataAdapter.Fill(dataTable);
DB.Close();
foreach (DataRow row in dataTable.Rows)
{
    DBColumnNames.Add((string)row[dataTable.Columns[1]]);
}
//Out(String.Join(",", DBColumnNames.ToArray())); //debug
All elements in the resulting rows:
int cid, string name, string type, int notnull, string dflt_value, int pk
More info on PRAGMA
Not sure if this is exactly what you are after, but this is how I have grabbed the data and subsequently used it. Hope it helps!
Obviously the switch is not covering all eventualities, just those I have needed so far.
/// <summary>
/// Allows the programmer to easily update rows in the DB.
/// </summary>
/// <param name="tableName">The table to update.</param>
/// <param name="data">A dictionary containing Column names and their new values.</param>
/// <param name="where">The where clause for the update statement.</param>
/// <returns>A boolean true or false to signify success or failure.</returns>
public bool Update(String tableName, Dictionary<String, String> data, String where)
{
    String vals = "";
    Boolean returnCode = true;
    // Need to determine the datatype of the fields to update, as this affects how the sql needs to be formatted
    String colQuery = "PRAGMA table_info(" + tableName + ")";
    DataTable colDataTypes = GetDataTable(colQuery);
    if (data.Count >= 1)
    {
        foreach (KeyValuePair<String, String> pair in data)
        {
            DataRow[] colDataTypeRow = colDataTypes.Select("name = '" + pair.Key.ToString() + "'");
            String colDataType = "";
            if (pair.Key.ToString() == "rowid" || pair.Key.ToString() == "_rowid_" || pair.Key.ToString() == "oid")
            {
                colDataType = "INT";
            }
            else
            {
                colDataType = colDataTypeRow[0]["type"].ToString();
            }
            colDataType = colDataType.Split(' ').FirstOrDefault();
            if (colDataType.StartsWith("VARCHAR"))
            {
                colDataType = "VARCHAR";
            }
            switch (colDataType)
            {
                case "INTEGER": case "INT": case "NUMERIC": case "REAL":
                    vals += String.Format(" {0} = {1},", pair.Key.ToString(), pair.Value.ToString());
                    break;
                case "TEXT": case "VARCHAR": case "DATE": case "DATETIME":
                    vals += String.Format(" {0} = '{1}',", pair.Key.ToString(), pair.Value.ToString());
                    break;
            }
        }
        vals = vals.Substring(0, vals.Length - 1);
    }
    try
    {
        string sql = String.Format("update {0} set {1} where {2};", tableName, vals, where);
        dbl.AppendLine(sql);
        this.ExecuteNonQuery(sql);
    }
    catch (Exception crap)
    {
        OutCrap(crap);
        returnCode = false;
    }
    return returnCode;
}
I'm feeding a ReportDataSource with a query using Dapper.
However, I get an empty report, even though the IEnumerable has data loaded.
When I pass a DataTable, it works.
How do I pass data from a Dapper query to the ReportViewer?
this.reportViewer.LocalReport.DataSources.Clear();
DataTable dt = new DataTable();
dt = CN.Query(Sql, param);
Microsoft.Reporting.WinForms.ReportDataSource rprtDTSource = new Microsoft.Reporting.WinForms.ReportDataSource(dt.TableName, dt);
this.reportViewer.LocalReport.DataSources.Add(rprtDTSource);
this.reportViewer.RefreshReport();
Looks like Dapper now supports the DataTable...
From the test:
public void ExecuteReader()
{
    var dt = new DataTable();
    dt.Load(connection.ExecuteReader("select 3 as [three], 4 as [four]"));
    dt.Columns.Count.IsEqualTo(2);
    dt.Columns[0].ColumnName.IsEqualTo("three");
    dt.Columns[1].ColumnName.IsEqualTo("four");
    dt.Rows.Count.IsEqualTo(1);
    ((int)dt.Rows[0][0]).IsEqualTo(3);
    ((int)dt.Rows[0][1]).IsEqualTo(4);
}
Also now supported is using a DataTable as a Table-Valued Parameter:
public void DataTableParameters()
{
    try { connection.Execute("drop proc #DataTableParameters"); } catch { }
    try { connection.Execute("drop table #DataTableParameters"); } catch { }
    try { connection.Execute("drop type MyTVPType"); } catch { }
    connection.Execute("create type MyTVPType as table (id int)");
    connection.Execute("create proc #DataTableParameters @ids MyTVPType readonly as select count(1) from @ids");
    var table = new DataTable { Columns = { { "id", typeof(int) } }, Rows = { { 1 }, { 2 }, { 3 } } };
    int count = connection.Query<int>("#DataTableParameters", new { ids = table.AsTableValuedParameter() }, commandType: CommandType.StoredProcedure).First();
    count.IsEqualTo(3);
    count = connection.Query<int>("select count(1) from @ids", new { ids = table.AsTableValuedParameter("MyTVPType") }).First();
    count.IsEqualTo(3);
    try
    {
        connection.Query<int>("select count(1) from @ids", new { ids = table.AsTableValuedParameter() }).First();
        throw new InvalidOperationException();
    }
    catch (Exception ex)
    {
        ex.Message.Equals("The table type parameter 'ids' must have a valid type name.");
    }
}
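Applied to the original ReportViewer scenario, the ExecuteReader route might look like this; note that a freshly loaded DataTable has an empty TableName, so the data source name (here "DataSet1", a placeholder) must be supplied explicitly:
var dt = new DataTable();
dt.Load(CN.ExecuteReader(Sql, param));
this.reportViewer.LocalReport.DataSources.Clear();
this.reportViewer.LocalReport.DataSources.Add(new Microsoft.Reporting.WinForms.ReportDataSource("DataSet1", dt));
this.reportViewer.RefreshReport();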
As I could not find another way to feed my ReportViewer from a Dapper query, I downloaded the Dapper source and added the code below.
#region CODTEC SISTEMAS
/// <summary>
/// Return a DataTable; the reader is closed after the call
/// </summary>
public static DataTable Query(this IDbConnection cnn, string sql, object param, IDbTransaction transaction, int? commandTimeout, CommandType? commandType)
{
    var identity = new Identity(sql, commandType, cnn, typeof(DapperRow), param == null ? null : param.GetType(), null);
    var info = GetCacheInfo(identity);
    IDbCommand cmd = null;
    IDataReader reader = null;
    bool wasClosed = cnn.State == ConnectionState.Closed;
    try
    {
        cmd = SetupCommand(cnn, transaction, sql, info.ParamReader, param, commandTimeout, commandType);
        if (wasClosed) cnn.Open();
        reader = cmd.ExecuteReader(wasClosed ? CommandBehavior.CloseConnection : CommandBehavior.Default);
        wasClosed = false; // *if* the connection was closed and we got this far, then we now have a reader
                           // with the CloseConnection flag, so the reader will deal with the connection; we
                           // still need something in the "finally" to ensure that broken SQL still results
                           // in the connection closing itself
        DataTable dt = new DataTable();
        dt.Load(reader);
        // happy path; close the reader cleanly - no
        // need for "Cancel" etc
        reader.Dispose();
        reader = null;
        return dt;
    }
    finally
    {
        if (reader != null)
        {
            if (!reader.IsClosed) try { cmd.Cancel(); }
                catch { /* don't spoil the existing exception */ }
            reader.Dispose();
        }
        if (wasClosed) cnn.Close();
        if (cmd != null) cmd.Dispose();
    }
}
#endregion
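With that extension in place, the original snippet reduces to the line below; all arguments must be passed, since the overload declares no defaults:
DataTable dt = CN.Query(Sql, param, null, null, null);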
To get a DataTable from a SQLite DB using Dapper:
Pass a query such as "select * from table" into the method below. A list is formed by executing the query; the list is then serialized to JSON, and the JSON is deserialized into a DataTable (this uses Json.NET).
public DataTable method(string query)
{
    string connection = @"Data Source=C:\User\DBFolder\sampleDB.db;Version=3;New=False;Compress=True;";
    using (IDbConnection dbConnection = new SQLiteConnection(connection))
    {
        dbConnection.Open();
        var output1 = dbConnection.Query(query).ToList();
        dbConnection.Close();
        var json = JsonConvert.SerializeObject(output1);
        DataTable dt = (DataTable)JsonConvert.DeserializeObject(json, typeof(DataTable));
        return dt;
    }
}
Not sure how Dapper works, but a DataTable is bound like this:
DataTable dt = new DataTable();
DataColumn dc = dt.Columns.Add();
dc.ColumnName = "DataColumn1";
dc = dt.Columns.Add();
dc.ColumnName = "DataColumn2";
dt.Rows.Add(new object[] { "Frank", 32 });
this.reportViewer1.LocalReport.DataSources.Clear();
this.reportViewer1.LocalReport.DataSources.Add(new ReportDataSource("DataSet1_DataTable1", dt));
this.reportViewer1.RefreshReport();
and since I am using C#, I bind data sources like this:
this.bindingSource1.DataSource = getListMethod(); // this method/property returns a list of objects
this.reportViewer1.LocalReport.DataSources.Add(new ReportDataSource("Point", (this.bindingSource1)));
//Point is the datatable name in my Dataset.xsd file
In my code the user can upload an Excel document which contains their phone contact list. As the developer, I should read that Excel file, turn it into a DataTable and insert it into the database.
The problem is that some clients have a huge number of contacts, say 5000 or more, and when I try to insert this amount of data into the database it crashes and gives me a timeout exception.
What would be the best way to avoid this kind of exception, and is there any code that can reduce the time of the insert statements so the user doesn't wait too long?
The code:
public SqlConnection connection = new SqlConnection(System.Configuration.ConfigurationManager.ConnectionStrings["ConnectionString"].ConnectionString);
public void Insert(string InsertQuery)
{
SqlDataAdapter adp = new SqlDataAdapter();
adp.InsertCommand = new SqlCommand(InsertQuery, connection);
if (connection.State == System.Data.ConnectionState.Closed)
{
connection.Open();
}
adp.InsertCommand.ExecuteNonQuery();
connection.Close();
}
protected void submit_Click(object sender, EventArgs e)
{
string UploadFolder = "Savedfiles/";
if (Upload.HasFile) {
string fileName = Upload.PostedFile.FileName;
string path=Server.MapPath(UploadFolder+fileName);
Upload.SaveAs(path);
Msg.Text = "successfully uploaded";
DataTable ValuesDt = new DataTable();
ValuesDt = ConvertExcelFileToDataTable(path);
Session["valuesdt"] = ValuesDt;
Excel_grd.DataSource = ValuesDt;
Excel_grd.DataBind();
}
}
protected void SendToServer_Click(object sender, EventArgs e)
{
DataTable Values = Session["valuesdt"] as DataTable ;
if(Values.Rows.Count>0)
{
DataTable dv = Values.DefaultView.ToTable(true, "Mobile1", "Mobile2", "Tel", "Category");
double Mobile1,Mobile2,Tel;string Category="";
for (int i = 0; i < Values.Rows.Count; i++)
{
Mobile1 =Values.Rows[i]["Mobile1"].ToString()==""?0: double.Parse(Values.Rows[i]["Mobile1"].ToString());
Mobile2 = Values.Rows[i]["Mobile2"].ToString() == "" ? 0 : double.Parse(Values.Rows[i]["Mobile2"].ToString());
Tel = Values.Rows[i]["Tel"].ToString() == "" ? 0 : double.Parse(Values.Rows[i]["Tel"].ToString());
Category = Values.Rows[i]["Category"].ToString();
Insert("INSERT INTO client(Mobile1,Mobile2,Tel,Category) VALUES(" + Mobile1 + "," + Mobile2 + "," + Tel + ",'" + Category + "')");
Msg.Text = "Submitied successfully to the server ";
}
}
}
You can try SqlBulkCopy to insert the DataTable into a database table.
Something like this:
using (SqlBulkCopy bulkCopy = new SqlBulkCopy(sqlConnection, SqlBulkCopyOptions.KeepIdentity))
{
    bulkCopy.DestinationTableName = DestTableName;
    string[] DtColumnName = YourDataTableColumns;
    foreach (string dbcol in DbColumnName) // To map columns of the DataTable to those of the database table
    {
        foreach (string dtcol in DtColumnName)
        {
            if (dbcol.ToLower() == dtcol.ToLower())
            {
                SqlBulkCopyColumnMapping mapID = new SqlBulkCopyColumnMapping(dtcol, dbcol);
                bulkCopy.ColumnMappings.Add(mapID);
                break;
            }
        }
    }
    bulkCopy.WriteToServer(YourDataTableName.CreateDataReader());
}
For more, read http://msdn.microsoft.com/en-us/library/system.data.sqlclient.sqlbulkcopy.aspx
You are inserting one row at a time, which is very expensive for this amount of data.
In those cases you should use a bulk insert, so the round trip to the DB happens only once; if you need to roll back, it is all one transaction.
You can use SqlBulkCopy, which is more work, or you can use the batch update feature of the SqlDataAdapter. Instead of creating your own insert statement, building an adapter, and manually executing it, create a DataSet, fill it, create one SqlDataAdapter, set the number of inserts in a batch, then execute the adapter once.
I could repeat the code, but this article shows exactly how to do it: http://msdn.microsoft.com/en-us/library/kbbwt18a%28v=vs.80%29.aspx
protected void SendToServer_Click(object sender, EventArgs e)
{
    DataTable Values = Session["valuesdt"] as DataTable;
    if (Values.Rows.Count > 0)
    {
        DataTable dv = Values.DefaultView.ToTable(true, "Mobile1", "Mobile2", "Tel", "Category");
        // Fix up default values
        for (int i = 0; i < Values.Rows.Count; i++)
        {
            Values.Rows[i]["Mobile1"] = Values.Rows[i]["Mobile1"].ToString() == "" ? 0 : double.Parse(Values.Rows[i]["Mobile1"].ToString());
            Values.Rows[i]["Mobile2"] = Values.Rows[i]["Mobile2"].ToString() == "" ? 0 : double.Parse(Values.Rows[i]["Mobile2"].ToString());
            Values.Rows[i]["Tel"] = Values.Rows[i]["Tel"].ToString() == "" ? 0 : double.Parse(Values.Rows[i]["Tel"].ToString());
            Values.Rows[i]["Category"] = Values.Rows[i]["Category"].ToString();
        }
        BatchUpdate(dv, 1000);
    }
}
public static void BatchUpdate(DataTable dataTable, Int32 batchSize)
{
    // Assumes GetConnectionString() returns a valid connection string.
    string connectionString = GetConnectionString();
    // Connect to the database.
    using (SqlConnection connection = new SqlConnection(connectionString))
    {
        // Create a SqlDataAdapter.
        SqlDataAdapter adapter = new SqlDataAdapter();
        // Set the INSERT command and parameters; each parameter is bound to its source column in the DataTable.
        adapter.InsertCommand = new SqlCommand(
            "INSERT INTO client(Mobile1,Mobile2,Tel,Category) VALUES(@Mobile1,@Mobile2,@Tel,@Category);", connection);
        adapter.InsertCommand.Parameters.Add("@Mobile1", SqlDbType.Float, 0, "Mobile1");
        adapter.InsertCommand.Parameters.Add("@Mobile2", SqlDbType.Float, 0, "Mobile2");
        adapter.InsertCommand.Parameters.Add("@Tel", SqlDbType.Float, 0, "Tel");
        adapter.InsertCommand.Parameters.Add("@Category", SqlDbType.NVarChar, 50, "Category");
        adapter.InsertCommand.UpdatedRowSource = UpdateRowSource.None;
        // Set the batch size.
        adapter.UpdateBatchSize = batchSize;
        // Execute the update.
        adapter.Update(dataTable);
    }
}
I know this is a super old post, but you should not need to use the bulk operations explained in the existing answers for 5000 inserts. Your performance is suffering so much because you close and reopen the connection for each row insert. Here is some code I have used in the past that keeps one connection open and executes as many commands as needed to push all the data to the DB:
public static class DataWorker
{
    public static Func<IEnumerable<T>, Task> GetStoredProcedureWorker<T>(Func<SqlConnection> connectionSource, string storedProcedureName, Func<T, IEnumerable<(string paramName, object paramValue)>> parameterizer)
    {
        if (connectionSource is null) throw new ArgumentNullException(nameof(connectionSource));
        SqlConnection openConnection()
        {
            var conn = connectionSource() ?? throw new ArgumentNullException(nameof(connectionSource), $"Connection from {nameof(connectionSource)} cannot be null");
            var connState = conn.State;
            if (connState != ConnectionState.Open)
            {
                conn.Open();
            }
            return conn;
        }
        async Task DoStoredProcedureWork(IEnumerable<T> workData)
        {
            using (var connection = openConnection())
            using (var command = connection.CreateCommand())
            {
                command.CommandType = CommandType.StoredProcedure;
                command.CommandText = storedProcedureName;
                command.Prepare();
                foreach (var thing in workData)
                {
                    command.Parameters.Clear();
                    foreach (var (paramName, paramValue) in parameterizer(thing))
                    {
                        command.Parameters.AddWithValue(paramName, paramValue ?? DBNull.Value);
                    }
                    await command.ExecuteNonQueryAsync().ConfigureAwait(false);
                }
            }
        }
        return DoStoredProcedureWork;
    }
}
This was actually from a project where I was gathering emails for a restriction list, so here is a somewhat relevant example of what a parameterizer argument might look like and how to use the code above:
IEnumerable<(string, object)> RestrictionToParameter(EmailRestriction emailRestriction)
{
    yield return ("@emailAddress", emailRestriction.Email);
    yield return ("@reason", emailRestriction.Reason);
    yield return ("@restrictionType", emailRestriction.RestrictionType);
    yield return ("@dateTime", emailRestriction.Date);
}
var worker = DataWorker.GetStoredProcedureWorker<EmailRestriction>(ConnectionFactory, @"[emaildata].[AddRestrictedEmail]", RestrictionToParameter);
await worker(emailRestrictions).ConfigureAwait(false);