I am reading from a table with a million records, modifying some of the columns, and I would like to save the changes back in bulk, performing a bulk update after every 10,000 records.
I'm using .NET Core 3.1.
My code :
public void FixText()
{
SqlConnection cnn = new SqlConnection(strConn);
string queryString = "SELECT ClientID, ServerName, Text FROM [dbo].[Client]";
SqlCommand selectCmd = new SqlCommand(queryString, cnn);
cnn.Open();
int j = 0;
SqlDataReader reader = selectCmd.ExecuteReader();
List<Client> clients = new List<Client>();
try
{
while (reader.Read())
{
j++;
int clientId = (int)reader["ClientID"];
string serverName = reader["ServerName"].ToString();
string text = reader["Text"].ToString();
//Modify Text & set ServerName
text = UpdateText(text);
if (text.StartsWith("doo")) { serverName = "re"; }
clients.Add(new Client()
{
ClientID = clientId,
ServerName = serverName,
Text = text,
});
//perform bulk update every 10000 rows
if (j >= 10000)
{
//I'm struggling here on what to use to do a bulk update
j = 0;
clients.Clear();
}
}
}
catch (Exception)
{
throw;
}
finally
{
reader.Close();
}
cnn.Close();
}
Any Help is appreciated!
You have two options: either use a MERGE statement, or an UPDATE.
I will go with the UPDATE option, as it's the easier one. (This needs the FastMember NuGet package for ObjectReader.)
private void ExecuteSql(SqlConnection connection , string sql , SqlParameter[] parameters = null)
{
if(connection == null)
{
throw new ArgumentNullException(nameof(connection));
}
if(string.IsNullOrWhiteSpace(sql))
{
throw new ArgumentNullException(nameof(sql));
}
using(var command = new SqlCommand(sql , connection))
{
if(parameters?.Length > 0)
{
command.Parameters.AddRange(parameters);
}
if(connection.State != ConnectionState.Open)
connection.Open();
command.ExecuteNonQuery();
}
}
private void ExecuteBulkCopy<T>(SqlConnection connection , IEnumerable<T> entries , string destinationTableName , string[] columns = null , int batchSize = 1000000)
{
if(connection == null)
{
throw new ArgumentNullException(nameof(connection));
}
if(entries == null)
{
throw new ArgumentNullException(nameof(entries));
}
if(string.IsNullOrWhiteSpace(destinationTableName))
{
throw new ArgumentNullException(nameof(destinationTableName));
}
if(connection.State != ConnectionState.Open)
connection.Open();
using(SqlBulkCopy sbc = new SqlBulkCopy(connection)
{
BulkCopyTimeout = 0 ,
DestinationTableName = destinationTableName ,
BatchSize = batchSize
})
{
using(var reader = ObjectReader.Create(entries , columns))
{
sbc.WriteToServer(reader);
}
}
}
private IEnumerable<Client> GetUpdatedClients(SqlConnection connection)
{
using(var command = new SqlCommand("SELECT ClientID, ServerName, Text FROM [dbo].[Client]", connection))
{
if(connection.State != ConnectionState.Open)
connection.Open();
using(SqlDataReader reader = command.ExecuteReader())
{
if(reader.HasRows)
{
while(reader.Read())
{
// skip rows with a NULL key
if(reader.IsDBNull(0)) { continue; }
var clientId = (int)reader["ClientID"];
var serverName = reader["ServerName"]?.ToString();
var text = reader["Text"]?.ToString();
//Modify Text & set ServerName
string textUpdated = UpdateText(text);
if(textUpdated.StartsWith("doo"))
{
serverName = "re";
}
var client = new Client()
{
ClientID = clientId,
ServerName = serverName,
Text = textUpdated
};
yield return client;
}
}
}
}
}
private void BulkUpdateClients(SqlConnection connection, IEnumerable<Client> clients)
{
const string dropTempTable = "IF OBJECT_ID('[tempdb].[dbo].[##Client]') IS NOT NULL DROP TABLE [tempdb].[dbo].[##Client];";
// drop temp table if exists
ExecuteSql(connection, dropTempTable);
// create an empty temp table with the same structure as the target table
ExecuteSql(connection, "SELECT TOP 0 [ClientID], [ServerName], [Text] INTO [tempdb].[dbo].[##Client] FROM [dbo].[Client];");
// copy rows to the temp table
ExecuteBulkCopy(connection, clients, "[tempdb].[dbo].[##Client]", new[] { "ClientID", "ServerName", "Text" });
// Use UPDATE JOIN
ExecuteSql(connection, "UPDATE t1 SET [ServerName] = t2.[ServerName], [Text] = t2.[Text] FROM [dbo].[Client] t1 JOIN [tempdb].[dbo].[##Client] t2 ON t1.[ClientID] = t2.[ClientID];");
// drop temp table
ExecuteSql(connection, dropTempTable);
}
public void BulkUpdateClients()
{
try
{
using(var connection = new SqlConnection(strConn))
{
connection.Open();
var clients = GetUpdatedClients(connection);
// it's important to use the same connection and keep it a live
// otherwise the temp table will be dropped.
BulkUpdateClients(connection, clients);
}
}
catch(Exception)
{
throw;
}
}
If you don't need to use a temp table, you can change it to a permanent staging table (just change the temp table name).
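If you would rather use the MERGE option mentioned at the start, a rough, untested sketch of the equivalent step (reusing the same ExecuteSql helper and ##Client staging table from above) could look like this:
ExecuteSql(connection, @"
    MERGE [dbo].[Client] AS target
    USING [tempdb].[dbo].[##Client] AS source
        ON target.[ClientID] = source.[ClientID]
    WHEN MATCHED THEN
        UPDATE SET target.[ServerName] = source.[ServerName],
                   target.[Text]       = source.[Text];");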
There are two other ways to resolve this:
You can use User-Defined Types (a table-valued parameter), or send the array of records as JSON to a stored procedure and parse it with OPENJSON.
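As a rough sketch of the OPENJSON route (this assumes SQL Server 2016+ with a recent compatibility level, reuses the table and column names from the question, and the nvarchar lengths are only guesses), you could serialize the modified clients list to JSON and let a single UPDATE join against it:

// serialize the modified rows; System.Text.Json ships with .NET Core 3.1
string json = System.Text.Json.JsonSerializer.Serialize(clients);

const string sql = @"
UPDATE c
SET c.[ServerName] = j.[ServerName],
    c.[Text] = j.[Text]
FROM [dbo].[Client] c
JOIN OPENJSON(@json)
     WITH ( ClientID int '$.ClientID',
            ServerName nvarchar(200) '$.ServerName',
            [Text] nvarchar(max) '$.Text' ) j
  ON c.[ClientID] = j.[ClientID];";

using (var connection = new SqlConnection(strConn))
using (var command = new SqlCommand(sql, connection))
{
    command.Parameters.Add("@json", SqlDbType.NVarChar, -1).Value = json;
    connection.Open();
    command.ExecuteNonQuery();
}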
I have a file with 9 to 13 million records. As a list has a size limit, I have broken the processing into chunks of 1 million records and am trying to insert them.
Here is the code that reads the file and breaks it into chunks:
public static void InsertBulkData(string file)
{
List<FileData> lstFileData = new List<FileData>();
using (FileStream fs = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
using (BufferedStream bs = new BufferedStream(fs))
{
using (StreamReader sr = new StreamReader(bs))
{
string line = string.Empty;
while ((line = sr.ReadLine()) != null)
{
FileData obj = new FileData();
obj.Property1= line.Substring(5, 9).Trim();
obj.Property2= line.Substring(19, 40).Trim();
lstFileData.Add(obj);
if (lstFileData.Count == 1000000)
{
InsertDataIntoDatabase(lstFileData);
lstFileData = null;
GC.Collect();
lstFileData = new List<FileData>();
}
}
}
}
}
}
The loop runs 9 times, but the data is only saved on the first iteration; the remaining 8 iterations throw an exception.
private static void InsertDataIntoDatabase(List<FileData> lstFileData)
{
String query = @"INSERT INTO table
(
PrimaryCol,
Column1,
Column2
)
VALUES
(
primaryCol.NEXTVAL,
:Property1,
:Property2
)";
using (OracleConnection Conn = new OracleConnection())
{
try
{
Conn.ConnectionString = ConfigurationManager.ConnectionStrings["connection"].ToString();
Conn.Open();
using (var command = Conn.CreateCommand())
{
command.CommandText = query;
command.CommandType = CommandType.Text;
command.BindByName = true;
// In order to use ArrayBinding, the ArrayBindCount property
// of OracleCommand object must be set to the number of records to be inserted
command.ArrayBindCount = lstFileData.Count;
command.Parameters.Add(":Property1", OracleDbType.Varchar2, lstFileData.Select(c => c.Property1).ToArray(), ParameterDirection.Input);
command.Parameters.Add(":Property2", OracleDbType.Varchar2, lstFileData.Select(c => c.Property2).ToArray(), ParameterDirection.Input);
command.ExecuteNonQuery();
Conn.Close();
}
}
catch (Exception ex)
{
Conn.Close();
Console.WriteLine("Exception is:" + ex.InnerException);
}
}
}
So after the first iteration we get an error in command.ExecuteNonQuery(). Can anyone tell me why, and what the possible solution is?
Your problem comes from the use of a buffer that reaches its maximum size. Moreover, if you use successive single inserts, you will have performance problems. You should use another method (bulk copy). Here is some sample code:
public static void BulkCopy(string connectionString, DataTable dataTableToSend, string SQLTableName)
{
SqlConnection connexion = null;
try
{
connexion = new SqlConnection(connectionString);
BulkCopy(connexion, dataTableToSend, SQLTableName);
}
catch (Exception)
{
throw;
}
finally
{
connexion?.Close();
connexion?.Dispose();
}
}
int counter = 0;
string Line;
//Create a DataTable MyDataTable with the same structure as your table in the database
//you can do: SELECT * FROM MyTable WHERE 1<>1 with a command to get the structured DataSet (and DataTable) - see the sketch after this snippet
using (System.IO.StreamReader file = new System.IO.StreamReader(fileName, Encoding.UTF8))
{
while ((Line = file.ReadLine()) != null)
{
counter++;
DataRow newrow = MyDataTable.NewRow();
//Set values
newrow["I_ID"] = counter;
newrow["L_NomFichier"] = FileObj.Name;
newrow["D_CreationFichier"] = FileObj.LastWriteTime;
newrow["L_Ligne"] = Line.DBNullIfNullOrEmpty();
newrow["D_DATCRE"] = newrow["D_DATMAJ"] = DateTime.Now;
newrow["C_UTICRE"] = newrow["C_UTIMAJ"] = GlobalParam.User;
MyDataTable.Rows.Add(newrow);
//send a batch every 1000 rows
if (counter % 1000 == 0)
{
SqlHelper.BulkCopy(DBAccess.CONN_STRING, MyDataTable, "YOURTABLENAME");
MyDataTable.Rows.Clear();
}
}
//send the remaining rows
SqlHelper.BulkCopy(DBAccess.CONN_STRING, MyDataTable, "YOURTABLENAME");
MyDataTable.Rows.Clear();
file.Close();
}
I am trying to insert items into a database using SQLite, but when I call the load function I receive an error like "index out of range". I think the problem is when I call the add function. I checked the parameter values and everything seems to be OK, but the elements are not inserted into the table. Below you will see my table, the add function and the load function.
The table:
CREATE TABLE `Fisiere` (
`Nume` TEXT,
`Dimensiune` INTEGER,
`Data` BLOB,
`Rating_imdb` REAL,
`Cale` TEXT
);
The insert function:
public void addFisier(DirectorVideo[] directors)
{
var dbCommand = new SQLiteCommand();
dbCommand.Connection = _dbConnection;
dbCommand.CommandText = "insert into Fisiere(Nume, Dimensiune, Data, Rating_imdb, Cale) values(@nume, @dimensiune, @data, @rating_imdb, @cale);";
try {
_dbConnection.Open();
dbCommand.Transaction = _dbConnection.BeginTransaction();
for (int i = 0; i < directors.Length - 1; i++)
{
for (int j = 0; j < directors[i].nrFisiere; j++)
{
var numeParam = new SQLiteParameter("@nume");
numeParam.Value = directors[i].fisiere[j].numeFisier;
var dimensiuneParam = new SQLiteParameter("@dimensiune");
dimensiuneParam.Value = directors[i].fisiere[j].dimensiune;
var dataParam = new SQLiteParameter("@data");
dataParam.Value = directors[i].fisiere[j].data;
var ratingParam = new SQLiteParameter("rating_imdb");
IMDb rat = new IMDb(directors[i].fisiere[j].numeFisier);
ratingParam.Value = rat.Rating;
var caleParam = new SQLiteParameter("cale");
caleParam.Value = directors[i].cale;
Console.WriteLine(numeParam.Value);
dbCommand.Parameters.Add(numeParam);
dbCommand.Parameters.Add(dimensiuneParam);
dbCommand.Parameters.Add(dataParam);
dbCommand.Parameters.Add(ratingParam);
dbCommand.Parameters.Add(caleParam);
Console.WriteLine(caleParam.Value);
dbCommand.Transaction.Commit();
Console.WriteLine("A fost inserat");
}
}
}
catch (Exception)
{
Console.WriteLine("muie");
dbCommand.Transaction.Rollback();
throw;
}
finally
{
if (_dbConnection.State != ConnectionState.Closed) _dbConnection.Close();
}
}
The load function:
public void LoadFiles()
{
const string stringSql = "PRAGMA database_list";
try
{
_dbConnection.Open();
SQLiteCommand sqlCommand = new SQLiteCommand(stringSql, _dbConnection);
SQLiteDataReader sqlReader = sqlCommand.ExecuteReader();
try
{
while (sqlReader.Read())
{
Console.WriteLine("se afiseaza");
Console.WriteLine((long)sqlReader["Id_fisier"]);
Console.WriteLine((string)sqlReader["Nume"]);
Console.WriteLine((long)sqlReader["Dimensiune"]);
Console.WriteLine(DateTime.Parse((string)sqlReader["Data"]));
Console.WriteLine((long)sqlReader["Rating_imdb"]);
Console.WriteLine((string)sqlReader["Cale"]);
}
}
finally
{
// Always call Close when done reading.
sqlReader.Close();
}
}
finally
{
if (_dbConnection.State != ConnectionState.Closed) _dbConnection.Close();
}
}
You are going to need to commit your transaction correctly. You call BeginTransaction(), but you never execute the inserts before committing, so the data never gets written to the DB. Check this out for the details and workflow of transactions: https://www.sqlite.org/lang_transaction.html
The other problem is that your transactions and your commands are all out of order. You need to execute your commands within your transaction and then commit the transaction.
public void addFisier(DirectorVideo[] directors)
{
SQLiteTransaction dbTrans = null;
try
{
_dbConnection.Open();
// Start Transaction first.
dbTrans = _dbConnection.BeginTransaction();
for (int i = 0; i < directors.Length - 1; i++)
{
for (int j = 0; j < directors[i].nrFisiere; j++)
{
// Create commands that run based on your number of inserts.
var dbCommand = new SQLiteCommand();
dbCommand.Connection = _dbConnection;
dbCommand.CommandText = "insert into Fisiere(Nume, Dimensiune, Data, Rating_imdb, Cale) values(@nume, @dimensiune, @data, @rating_imdb, @cale);";
dbCommand.Transaction = dbTrans;
var numeParam = new SQLiteParameter("@nume");
numeParam.Value = directors[i].fisiere[j].numeFisier;
var dimensiuneParam = new SQLiteParameter("@dimensiune");
dimensiuneParam.Value = directors[i].fisiere[j].dimensiune;
var dataParam = new SQLiteParameter("@data");
dataParam.Value = directors[i].fisiere[j].data;
var ratingParam = new SQLiteParameter("@rating_imdb");
IMDb rat = new IMDb(directors[i].fisiere[j].numeFisier);
ratingParam.Value = rat.Rating;
var caleParam = new SQLiteParameter("@cale");
caleParam.Value = directors[i].cale;
Console.WriteLine(numeParam.Value);
dbCommand.Parameters.Add(numeParam);
dbCommand.Parameters.Add(dimensiuneParam);
dbCommand.Parameters.Add(dataParam);
dbCommand.Parameters.Add(ratingParam);
dbCommand.Parameters.Add(caleParam);
Console.WriteLine(caleParam.Value);
Console.WriteLine("A fost inserat");
// Actually execute the commands.
dbCommand.ExecuteNonQuery();
}
}
// If everything is good, commit the transaction.
dbTrans.Commit();
}
catch (Exception)
{
Console.WriteLine("muie");
dbTrans?.Rollback();
throw;
}
finally
{
if (_dbConnection.State != ConnectionState.Closed) _dbConnection.Close();
}
}
Here is also a snippet from the SQLiteTransaction Class documentation. I suggest reading it: https://www.devart.com/dotconnect/sqlite/docs/Devart.Data.SQLite~Devart.Data.SQLite.SQLiteTransaction.html
public static void RunSQLiteTransaction(string myConnString) {
using (SQLiteConnection sqConnection = new SQLiteConnection(myConnString)) {
sqConnection.Open();
// Start a local transaction
SQLiteTransaction myTrans = sqConnection.BeginTransaction(System.Data.IsolationLevel.ReadCommitted);
SQLiteCommand sqCommand = sqConnection.CreateCommand();
try {
sqCommand.CommandText = "INSERT INTO Dept(DeptNo, DName) Values(52, 'DEVELOPMENT')";
sqCommand.ExecuteNonQuery();
sqCommand.CommandText = "INSERT INTO Dept(DeptNo, DName) Values(62, 'PRODUCTION')";
sqCommand.ExecuteNonQuery();
myTrans.Commit();
Console.WriteLine("Both records are written to database.");
}
catch (Exception e) {
myTrans.Rollback();
Console.WriteLine(e.ToString());
Console.WriteLine("Neither record was written to database.");
}
finally {
sqCommand.Dispose();
myTrans.Dispose();
}
}
}
I am working on a simple registration page where the user can't enter a user name or email that already exists. I wrote code that prevents the user from entering a duplicate username and it worked, but when I tried to also prevent the user from entering the same email, it didn't work.
My question is: how can I add another condition so that the user can't enter an email that already exists?
I tried to do it in this code, but it didn't work:
protected void Button_Click(object sender, EventArgs e)
{
SqlConnection con = new SqlConnection( ConfigurationManager.ConnectionStrings["ConnectionString"].ConnectionString );
SqlCommand cmd1 = new SqlCommand("select 1 from Table where Name = @UserName", con);
SqlCommand cmd2 = new SqlCommand("select 1 from Table where Email = @UserEmail", con);
con.Open();
cmd1.Parameters.AddWithValue("@UserName", Name_id.Text);
cmd2.Parameters.AddWithValue("@UserEmail", Email_id.Text);
using (var dr1 = cmd1.ExecuteReader())
{
if (dr1.HasRows)
{
Label1.Text = "user name already exists";
}
using (var dr2 = cmd2.ExecuteReader())
{
if (dr2.HasRows)
{
Label1.Text = "email already exists";
}
else
{
dr1.Close();
dr2.Close();
//add new users
con.Close();
}
}
}
}
But I get this error:
There is already an open DataReader associated with this Command which must be closed first.
Like I said in my comment, your design is bad!
First, you should have a Data Access Layer. In big solutions this would be a separate project, but in your case you can add it as a new directory. In this directory you create a SqlManager class; here is the code:
public class SqlManager
{
public static string ConnectionString
{
get
{
return ConfigurationManager.ConnectionStrings["DevConnString"].ConnectionString;
}
}
public static SqlConnection GetSqlConnection(SqlCommand cmd)
{
if (cmd.Connection == null)
{
SqlConnection conn = new SqlConnection(ConnectionString);
conn.Open();
cmd.Connection = conn;
return conn;
}
return cmd.Connection;
}
public static int ExecuteNonQuery(SqlCommand cmd)
{
SqlConnection conn = GetSqlConnection(cmd);
try
{
return cmd.ExecuteNonQuery();
}
catch
{
throw;
}
finally
{
conn.Close();
}
}
public static object ExecuteScalar(SqlCommand cmd)
{
SqlConnection conn = GetSqlConnection(cmd);
try
{
return cmd.ExecuteScalar();
}
catch
{
throw;
}
finally
{
conn.Close();
}
}
public static DataSet GetDataSet(SqlCommand cmd)
{
return GetDataSet(cmd, "Table");
}
public static DataSet GetDataSet(SqlCommand cmd, string defaultTable)
{
SqlConnection conn = GetSqlConnection(cmd);
try
{
DataSet resultDst = new DataSet();
using (SqlDataAdapter adapter = new SqlDataAdapter(cmd))
{
adapter.Fill(resultDst, defaultTable);
}
return resultDst;
}
catch
{
throw;
}
finally
{
conn.Close();
}
}
public static DataRow GetDataRow(SqlCommand cmd)
{
return GetDataRow(cmd, "Table");
}
public static DataRow GetDataRow(SqlCommand cmd, string defaultTable)
{
SqlConnection conn = GetSqlConnection(cmd);
try
{
DataSet resultDst = new DataSet();
using (SqlDataAdapter adapter = new SqlDataAdapter(cmd))
{
adapter.Fill(resultDst, defaultTable);
}
if (resultDst.Tables.Count > 0 && resultDst.Tables[0].Rows.Count > 0)
{
return resultDst.Tables[0].Rows[0];
}
else
{
return null;
}
}
catch
{
throw;
}
finally
{
conn.Close();
}
}
}
After that you should have a Business Object Layer. In bigger solutions this is a project; in your case, a directory. If you are on the page TaxesEdit.aspx, you should add a Tax.cs class to the BO (business object) layer.
Here is an example method for the class, for your first check:
public DataSet GetTaxesByUserName(string userName)
{
SqlCommand cmd = new SqlCommand(@"
select 1 from Table where Name = @UserName");
cmd.Parameters.AddWithValue("@UserName", userName);
return DA.SqlManager.GetDataSet(cmd);
}
You fetch all the needed data into DataSets. After that you make checks such as taxesDst.Tables[0].Rows.Count > 0 (or == 0), as shown in the sketch below.
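For example, the button handler from the question could then shrink to something like this (a sketch only; Tax is the BO class suggested above, and the email check would follow the same pattern):

protected void Button_Click(object sender, EventArgs e)
{
    var tax = new Tax();
    DataSet userDst = tax.GetTaxesByUserName(Name_id.Text);

    if (userDst.Tables[0].Rows.Count > 0)
    {
        Label1.Text = "user name already exists";
        return;
    }

    // user name is free: run the same kind of check for the email, then insert the new user
}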
For the insert you can have a method like this:
public virtual void Insert(params object[] colValues)
{
if (colValues == null || colValues.Length % 2 != 0)
throw new ArgumentException("Invalid column values passed in. Expects pairs (ColumnName, ColumnValue).");
SqlCommand cmd = new SqlCommand("INSERT INTO " + TableName + " ( {0} ) VALUES ( {1} )");
string insertCols = string.Empty;
string insertParams = string.Empty;
for (int i = 0; i < colValues.Length; i += 2)
{
string separator = ", ";
if (i == colValues.Length - 2)
separator = "";
string param = "@P" + i;
insertCols += colValues[i] + separator;
insertParams += param + separator;
cmd.Parameters.AddWithValue(param, colValues[i + 1]);
}
cmd.CommandText = string.Format(cmd.CommandText, insertCols, insertParams);
DA.SqlManager.ExecuteNonQuery(cmd);
}
For this you need a TableName property in the current BO class.
This way these methods can be used everywhere, you need only one line of code to invoke them (see the example below), and problems like yours won't happen.
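For example, with a hypothetical Users BO class whose TableName points at the table from the question, the whole insert becomes one call:

var users = new Users(); // hypothetical BO class with TableName = "Table"
users.Insert("Name", Name_id.Text, "Email", Email_id.Text);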
You have opened another DataReader inside the first one, and that's causing the problem. Here I have rearranged your code a bit:
SqlConnection con = new SqlConnection(ConfigurationManager.ConnectionStrings["ConnectionString"].ConnectionString);
SqlCommand cmd1 = new SqlCommand("select 1 from Table where Name = @UserName", con),
cmd2 = new SqlCommand("select 1 from Table where Email = @UserEmail", con);
con.Open();
cmd1.Parameters.AddWithValue("@UserName", Name_id.Text);
cmd2.Parameters.AddWithValue("@UserEmail", Email_id.Text);
bool userExists = false, mailExists = false;
using (var dr1 = cmd1.ExecuteReader())
if (userExists = dr1.HasRows) Label1.Text = "user name already exists";
using (var dr2 = cmd2.ExecuteReader())
if (mailExists = dr2.HasRows) Label1.Text = "email already exists";
if (!(userExists || mailExists)) {
// can add User
}
You need to close one DataReader before opening the other one. Although it's not how I'd do it, you can deal with the runtime error by closing the DataReader after each if:
using (var dr1 = cmd1.ExecuteReader())
{
if (dr1.HasRows)
{
Label1.Text = "user name already exists";
}
dr1.Close();
}
using (var dr2 = cmd2.ExecuteReader())
{
if (dr2.HasRows)
{
Label1.Text = "email already exists";
}
else
{
//add new users
}
dr2.Close();
}
con.Close();
This may work, although there are a few things I would do differently...
protected void Button_Click(object sender, EventArgs e)
{
bool inputIsValid = true;
var con = new SqlConnection(ConfigurationManager.ConnectionStrings["ConnectionString"].ConnectionString);
var userNameCmd = new SqlCommand("SELECT 1 FROM Table WHERE Name = @UserName", con);
var emailCmd = new SqlCommand("SELECT 1 FROM Table WHERE Email = @UserEmail", con);
con.Open();
userNameCmd.Parameters.AddWithValue("@UserName", Name_id.Text);
emailCmd.Parameters.AddWithValue("@UserEmail", Email_id.Text);
using (var userNameReader = userNameCmd.ExecuteReader())
{
if (userNameReader.HasRows)
{
inputIsValid = false;
Label1.Text = "User name already exists";
}
}
using (var emailReader = emailCmd.ExecuteReader())
{
if (emailReader.HasRows)
{
inputIsValid = false;
Label1.Text = "Email address already exists";
}
}
if (inputIsValid)
{
// Insert code goes here
}
con.Close();
}
Why don't you do something like this:
[Flags]
public enum ValidationStatus
{
Valid = 0 ,
UserNameInUse = 1 ,
EmailInUse = 2 ,
}
public ValidationStatus ValidateUser( string userName , string emailAddr )
{
ValidationStatus status ;
string connectionString = ConfigurationManager.ConnectionStrings["ConnectionString"].ConnectionString ;
using ( SqlConnection con = new SqlConnection( connectionString ) )
using ( SqlCommand cmd = con.CreateCommand() )
{
cmd.CommandText = @"
select status = coalesce( ( select 1 from dbo.myTable t where t.UserName = @UserName ) , 0 )
+ coalesce( ( select 2 from dbo.myTable t where t.UserEmail = @UserEmail ) , 0 )
" ;
cmd.Parameters.AddWithValue( "@UserName" , userName ) ;
cmd.Parameters.AddWithValue( "@UserEmail" , emailAddr ) ;
int value = (int) cmd.ExecuteScalar() ;
status = (ValidationStatus) value ;
}
return status ;
}
Aside from anything else, hitting the DB twice for something like this is silly. And this more clearly expresses intent.
Then you can use it in your button click handler, something like this:
protected void Button_Click( object sender , EventArgs e )
{
string userName = Name_id.Text ;
string emailAddr = Email_id.Text ;
ValidationStatus status = ValidateUser( userName , emailAddr ) ;
switch ( status )
{
case ValidationStatus.Valid :
Label1.Text = "" ;
break ;
case ValidationStatus.EmailInUse :
Label1.Text = "Email address in use" ;
break ;
case ValidationStatus.UserNameInUse :
Label1.Text = "User name in use" ;
break ;
case ValidationStatus.EmailInUse|ValidationStatus.UserNameInUse:
Label1.Text = "Both user name and email address in use." ;
break ;
default :
throw new InvalidOperationException() ;
}
if ( status == ValidationStatus.Valid )
{
CreateNewUser() ;
}
}
I have a DataClassLibrary attached to my ASP.NET project. I use it to access the database to get my values. I want to take the values set by the Line1 class and put them in the corresponding labels. I tried DataLibraryClass.Line1 NewDataA = new DataLibraryClass.Line1(); but it gives me zero, and I know the fields have values. Could it be that my NewDataA = new ... is causing it to return zero? I also set breakpoints in the Line1 class and it never reaches the database query. How can I get the data I need into the labels properly?
DataLibraryClass
Line1:
var sqlString = new StringBuilder();
sqlString.Append("SELECT CaseNum6, CaseNum9, Group, Completion ");
sqlString.Append("FROM WorkOrder ");
sqlString.Append("WHERE Group = 1 OR Group = 2 ");
sqlString.Append("AND Completion = 0 ");
SqlDataReader reader = null;
SqlConnection dbConn = DBHelper.getConnection();
SqlParameter[] parameters = new SqlParameter[] { new SqlParameter("@CaseNum6", CaseNum6) };
try
{
reader = DBHelper.executeQuery(dbConn, sqlString.ToString(), parameters);
if (reader != null)
{
if (reader.Read())
{
CaseNum6 = (int)reader["CaseNum6"];
CaseNum9 = (int)reader["CaseNum9"];
Group = (int)reader["Group"];
Completion = (bool)reader["Completion"];
}
else
throw new Exception("No record returned");
reader.Close();
reader.Dispose();
dbConn.Close();
dbConn.Dispose();
DataLibraryClass
DBHelper:
private DBHelper() { }
public static SqlConnection getConnection()
{
return new SqlConnection(ConfigurationManager.ConnectionStrings["Connection"].ConnectionString);
}
public static SqlConnection getFRESHConnection()
{
return new SqlConnection(ConfigurationManager.ConnectionStrings["FRESHConnection"].ConnectionString);
}
public static SqlDataReader executeQuery(SqlConnection dbConn, string sqlString, SqlParameter[] parameters)
{
SqlCommand cmd = null;
SqlDataReader reader = null;
try
{
if (dbConn.State == ConnectionState.Closed)
dbConn.Open();
cmd = dbConn.CreateCommand();
cmd.CommandText = sqlString;
if (parameters != null)
{
cmd.Parameters.AddRange(parameters);
}
reader = cmd.ExecuteReader();
cmd.Dispose();
}
catch (Exception ex)
{
throw ex;
}
return reader;
Code behind ASP page:
DataClassLibrary.LineAData NewDataA = new DataClassLibrary.LineAData();
DataClassLibrary.LineBData NewDataB = new DataClassLibrary.LineBData();
protected void Page_Load(object sender, EventArgs e)
{
L1.Text = NewDataA.CaseNum6.ToString();
L2.Text = NewDataA.CaseNum9.ToString();
L4.Text = NewDataB.CaseNum6.ToString();
L5.Text = NewDataB.CaseGNum9.ToString();
}
Upon setting up the web page I failed to realize the code-behind was set to .vb, not .cs, which is why everything was not working.
In my code the user can upload an Excel document which contains their phone contact list. As the developer, I should read that Excel file, turn it into a DataTable and insert it into the database.
The problem is that some clients have a huge number of contacts, say 5000 or more, and when I try to insert that amount of data into the database it crashes and gives me a timeout exception.
What would be the best way to avoid this kind of exception, and is there any code that can reduce the time of the insert statement so the user doesn't wait too long?
The code:
public SqlConnection connection = new SqlConnection(System.Configuration.ConfigurationManager.ConnectionStrings["ConnectionString"].ConnectionString);
public void Insert(string InsertQuery)
{
SqlDataAdapter adp = new SqlDataAdapter();
adp.InsertCommand = new SqlCommand(InsertQuery, connection);
if (connection.State == System.Data.ConnectionState.Closed)
{
connection.Open();
}
adp.InsertCommand.ExecuteNonQuery();
connection.Close();
}
protected void submit_Click(object sender, EventArgs e)
{
string UploadFolder = "Savedfiles/";
if (Upload.HasFile) {
string fileName = Upload.PostedFile.FileName;
string path=Server.MapPath(UploadFolder+fileName);
Upload.SaveAs(path);
Msg.Text = "successfully uploaded";
DataTable ValuesDt = new DataTable();
ValuesDt = ConvertExcelFileToDataTable(path);
Session["valuesdt"] = ValuesDt;
Excel_grd.DataSource = ValuesDt;
Excel_grd.DataBind();
}
}
protected void SendToServer_Click(object sender, EventArgs e)
{
DataTable Values = Session["valuesdt"] as DataTable ;
if(Values.Rows.Count>0)
{
DataTable dv = Values.DefaultView.ToTable(true, "Mobile1", "Mobile2", "Tel", "Category");
double Mobile1,Mobile2,Tel;string Category="";
for (int i = 0; i < Values.Rows.Count; i++)
{
Mobile1 =Values.Rows[i]["Mobile1"].ToString()==""?0: double.Parse(Values.Rows[i]["Mobile1"].ToString());
Mobile2 = Values.Rows[i]["Mobile2"].ToString() == "" ? 0 : double.Parse(Values.Rows[i]["Mobile2"].ToString());
Tel = Values.Rows[i]["Tel"].ToString() == "" ? 0 : double.Parse(Values.Rows[i]["Tel"].ToString());
Category = Values.Rows[i]["Category"].ToString();
Insert("INSERT INTO client(Mobile1,Mobile2,Tel,Category) VALUES(" + Mobile1 + "," + Mobile2 + "," + Tel + ",'" + Category + "')");
Msg.Text = "Submitied successfully to the server ";
}
}
}
You can try SqlBulkCopy to insert the DataTable into the database table.
Something like this:
using (SqlBulkCopy bulkCopy = new SqlBulkCopy(sqlConnection, SqlBulkCopyOptions.KeepIdentity))
{
bulkCopy.DestinationTableName = DestTableName;
string[] DtColumnName = YourDataTableColumns; // the column names of your DataTable
foreach (string dbcol in DbColumnName) // DbColumnName: the column names of the database table; maps DataTable columns to them
{
foreach (string dtcol in DtColumnName)
{
if (dbcol.ToLower() == dtcol.ToLower())
{
SqlBulkCopyColumnMapping mapID = new SqlBulkCopyColumnMapping(dtcol, dbcol);
bulkCopy.ColumnMappings.Add(mapID);
break;
}
}
}
bulkCopy.WriteToServer(YourDataTableName.CreateDataReader());
bulkCopy.Close();
}
For more, read http://msdn.microsoft.com/en-us/library/system.data.sqlclient.sqlbulkcopy.aspx
You are inserting one row at a time, which is very expensive for this amount of data.
In those cases you should use a bulk insert, so there is only one round trip to the DB; and if you need to roll back, it is all one transaction (see the sketch below).
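A minimal sketch of that idea (assuming a DataTable named contactsTable built from the Excel file whose column names match the client table; the connection string name is taken from the question):

using (var connection = new SqlConnection(ConfigurationManager.ConnectionStrings["ConnectionString"].ConnectionString))
{
    connection.Open();
    using (SqlTransaction transaction = connection.BeginTransaction())
    {
        try
        {
            using (var bulkCopy = new SqlBulkCopy(connection, SqlBulkCopyOptions.Default, transaction))
            {
                bulkCopy.DestinationTableName = "client";
                bulkCopy.ColumnMappings.Add("Mobile1", "Mobile1");
                bulkCopy.ColumnMappings.Add("Mobile2", "Mobile2");
                bulkCopy.ColumnMappings.Add("Tel", "Tel");
                bulkCopy.ColumnMappings.Add("Category", "Category");
                bulkCopy.WriteToServer(contactsTable); // one bulk round trip instead of one insert per row
            }
            transaction.Commit(); // everything is written, or nothing is
        }
        catch
        {
            transaction.Rollback();
            throw;
        }
    }
}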
You can use SqlBulkCopy, which is more work, or you can use the batch update feature of the SqlDataAdapter. Instead of creating your own insert statement, building a SqlDataAdapter, and then manually executing it, create a DataSet, fill it, create one SqlDataAdapter, set the number of inserts in a batch, then execute the adapter once.
I could repeat the code, but this article shows exactly how to do it: http://msdn.microsoft.com/en-us/library/kbbwt18a%28v=vs.80%29.aspx
protected void SendToServer_Click(object sender, EventArgs e)
{
DataTable Values = Session["valuesdt"] as DataTable ;
if(Values.Rows.Count>0)
{
//Fix up default values
for (int i = 0; i < Values.Rows.Count; i++)
{
Values.Rows[i]["Mobile1"] = Values.Rows[i]["Mobile1"].ToString() == "" ? 0 : double.Parse(Values.Rows[i]["Mobile1"].ToString());
Values.Rows[i]["Mobile2"] = Values.Rows[i]["Mobile2"].ToString() == "" ? 0 : double.Parse(Values.Rows[i]["Mobile2"].ToString());
Values.Rows[i]["Tel"] = Values.Rows[i]["Tel"].ToString() == "" ? 0 : double.Parse(Values.Rows[i]["Tel"].ToString());
Values.Rows[i]["Category"] = Values.Rows[i]["Category"].ToString();
}
//Build the distinct view after the fix-up so the batch sees the corrected values
DataTable dv = Values.DefaultView.ToTable(true, "Mobile1", "Mobile2", "Tel", "Category");
BatchUpdate(dv, 1000);
}
}
public static void BatchUpdate(DataTable dataTable,Int32 batchSize)
{
// Assumes GetConnectionString() returns a valid connection string.
string connectionString = GetConnectionString();
// Connect to the database.
using (SqlConnection connection = new SqlConnection(connectionString))
{
// Create a SqlDataAdapter.
SqlDataAdapter adapter = new SqlDataAdapter();
// Set the INSERT command and parameter.
adapter.InsertCommand = new SqlCommand(
"INSERT INTO client(Mobile1,Mobile2,Tel,Category) VALUES(@Mobile1,@Mobile2,@Tel,@Category);", connection);
// map each parameter to its source column so the adapter can fill it from the DataTable rows
adapter.InsertCommand.Parameters.Add("@Mobile1", SqlDbType.Float, 0, "Mobile1");
adapter.InsertCommand.Parameters.Add("@Mobile2", SqlDbType.Float, 0, "Mobile2");
adapter.InsertCommand.Parameters.Add("@Tel", SqlDbType.Float, 0, "Tel");
adapter.InsertCommand.Parameters.Add("@Category", SqlDbType.NVarChar, 50, "Category");
adapter.InsertCommand.UpdatedRowSource = UpdateRowSource.None;
// Set the batch size.
adapter.UpdateBatchSize = batchSize;
// Execute the update.
adapter.Update(dataTable);
}
}
I know this is a super old post, but you should not need to use the bulk operations explained in the existing answers for 5000 inserts. Your performance is suffering so much because you close and reopen the connection for each row insert. Here is some code I have used in the past that keeps one connection open and executes as many commands as needed to push all the data to the DB:
public static class DataWorker
{
public static Func<IEnumerable<T>, Task> GetStoredProcedureWorker<T>(Func<SqlConnection> connectionSource, string storedProcedureName, Func<T, IEnumerable<(string paramName, object paramValue)>> parameterizer)
{
if (connectionSource is null) throw new ArgumentNullException(nameof(connectionSource));
SqlConnection openConnection()
{
var conn = connectionSource() ?? throw new ArgumentNullException(nameof(connectionSource), $"Connection from {nameof(connectionSource)} cannot be null");
var connState = conn.State;
if (connState != ConnectionState.Open)
{
conn.Open();
}
return conn;
}
async Task DoStoredProcedureWork(IEnumerable<T> workData)
{
using (var connection = openConnection())
using (var command = connection.CreateCommand())
{
command.CommandType = CommandType.StoredProcedure;
command.CommandText = storedProcedureName;
command.Prepare();
foreach (var thing in workData)
{
command.Parameters.Clear();
foreach (var (paramName, paramValue) in parameterizer(thing))
{
command.Parameters.AddWithValue(paramName, paramValue ?? DBNull.Value);
}
await command.ExecuteNonQueryAsync().ConfigureAwait(false);
}
}
}
return DoStoredProcedureWork;
}
}
This was actually from a project where I was gathering emails for a restriction list, so here is a somewhat relevant example of what a parameterizer argument might look like and how to use the above code:
IEnumerable<(string,object)> RestrictionToParameter(EmailRestriction emailRestriction)
{
yield return ("@emailAddress", emailRestriction.Email);
yield return ("@reason", emailRestriction.Reason);
yield return ("@restrictionType", emailRestriction.RestrictionType);
yield return ("@dateTime", emailRestriction.Date);
}
var worker = DataWorker.GetStoredProcedureWorker<EmailRestriction>(ConnectionFactory, @"[emaildata].[AddRestrictedEmail]", RestrictionToParameter);
await worker(emailRestrictions).ConfigureAwait(false);