Unable to get updated value from method of other class - C#

Below is my class:
MsSql.cs:
public class MSSqlBLL
{
    public static long RowsCopied { get; set; }

    public long BulkCopy()
    {
        // conn and reader are defined elsewhere in the class.
        using (SqlBulkCopy bulkCopy = new SqlBulkCopy(conn))
        {
            bulkCopy.DestinationTableName = "dbo.Table1";
            bulkCopy.BatchSize = 100;
            bulkCopy.SqlRowsCopied +=
                new SqlRowsCopiedEventHandler(OnSqlRowsCopied);
            bulkCopy.NotifyAfter = 100;
            bulkCopy.WriteToServer(reader);
            return RowsCopied;
        }
    }

    private static void OnSqlRowsCopied(object sender, SqlRowsCopiedEventArgs e)
    {
        RowsCopied = RowsCopied + e.RowsCopied;
    }
}
I am calling the BulkCopy function from the class below, and I want the number of currently processed records in my affectedRecords variable.
For example: for each iteration of the loop I would like to get the affected records in my affectedRecords variable.
public class MySqlBLL
{
    public void GetTotalRows()
    {
        int totalRecords = 500;
        var table = "Table1";
        for (int i = 0; i < totalRecords / 100; i++) // 5 chunks of 100 rows each
        {
            query = "SELECT * FROM " + table + " LIMIT " + (i * 100) + "," + 100; // offset advances with each chunk
            var reader = Execute(conn, query);
            long affectedRecords = msSql.BulkCopy();
            reader.Close();
        }
    }
}
In the above method I send the data chunk by chunk to the BulkCopy method, and for each bulk copy I would like to get the number of records it processed. The problem is that I get 0 in the affectedRecords variable.
How can I access the number of rows currently processed by SqlBulkCopy?

The RowsCopied property is only updated after every 100 copied rows, as configured via NotifyAfter. If you place
Console.WriteLine("Copied {0} so far...", e.RowsCopied);
in the OnSqlRowsCopied event handler, you will see ongoing progress in the case of a console app.
But in your case you can simply SELECT COUNT(*) FROM the source table to show the count.
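If you actually need the per-chunk count back in affectedRecords, here is a minimal sketch (not from the original answer; conn and reader as in the question). The key points: reset the counter before each WriteToServer call, and assign rather than add in the handler, since SqlRowsCopiedEventArgs.RowsCopied is already the running total for the current bulk copy operation.

public class MSSqlBLL
{
    private long _rowsCopied; // running total for the current bulk copy

    public long BulkCopy()
    {
        _rowsCopied = 0; // reset for each chunk
        using (SqlBulkCopy bulkCopy = new SqlBulkCopy(conn))
        {
            bulkCopy.DestinationTableName = "dbo.Table1";
            bulkCopy.BatchSize = 100;
            bulkCopy.NotifyAfter = 1; // fire after every row so short chunks still report
            // e.RowsCopied is cumulative for this WriteToServer call, so assign instead of adding
            bulkCopy.SqlRowsCopied += (sender, e) => _rowsCopied = e.RowsCopied;
            bulkCopy.WriteToServer(reader);
        }
        return _rowsCopied;
    }
}

Note that NotifyAfter = 1 fires the event for every row, which is convenient for small chunks but adds overhead on large ones.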

Related

C# Entity Framework Core error executing raw SQL string with many parameter values

I am trying to build a SQL query at runtime and execute it with _context.Database.ExecuteSqlCommandAsync(), but I am getting the following error. There are around 620 rows to be inserted, with a total of about 4,400 parameter values. I am not sure how to handle this. Can anyone help me solve it?
code:
var sqlSb = new StringBuilder();
sqlSb.AppendLine("INSERT INTO [dbo].[UserTag](UserId, TagId, Disabled, Created, CreatedBy, Modified, ModifiedBy)");
sqlSb.AppendLine("VALUES");
var index = 0;
var noOfcolumnsToInsert = 7;
var query = new SqlQuery();
foreach (var userTag in userTags)
{
    var countFrom = index * noOfcolumnsToInsert;
    index++;
    sqlSb.AppendLine($"({{{countFrom}}}, {{{countFrom + 1}}}, {{{countFrom + 2}}}, {{{countFrom + 3}}}, {{{countFrom + 4}}}, {{{countFrom + 5}}}, {{{countFrom + 6}}}){(index < userTags.Count ? "," : "")}");
    query.ParameterValues.AddRange(new List<object> { userTag.UserId, userTag.TagId, userTag.Disabled, currentDateTime, sessionUserGuidAsString, currentDateTime, sessionUserGuidAsString });
}
query.Sql = sqlSb.ToString();
await _context.Database.ExecuteSqlCommandAsync(query.Sql, query.ParameterValues.ToArray());
SqlQuery class:
public class SqlQuery
{
    public string Sql { get; set; }
    public List<object> ParameterValues { get; set; }

    public SqlQuery()
    {
        ParameterValues = new List<object>();
    }
}
Error:
The incoming request has too many parameters. The server supports a maximum of 2100 parameters. Reduce the number of parameters and resend the request
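One common way around this limit (a sketch, not from the original question) is to send the rows in batches small enough to stay under 2,100 parameters. With 7 parameters per row, 250 rows per batch uses 1,750 parameters:

const int rowsPerBatch = 250; // 250 * 7 = 1750 parameters, safely under 2100

for (var offset = 0; offset < userTags.Count; offset += rowsPerBatch)
{
    var batch = userTags.Skip(offset).Take(rowsPerBatch).ToList(); // needs System.Linq
    var batchSb = new StringBuilder();
    batchSb.AppendLine("INSERT INTO [dbo].[UserTag](UserId, TagId, Disabled, Created, CreatedBy, Modified, ModifiedBy)");
    batchSb.AppendLine("VALUES");
    var parameterValues = new List<object>();
    for (var row = 0; row < batch.Count; row++)
    {
        var p = row * 7;
        batchSb.AppendLine($"({{{p}}}, {{{p + 1}}}, {{{p + 2}}}, {{{p + 3}}}, {{{p + 4}}}, {{{p + 5}}}, {{{p + 6}}}){(row < batch.Count - 1 ? "," : "")}");
        parameterValues.AddRange(new object[] { batch[row].UserId, batch[row].TagId, batch[row].Disabled,
            currentDateTime, sessionUserGuidAsString, currentDateTime, sessionUserGuidAsString });
    }
    // One round trip per batch instead of one statement with ~4400 parameters.
    await _context.Database.ExecuteSqlCommandAsync(batchSb.ToString(), parameterValues.ToArray());
}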

How to input into DataTable quickly? Or save data permanently into DataTable?

I am loading a text file into a DataTable and then using SqlBulkCopy to copy it to a database. While the bulk copy is fast, inserting 50,000+ lines into the DataTable is not (around 5 minutes). How do I make it efficient?
Can I insert data into the DataTable quickly?
If not, is there a way to save the inserted data permanently into the DataTable so I don't have to insert it every time I run the program?
for (; i < fares.Length; )
{
    k = i;
    Console.WriteLine("Inserting " + k + " out of " + (fares.Length));
    for (; i <= (k + 3); i++)
    {
        if (i % 4 == 0)
        {
            for (int j = 0; j < fares.Length - 1; j++)
            {
                int space = fares[i].IndexOf(" ");
                startStation = fares[i].Substring(0, space);
                endStation = fares[i].Substring(space + 1, fares[i].Length - space - 1);
            }
        }
        else if (i % 4 == 1)
        {
            valueFare = fares[i];
        }
        else if (i % 4 == 2)
        {
            standardFare = fares[i];
        }
        else if (i % 4 == 3)
        {
            time = int.Parse(fares[i]);
        }
    }
    faresDT.Rows.Add(startStation, endStation, valueFare, standardFare, time);
}
If what you want is to optimize your load to the database, I suggest that you get rid of the DataTable completely. By making use of Marc Gravell's FastMember (and anyone who's using SqlBulkCopy should be using FastMember IMHO) you can get a DataReader directly from any IEnumerable.
I would use some variation of the below code whenever writing from a file directly to a database. It streams the contents of the file directly to the SqlBulkCopy operation through the use of yield return and the lazy evaluation of IEnumerable.
using System;
using System.Collections.Generic;
using System.Data.SqlClient;
using System.IO;
using System.Text;
using FastMember;

namespace BulkCopyTest
{
    public class Program
    {
        public static void Main(string[] args)
        {
            const string filePath = "SOME FILE THAT YOU WANT TO LOAD TO A DB";

            WriteData(GetData<dynamic>(filePath));
        }

        private static void WriteData<T>(IEnumerable<T> data)
        {
            using (var bcp = new SqlBulkCopy(GetConnection(), SqlBulkCopyOptions.TableLock, null))
            using (var reader = ObjectReader.Create(data))
            {
                SetColumnMappings<T>(bcp.ColumnMappings);
                bcp.BulkCopyTimeout = 300;
                bcp.BatchSize = 150000;
                bcp.DestinationTableName = ""; //TODO: Set correct TableName
                bcp.WriteToServer(reader);
            }
        }

        private static void SetColumnMappings<T>(SqlBulkCopyColumnMappingCollection mappings)
        {
            //Setup your column mappings
        }

        private static IEnumerable<T> GetData<T>(string filePath)
        {
            using (var fileStream = File.OpenRead(filePath))
            using (var reader = new StreamReader(fileStream, Encoding.UTF8))
            {
                string line;
                while ((line = reader.ReadLine()) != null)
                {
                    //TODO: Add actual parsing logic and whatever else is needed to create an instance of T
                    yield return Activator.CreateInstance<T>();
                }
            }
        }

        private static SqlConnection GetConnection()
        {
            return new SqlConnection(new SqlConnectionStringBuilder
            {
                //TODO: Set Connection information here
            }.ConnectionString);
        }
    }
}
In this case I think you should take advantage of the BeginLoadData, LoadDataRow and EndLoadData methods provided by the DataTable class. You could use them like this:
try
{
    faresDT.BeginLoadData();
    // Your for loop...
    {
        // Logic defining the values of startStation, endStation, valueFare, standardFare and time removed for brevity.
        faresDT.LoadDataRow(new object[] { startStation, endStation, valueFare, standardFare, time }, true);
    }
}
finally
{
    faresDT.EndLoadData();
}
What BeginLoadData() does is turn off some of the processing (notifications, index maintenance, and constraint checking) that happens every time you add a row; calling EndLoadData() turns it back on once you are done loading.
You can find more details about these APIs here:
https://learn.microsoft.com/en-us/dotnet/api/system.data.datatable.loaddatarow?view=netframework-4.7.2

C# SQL Server data to long array

I am trying to use C# with a SQL Server database and I have a problem.
I have an array like the one below (the original array actually has a size of 10001x1):
long[] lvl = { 0, 7200000, 15840000, 25920000, 37440000, 50400000, 64800000, 80640000 };
When I try to read the same long[] array from the database, I get an error.
string sorgu = "select * from paragon";
var komut = new SqlCommand(sorgu, baglanti);
var reader = komut.ExecuteReader();
IList<long> lvl = new List<long>();
while (reader.Read())
{
    lvl.Add((long)reader["Paragon"]);
}
reader.Close();
reader.Dispose();

long ns = Convert.ToInt64(textBox1.Text);
long sns = Convert.ToInt64(textBox2.Text);
long nsxp = lvl[ns];
long snsxp = lvl[sns];
long toplam = nsxp + snsxp;
for (int i = 0; i < lvl.Count; i++)
{
    if (toplam < lvl[i])
    {
        textBox3.Text = Convert.ToString(i - 1);
        break;
    }
}
(Screenshot of the error omitted.)
Your problem is a data type mismatch.
The reader gives you one record for every reader.Read() call; read each value with the typed getter:
IList<long> lvl = new List<long>();
while (reader.Read())
{
    lvl.Add(reader.GetInt64(0));
}
reader.Close();
reader.Dispose(); // always close and dispose your reader whenever you are done.

long ns = Convert.ToInt64(textBox1.Text);
long sns = Convert.ToInt64(textBox2.Text);
long nsxp = lvl[(int)ns];   // a List indexer takes an int, so cast the long index
long snsxp = lvl[(int)sns];
long toplam = nsxp + snsxp;
for (int i = 0; i < lvl.Count; i++)
{
    if (toplam < lvl[i])
    {
        textBox3.Text = Convert.ToString(i - 1);
        break;
    }
}
SqlDataReader's Read() reads a single record from the database. You are trying to use it to read all records at once. This code illustrates an example of how to read each value sequentially:
while (reader.Read()) // Returns true if a record is available, and false once all records have been read.
{
    var paragonValue = reader.GetInt64(0); // Reads the current record's Paragon value.

    // Do something with paragonValue.
}
See the Microsoft Docs on SqlDataReader.Read for more information.

c# Better way to retrieve large bulks of data from SQL Server 2014?

Context: I retrieve bulks of data to index them in an ElasticSearch cluster (a NoSQL document store).
I determine with a for loop how many delegates I need to create, based on how many rows the current table has relative to my bulk size, and execute them to run the code below.
I use OFFSET-FETCH. tb represents the current table, and my current bulk size is 5000.
My code looks like this:
using (SqlConnection db = new SqlConnection(tb.Conn))
{
    int offset = i * BULK_SIZE;
    int nextRows = BULK_SIZE;
    string queryParams = string.Format(" ORDER BY {0} OFFSET {1} ROWS FETCH NEXT {2} ROWS ONLY", tb.FirstPK(), offset, nextRows);
    string dataQuery = string.Format("SELECT * FROM dbo.{0} {1}", tb.TableName, queryParams);
    try
    {
        db.Open();
        object[,] bulk = new object[BULK_SIZE, 2]; // [data[], id]
        int n = 0;
        object[] values;
        using (SqlDataReader reader = new SqlCommand(dataQuery, db).ExecuteReader())
        {
            while (reader.Read())
            {
                string docId = string.Empty;
                foreach (string pk in tb.PKeys)
                {
                    docId += reader[pk].ToString() + '_';
                }
                docId = docId.Substring(0, docId.Length - 1); // delete last '_'
                values = new object[reader.FieldCount];
                reader.GetValues(values);
                bulk[n, 0] = values;
                bulk[n, 1] = docId;
                n++;
            }
            reader.Close();
        }
        db.Close();
        if (IsMemoryAvailable()) // Waits for other delegates to finish
        {
            DataUpload(bulk, tb, mapper);
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
        Console.WriteLine(e.StackTrace);
        Console.ReadLine();
        //throw e;
    }
}
Is there a better way to do this?
I get the bulk, insert it into an array, and handle that bulk in another task.
The problem is that SQL Server's memory usage keeps going up (the cache grows too big), and when fetching the later bulks I get timeout exceptions and the like; it gets too slow.
The solution is to create a temp table with only the primary keys, and query back and forth from the table and the temp table with joins.
This is way more memory efficient than offset-fetch, and also faster.
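A rough sketch of that idea (the table name dbo.MyTable and key column Id are placeholders; db is an open SqlConnection as in the question). The primary keys are materialized once with a sequential number, and each bulk is then fetched by joining on a key range instead of re-sorting and skipping rows with OFFSET-FETCH:

// Run once per connection: snapshot the keys with a sequential row number.
// (#Keys lives as long as this connection stays open.)
var createKeys = @"SELECT ROW_NUMBER() OVER (ORDER BY Id) AS RowNum, Id
                   INTO #Keys
                   FROM dbo.MyTable;";
new SqlCommand(createKeys, db).ExecuteNonQuery();

// Per bulk: seek into #Keys by range and join back to the real table.
var fetchBulk = @"SELECT t.*
                  FROM #Keys k
                  JOIN dbo.MyTable t ON t.Id = k.Id
                  WHERE k.RowNum > @from AND k.RowNum <= @to;";
using (var cmd = new SqlCommand(fetchBulk, db))
{
    cmd.Parameters.AddWithValue("@from", offset);
    cmd.Parameters.AddWithValue("@to", offset + BULK_SIZE);
    using (var reader = cmd.ExecuteReader())
    {
        while (reader.Read())
        {
            // ...same row handling as in the question...
        }
    }
}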

No value given for required parameters, with value

I'm working on a C#/ASP.NET web app.
As part of the app, I want to connect to an MS Access database and get values from it into a DataSet.
For some reason, I get the error in the title when filling the DataSet with a DataAdapter. Despite this, when I use breakpoints I can see the command is as follows:
SELECT ..... WHERE ItemID = #value0 (if you need the parameters, ask for them and I'll copy the whole thing).
Using breakpoints I can also see that #value0 has the value 2, and I am assured it's the only parameter in the query.
My question is: how could this happen? If the only value in the query is filled, what am I missing?
EDIT:
Full query:
SELECT ItemName as Name,ItemPicture as Picture,ItemHeroModif as Assistance,ItemTroopModif as Charisma, HerbCost as Herbs, GemCost as Gems FROM Item WHERE ItemID = #value0
Full building code (generating the query for each user requires a different number of items; this one has only a single item, so I've used it to test):
static public DataSet getUserShopItemDS(string username, List<shopItem> items)
{
    string madeForCommand = "SELECT ItemName as Name,ItemPicture as Picture,ItemHeroModif as Assistance,ItemTroopModif as Charisma, HerbCost as Herbs, GemCost as Gems FROM Item WHERE ItemID = ";
    int count = 0;
    foreach (shopItem item in items)
    {
        madeForCommand += "#value" + count + " OR ";
        count++;
    }
    madeForCommand = madeForCommand.Substring(0, madeForCommand.Length - 3);
    OleDbCommand command = GenerateConnection(madeForCommand);
    for (int ii = 0; ii < items.Count; ii++)
    {
        command.Parameters.AddWithValue("#value" + ii, items[ii].ID);
    }
    var FreestyleAdaptor = new OleDbDataAdapter();
    FreestyleAdaptor.SelectCommand = command;
    DataSet Items = new DataSet();
    FreestyleAdaptor.Fill(Items); // The error is thrown here.
    return Items;
}
EDIT 2: The shopItem class:

public class shopItem
{
    // Irrelevant properties omitted.

    // Whenever a user logs in, I create a list of his items and attach to each one its ID from the database.
    // I send the list as a parameter to the function.
    public int ID
    {
        get;
        private set;
    }

    public shopItem(int ID)
    {
        this.ID = ID;
    }

    public shopItem() { }
    // more constructors.
}
Here's the code you should use:
var madeForCommand = "SELECT ItemName as Name,ItemPicture as Picture,ItemHeroModif as Assistance,ItemTroopModif as Charisma, HerbCost as Herbs, GemCost as Gems FROM Item WHERE ";
OleDbCommand command = new OleDbCommand();
for (int ii = 0; ii < items.Count; ii++)
{
    madeForCommand += "ItemId = ? OR ";
    command.Parameters.AddWithValue("value" + ii, items[ii].ID);
}
madeForCommand = madeForCommand.Substring(0, madeForCommand.Length - 3);
In MS Access (OleDb), parameters are positional: they are marked with ? in the query text and must be added to the Parameters collection in the same order as they appear in the query; the names passed to AddWithValue are not used for matching.
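For completeness, a sketch of wiring the built SQL into the command, since the snippet above never assigns it (GenerateConnection is the question's own helper):

command.CommandText = madeForCommand; // the loop above builds the SQL but never assigns it
// ...or reuse the question's helper, which presumably also sets the connection:
// OleDbCommand command = GenerateConnection(madeForCommand);

var adapter = new OleDbDataAdapter { SelectCommand = command };
var items = new DataSet();
adapter.Fill(items); // the positional ? markers are matched to Parameters in order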
