Windows service stops automatically after first successful run - C#

I have created a Windows service scheduler that runs every 24 hours. When I started the service the first time, it inserted data into the database and ran perfectly fine.
But on the second run the service stops, even though I want it to run continually.
I have also created a scheduler to run it every minute, but data is only inserted when the time matches.
public void StartService(object e)
{
try {
//ExtractorEventLog("0", null);
List<PracticeInformation> getServiceTime = GetpracticeInformationList();
var gettime = (from t in getServiceTime
select new PracticeInformation
{
extractor_start_time = t.extractor_start_time
}).OrderByDescending(x => x.id).ToList().FirstOrDefault();
DateTime SetdateTime = DateTime.Parse(ConfigurationManager.AppSettings["ScheduledTime"]);
if (gettime != null)
{
SetdateTime = DateTime.Parse(gettime.extractor_start_time.ToString());
}
try
{
if (SetdateTime.ToString("HH:mm") == DateTime.Now.ToString("HH:mm"))
{
ExtractorEventLog(SetdateTime.ToString("HH:mm") + " " + DateTime.Now.ToString("HH:mm"), null);
string strFile = @"C:\requiredFilesForDDP\Temp" + DateTime.Now.Ticks.ToString() + ".csv";
if (connm == "")
{
GetConnectionString();
}
using (OdbcConnection conn = new OdbcConnection(connm))
{
//ExtractorEventLog(connm, null);
conn.Open();
#region Get Hospital data
var id = 0;
try
{
List<PracticeInformation> practiceList = getServiceTime;
var practiceName = practiceList[0].name;
id = practiceList[0].id;
List<PracticeInformationModel> HospitalAll = new List<PracticeInformationModel>();
using (OdbcCommand com = new OdbcCommand(Common.cmdGetHospital, conn))
{
using (OdbcDataReader readerHospital = com.ExecuteReader())
{
HospitalAll = DataReaderMapToList<PracticeInformationModel>(readerHospital);
List<PracticeInformationModel> objPracticeInformationmodel = (from pi in HospitalAll
select new PracticeInformationModel
{
practice_name = pi.practice_name,
phone = pi.phone
}
).ToList();
var getPracticeName = practiceList.Select(x => new { x.name }).ToList();
List<PracticeInformationModel> objpracticelist = objPracticeInformationmodel.Where(x => !getPracticeName.Any(y => y.name == x.practice_name)).ToList();
DataTable PracticeInformationDT = ToDataTable(objpracticelist); // To save PracticeInformation table
using (MySqlConnection sqConn = new MySqlConnection(Common.connectionStringDB))
{
using (SqlBulkCopy blkcopyPi = new SqlBulkCopy(sqConn.ConnectionString, SqlBulkCopyOptions.KeepNulls))
{
CreateCSVfile(PracticeInformationDT, strFile);
if (!File.Exists(strFile))
{
FileStream fs = new FileStream(strFile, FileMode.Create, FileAccess.Write);
fs.Close();
fs.Dispose();
}
if (sqConn.State == ConnectionState.Open)
{
sqConn.Close();
}
sqConn.Open();
MySqlBulkLoader bcp1 = new MySqlBulkLoader(sqConn);
bcp1.TableName = "PracticeInformation"; //Create PracticeInformation table into MYSQL database...
bcp1.FieldTerminator = ",";
bcp1.LineTerminator = "\r\n";
bcp1.FileName = strFile;
bcp1.NumberOfLinesToSkip = 0;
bcp1.Load();
try
{
File.Delete(strFile);
}
catch (Exception ex)
{
ExceptionLog(ex.Message, ex.GetType().Name.ToString(), ex.StackTrace, id);
}
}
readerHospital.Close();
}
}
}
#endregion
#region Get Provider Data From Dentrix
List<Provider> ProviderAll = new List<Provider>();
using (OdbcCommand com = new OdbcCommand(Common.cmdGetProvider, conn))
{
using (OdbcDataReader readerProvider = com.ExecuteReader())
{
ProviderAll = DataReaderMapToList<Provider>(readerProvider);
List<Provider> objProvider = (from pro in ProviderAll
select new Provider
{
provider_id = pro.provider_id,
first_name = pro.first_name,
last_name = pro.last_name,
work_phone = pro.work_phone
}).ToList();
}
}
#endregion
#region Add Appointment
using (OdbcCommand com = new OdbcCommand(Common.cmdText, conn))
{
using (OdbcDataReader reader = com.ExecuteReader())
{
List<AllModalFromApplointment> AppointmentListAll = new List<AllModalFromApplointment>();
AppointmentListAll = DataReaderMapToList<AllModalFromApplointment>(reader);
List<Alert> patientalert_id = new List<Alert>();
#region Get alert data
MySqlConnection connection = null;
List<string> target = new List<string>();
EventLog(Common.Get_Pre_Med_Alerts);
ExtractorEventLog(Common.Get_Pre_Med_Alerts, id.ToString());
int add = 0;
int alert_count = 0;
//string connectionStringDB = "server=192.168.0.1;user=dentrix;database=db_dentrix;password=dentrix";
connection = new MySqlConnection(Common.connectionStringDB);
foreach (var i in AppointmentListAll.Select(x => x.patient_id).ToList())
{
if (i != 0)
{
OdbcCommand ODBCCommand = new OdbcCommand("{call admin.sp_getpatientmedalerts (?)}", conn);
ODBCCommand.CommandType = CommandType.StoredProcedure;
ODBCCommand.Parameters.AddWithValue("@patient_guid", i); //4898-11738
ODBCCommand.ExecuteNonQuery();
OdbcDataReader a = ODBCCommand.ExecuteReader();
DataTable getAlert = new DataTable();
if (a.HasRows)
{
target.Add(i.ToString());
alert_count = add + 1;
getAlert.Load(a);
a.Close();
}
}
}
ExtractorEventLog(target.Count() + " " + Common.No_Of_Pre_Med_Alert, null);
#endregion
#region
List<AppointmentDetail> objAppointment = (from app in AppointmentListAll
join pro in ProviderAll on app.provider_id equals pro.provider_id
select new AppointmentDetail
{
patient_id = Convert.ToInt64(app.patient_guid),
appointment_id = Convert.ToInt64(app.appointment_id),
appointment_timestamp = Convert.ToString(app.appointment_date.AddHours(app.start_hour).AddMinutes(app.start_minute)),
patient_first_name = app.patient_name.Split(',')[0],
patient_last_name = app.patient_name.Split(',')[1],
patient_phone_number = app.patient_phone,
red_cross_flag = target.Any(x => x.ToString().Contains(app.patient_id.ToString())) ? "Y" : "N",
dr_name = pro.first_name.Trim() + ' ' + pro.last_name.Trim(),
dr_phone = HospitalAll[0].phone,
hospital_name = HospitalAll[0].practice_name,
appointment_type = null,
fees = 0,
status = null
//createdDate = DateTime.UtcNow
}).ToList();
//Restrict duplicate records
ExtractorEventLog(Common.Data_Inserting, id.ToString());
List<AppointmentDetail> usethe = GetAppointmentList();
var query = usethe.Select(x => new { x.appointment_id }).ToList();
List<AppointmentDetail> xyz = objAppointment.Where(x => !query.Any(y => y.appointment_id == x.appointment_id)).ToList();
if (xyz != null && xyz.Count != 0)
{
var from = xyz.First().appointment_timestamp;
var to = xyz.Last().appointment_timestamp;
ExtractorEventLog(Common.Get_Appointment_Date + from + "and" + to, id.ToString());
}
else
{
EventLog("No appointmet present");
}
int appointment_count = xyz.Count();
DataTable appointmentDT = ToDataTable(xyz); // To save appointment table
using (MySqlConnection sqConn = new MySqlConnection(Common.connectionStringDB))
{
using (SqlBulkCopy blkcopy = new SqlBulkCopy(sqConn.ConnectionString, SqlBulkCopyOptions.KeepNulls))
{
CreateCSVfile(appointmentDT, strFile);
if (!File.Exists(strFile))
{
FileStream fs = new FileStream(strFile, FileMode.Create, FileAccess.Write);
fs.Close();
fs.Dispose();
}
if (sqConn.State == ConnectionState.Open)
{
sqConn.Close();
}
sqConn.Open();
MySqlBulkLoader bcp1 = new MySqlBulkLoader(sqConn);
bcp1.TableName = "appointment";
//bcp1.TableName = "Appointment"; //Create Appointment table into MYSQL database.
bcp1.FieldTerminator = ",";
bcp1.LineTerminator = "\r\n";
bcp1.FileName = strFile;
bcp1.NumberOfLinesToSkip = 0;
bcp1.Load();
try
{
File.Delete(strFile);
}
catch (Exception ex)
{
ExtractorEventLog("appointment-ex", null);
ExceptionLog(ex.Message, ex.GetType().Name.ToString(), ex.StackTrace, id);
}
ExtractorEventLog(appointment_count + " " + Common.No_Of_Appointment_Retrive, id.ToString());
}
ExtractorEventLog(Common.Done, id.ToString());
this.ScheduleService();
}
#endregion
}
}
}
catch (Exception ex)
{
ExtractorEventLog("a-ex", null);
ExceptionLog(ex.Message, ex.GetType().Name.ToString(), ex.StackTrace, id);
}
}
}
else{
this.ScheduleService();
}
}
catch (Exception ex)
{
ExtractorEventLog(ex.Message +"b-ex", null);
//ExceptionLog(ex.Message, ex.GetType().Name.ToString(), ex.StackTrace, id);
}
//EventLog("this ScheduleService()");
// this.ScheduleService();
}
catch(Exception ex)
{
ExtractorEventLog("zz-ex", null);
}
}
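
One likely cause, assuming StartService is invoked by a timer set up in ScheduleService: the next run is only scheduled when the time matches (and in the else branch), so any exception path exits without re-arming the timer and the service appears to stop after its first successful run. A minimal sketch of a pattern that always re-arms a hypothetical one-shot System.Threading.Timer, whatever happened during the run:

private System.Threading.Timer _schedulerTimer; // assumed field, not shown in the code above

public void ScheduleService()
{
    // One-shot timer: fires once after one minute, then waits to be re-armed.
    _schedulerTimer = new System.Threading.Timer(StartService, null,
        TimeSpan.FromMinutes(1), Timeout.InfiniteTimeSpan);
}

public void StartService(object e)
{
    try
    {
        // ... existing extraction and bulk-load work ...
    }
    catch (Exception ex)
    {
        ExtractorEventLog(ex.Message, null);
    }
    finally
    {
        // Re-arm the timer on every path so one failed or skipped run
        // cannot stop the service permanently.
        this.ScheduleService();
    }
}

With the rescheduling in a finally block, the ScheduleService() calls scattered inside the success and else branches are no longer needed.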

Related

How to programmatically update a DataTable and DataGrid at the same time

I'm trying to create a table in my application that will list new objects that I will work with later. The problem is that I don't get the data in the DataGrid. If the LoadAlertGrid() method is called separately from a button click event it works as it should, but the grid doesn't refresh automatically when a new object is created. Do you know what to do?
public AdminWindow()
{
InitializeComponent();
Task.Run(() => Test(new EmailParser(true, "username" , "password")) );
}
public void Test(EmailParser emailParser)
{
bool help = true;
do
{
using (var client = new ImapClient())
{
using (var cancel = new System.Threading.CancellationTokenSource())
{
client.Connect(emailParser.ServerName, emailParser.Port, emailParser.IsSSLuse, cancel.Token);
client.Authenticate(emailParser.Username, emailParser.Password, cancel.Token);
var inbox = client.Inbox;
inbox.Open(FolderAccess.ReadOnly, cancel.Token);
for (int i = 0; i < inbox.Count; i++)
{
var message = inbox.GetMessage(i, cancel.Token);
GetBodyText = message.TextBody;
Problem problem = new Problem(message.MessageId);
if (!dAOProblem.GetAll().Any(x => x.Message_Id.Equals(problem.Message_Id)))
{
dAOProblem.Save(problem);
Alert alert = new Alert(message.MessageId, message.Date.DateTime, message.From.ToString(), 1, problem.Id);
if (!dAOAlert.GetAll().Any(x => x.Id_MimeMessage.Equals(alert.Id_MimeMessage)))
{
dAOAlert.Save(alert);
LoadAlertGrid();
}
}
}
//client.Disconnect(true, cancel.Token);
}
}
} while (help != false);
}
public void LoadAlertGrid()
{
SqlConnection con = DatabaseSingleton.GetInstance();
using (SqlCommand comm = new SqlCommand("SELECT Alert.email, Alert.date, AMUser.name AS 'admin', Problem.nameOfAlert, Result.result " +
"FROM ((Alert INNER JOIN AMUser ON Alert.AMUser_ID = AMUser.id)" +
" INNER JOIN Problem ON Alert.Problem_ID = Problem.id " +
"INNER JOIN Result ON Problem.Result_ID = Result.id); ", con))
{
DataTable table = new DataTable();
SqlDataReader sdr = comm.ExecuteReader();
table.Load(sdr);
AlertTable.ItemsSource = table.DefaultView;
sdr.Close();
}
}
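
The likely problem, assuming this is WPF (AdminWindow, DataGrid, ItemsSource): LoadAlertGrid() touches AlertTable, but it is called from the Task.Run background thread, and WPF controls may only be updated from the UI thread, so the grid never refreshes. A small sketch of marshalling the call back to the dispatcher, using the names from the code above:

if (!dAOAlert.GetAll().Any(x => x.Id_MimeMessage.Equals(alert.Id_MimeMessage)))
{
    dAOAlert.Save(alert);
    // Update the DataGrid on the UI thread instead of the worker thread.
    Application.Current.Dispatcher.Invoke(() => LoadAlertGrid());
}

Alternatively, bind AlertTable.ItemsSource to an ObservableCollection and only add items to it via the dispatcher.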

HangFire running each job separately

I am using Hangfire to run a job. I have a method that retrieves data from more than 50 SQL servers.
I want to use Hangfire to run each location separately by LocationID, and if one location fails I want that job to run again for that location after x minutes.
I also want to see the log of the job on the Hangfire Dashboard.
My job method:
public void SyncCompInfo()
{
List<Location> locations;
using (var db = new CompInfoDemoEntities())
{
locations = db.Locations.Where(x => x.IsActive).ToList();
}
foreach (var location in locations)
{
try
{
using (var _db = new CompInfoDemoEntities())
{
_log.Info("Getting CompoInfo data from location: " + location.StoreName);
_syncLogService.Add("Getting CompoInfo data from location: " + location.StoreName);
var responses = new List<CompInfoResponse>();
var compInfos = _db.IMS_CompInfo.Where(x => x.LocationId == location.Id).ToList();
using (var cnn = new SqlConnection(location.ConnectionString))
{
var sql = "select * from IMS_CompInfo;";
var sqlCmd = new SqlCommand(sql, cnn);
cnn.Open();
using (SqlDataReader rdr = sqlCmd.ExecuteReader())
{
while (rdr.Read())
{
var item = new CompInfoResponse();
item.Id = int.Parse(rdr["Id"].ToString());
item.ClientID = rdr["ClientID"].ToString();
item.LicenceID = rdr["LicenceID"].ToString();
item.POSCode = rdr["POSCode"].ToString();
item.logiPOS_Version = rdr["logiPOS_Version"].ToString();
if (rdr["LastLoginDate"] != null)
{
item.LastLoginDate = DateTime.Parse(rdr["LastLoginDate"].ToString());
}
item.ComputerName = rdr["ComputerName"].ToString();
if (rdr["BootTime"] != null)
{
item.BootTime = DateTime.Parse(rdr["BootTime"].ToString());
}
item.Domain = rdr["Domain"].ToString();
item.Manufacturer = rdr["Manufacturer"].ToString();
item.Model = rdr["Model"].ToString();
item.Memory = rdr["Memory"].ToString();
item.OS = rdr["OS"].ToString();
item.Build = rdr["Build"].ToString();
item.CPU = rdr["CPU"].ToString();
item.ProcArchitecture = rdr["ProcArchitecture"].ToString();
item.IP1 = rdr["IP1"].ToString();
item.MAC1 = rdr["MAC1"].ToString();
if (rdr["LastModifiedDate"] != null)
{
item.LastModifiedDate = DateTime.Parse(rdr["LastModifiedDate"].ToString());
}
if (rdr["Tag"] != null)
{
item.Tag = int.Parse(rdr["Tag"].ToString());
}
item.Application = rdr["Application"].ToString();
responses.Add(item);
}
}
}
What you seem to need is something like this: a method you call on startup, which loops over the locations and enqueues a job for each location.
I have oversimplified things (for example by making the methods static),
but I think most of the idea is there.
Have a look at Hangfire recurring tasks; they may be better suited to your needs than jobs fired on application startup.
public void CalledUponStartup()
{
List<Location> locations;
using (var db = new CompInfoDemoEntities())
{
locations = db.Locations.Where(x => x.IsActive).ToList();
}
foreach (var location in locations)
{
BackgroundJob.Enqueue(() => SyncCompInfo(location.Id));
}
}
public static void SyncCompInfo(int locationId)
{
try
{
using (var _db = new CompInfoDemoEntities())
{
var location = _db.Locations.FirstOrDefault(x => x.Id == locationId);
_log.Info("Getting CompoInfo data from location: " + location.StoreName);
_syncLogService.Add("Getting CompoInfo data from location: " + location.StoreName);
var responses = new List<CompInfoResponse>();
var compInfos = _db.IMS_CompInfo.Where(x => x.LocationId == location.Id).ToList();
using (var cnn = new SqlConnection(location.ConnectionString))
{
var sql = "select * from IMS_CompInfo;";
var sqlCmd = new SqlCommand(sql, cnn);
cnn.Open();
using (SqlDataReader rdr = sqlCmd.ExecuteReader())
{
while (rdr.Read())
{
var item = new CompInfoResponse();
item.Id = int.Parse(rdr["Id"].ToString());
item.ClientID = rdr["ClientID"].ToString();
item.LicenceID = rdr["LicenceID"].ToString();
item.POSCode = rdr["POSCode"].ToString();
item.logiPOS_Version = rdr["logiPOS_Version"].ToString();
if (rdr["LastLoginDate"] != null)
{
item.LastLoginDate = DateTime.Parse(rdr["LastLoginDate"].ToString());
}
item.ComputerName = rdr["ComputerName"].ToString();
if (rdr["BootTime"] != null)
{
item.BootTime = DateTime.Parse(rdr["BootTime"].ToString());
}
item.Domain = rdr["Domain"].ToString();
item.Manufacturer = rdr["Manufacturer"].ToString();
item.Model = rdr["Model"].ToString();
item.Memory = rdr["Memory"].ToString();
item.OS = rdr["OS"].ToString();
item.Build = rdr["Build"].ToString();
item.CPU = rdr["CPU"].ToString();
item.ProcArchitecture = rdr["ProcArchitecture"].ToString();
item.IP1 = rdr["IP1"].ToString();
item.MAC1 = rdr["MAC1"].ToString();
if (rdr["LastModifiedDate"] != null)
{
item.LastModifiedDate = DateTime.Parse(rdr["LastModifiedDate"].ToString());
}
if (rdr["Tag"] != null)
{
item.Tag = int.Parse(rdr["Tag"].ToString());
}
item.Application = rdr["Application"].ToString();
responses.Add(item);
}
}
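
For the "retry after x minutes" and dashboard-logging requirements: Hangfire retries failed jobs automatically, the attempt count can be tuned with the AutomaticRetry attribute, and a recurring schedule can replace the enqueue-at-startup loop. A brief sketch, assuming Hangfire 1.x and the SyncCompInfo(int) method above; the job id, attempt count and cron schedule are placeholders:

// Retry a failed location sync a few times before Hangfire marks it as failed.
[AutomaticRetry(Attempts = 5)]
public static void SyncCompInfo(int locationId) { /* as above */ }

// One recurring job per location; each shows up separately on the dashboard.
foreach (var location in locations)
{
    RecurringJob.AddOrUpdate(
        "sync-compinfo-" + location.Id,
        () => SyncCompInfo(location.Id),
        Cron.Daily());
}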

How to ignore error records while updating the entities (see the inner exception for details)

How can I skip records that cause errors while saving the entities?
ERROR DISPLAY
private void btn_salary_Click(object sender, EventArgs e)
{
if (sg.HasRows(lib.tbl_pay) == true && chkShowOnly.Checked == false)
{
lib.ShowMessage(lib.tbl_pay + " has data can not import");
return;
}
sql = "SELECT * FROM PAY WHERE BankAc IS NOT null";
q1 = new DBFHelper(txt_olddir.Text).FillDataTable(sql);
dataGridView1.DataSource = q1;
if (chkShowOnly.Checked == true) return;
DateTime xdate = sg.ServerDate;
for (int x = 0; x < q1.Rows.Count; x++)
{
PAY q11 = new PAY
{
BANKAC = sg.AsString(q1.Rows[x]["BankAc"]),
MDATE = sg.AsDate(q1.Rows[x]["MDATE"].ToString()),
BYEAR = sg.AsDate(q1.Rows[x]["MDATE"].ToString()).Year,
BMONTH = sg.AsDate(q1.Rows[x]["MDATE"].ToString()).Month,
PERPAY = q1.AsDecimal("PERPAY", x),
BASIC = q1.AsDecimal("PERPAY", x),
DARATE = q1.AsDecimal("DA", x),
NEWDA = q1.AsDecimal("NEWDA",x),
ADNDA = q1.AsDecimal ("ADNDA",x),
HRA = q1.AsDecimal ("HRA",x),
ENTREL = q1.AsDecimal("ENTREL",x),
EXTRA = q1.AsDecimal("EXTRA",x),
INSKNP = q1.AsDecimal("INSGKP",x),
MEDI = q1.AsDecimal("MEDI",x),
RD = q1.AsDecimal("RD",x),
GPF = q1.AsDecimal("GPF",x),
GPFLOAN= q1.AsDecimal("GPFLOAN",x),
CPF = q1.AsDecimal("CPF",x),
CPFLOAN = q1.AsDecimal("CPFLOAN",x),
LICPRE = q1.AsDecimal("LICPRE",x),
LICLOAN = q1.AsDecimal("LICLOAN",x),
ITAX = q1.AsDecimal("ITAX",x),
JSBLOAN = q1.AsDecimal("JSBLOAN",x),
NSBLOAN = q1.AsDecimal("NSBLOAN",x),
STEMP = q1.AsDecimal("STEMP",x),
PROFUND = q1.AsDecimal("PROFUND",x),
TYPE_ATX = q1.AsString("TYPE",x),
GRADE = q1.AsString("GRADE",x),
TFUND = q1.AsDecimal("TFUND",x),
GFUND = q1.AsDecimal("GFUND",x),
OBCLOAN = q1.AsDecimal("OBCLOAN",x),
OTH1_LOAN = q1.AsDecimal("OTH1_LOAN",x),
OTH2_LOAN = q1.AsDecimal("OTH2_LOAN",x),
BSTGBANK = q1.AsDecimal("BSTGBANK",x),
EARNING = q1.AsDecimal("EARNING",x),
TOTDED = q1.AsDecimal("TOTDED",x),
NETPAY = q1.AsDecimal("NETPAY",x),
CITYALW = q1.AsDecimal("CITYALW",x),
user_code = lib.user_code,
modi_dt = xdate
};
g1.PAYs.Add(q11);
lbl_counter.Text = x.ToString() + " Records Inserted";
Application.DoEvents();
}
try
{
g1.SaveChanges();
}
catch (Exception ex)
{
string emsg = ex.Message;
// Walk the whole inner-exception chain without assuming every level exists.
Exception inner = ex.InnerException;
while (inner != null)
{
emsg = emsg + inner.Message;
inner = inner.InnerException;
}
lib.ShowMessage(emsg);
}
lib.ShowMessage("Done..");
}
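
DbContext.SaveChanges() is all-or-nothing: if any one PAY row violates a constraint, the whole batch is rolled back, which is why one bad record blocks the import. One way to skip the failing rows, sketched under the assumption that g1 is an Entity Framework context and the PAY objects are first collected in a list (payRecords below is a hypothetical name):

int saved = 0, skipped = 0;
foreach (var pay in payRecords)
{
    g1.PAYs.Add(pay);
    try
    {
        g1.SaveChanges();   // save one row at a time
        saved++;
    }
    catch (Exception)
    {
        // Detach the failing entity so it does not block the following saves.
        g1.PAYs.Remove(pay);
        skipped++;
    }
}
lbl_counter.Text = saved + " inserted, " + skipped + " skipped";

Row-by-row saving is slower than one large SaveChanges; a common compromise is to save in batches and fall back to row-by-row only for a batch that fails.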

Elasticsearch speed comparison issue with SQL

I am comparing the speed of SQL Server with Elasticsearch. I have 1.5 million records, but the SQL query runs faster than the Elasticsearch search. I don't understand the problem.
Why does the LIKE query come back faster in SQL?
My code for SQL:
public static List<Sales> GetAllRecords(string itemType)
{
List<Sales> salesReports = new List<Sales>();
string sqlQuery = String.Format(@"SELECT * FROM dbo.Sales where Region like '%{0}%'", itemType);
using (SqlConnection connection = new SqlConnection(CONNECTION_STRING))
{
var result = connection.Query<Sales>(sqlQuery);
foreach (var item in result)
{
Sales global = new Sales()
{
Region = item.Region,
Country = item.Country,
Item_Type=item.Item_Type,
Order_Date=item.Order_Date,
Order_ID = item.Order_ID,
Order_Priority=item.Order_Priority,
Sales_Channel=item.Sales_Channel,
Ship_Date = item.Ship_Date,
Total_Cost=item.Total_Cost,
Total_Profit=item.Total_Profit,
Total_Revenue=item.Total_Revenue,
Units_Sold=item.Units_Sold,
Unit_Cost=item.Unit_Cost,
Unit_Price = item.Unit_Price
};
salesReports.Add(global);
}
return result.ToList();
}
}
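As an aside, the SQL version builds the LIKE pattern with String.Format, which is open to SQL injection and prevents query-plan reuse. A parameterized sketch using the same Dapper Query<Sales> call as above:

public static List<Sales> GetAllRecords(string itemType)
{
    string sqlQuery = "SELECT * FROM dbo.Sales WHERE Region LIKE @pattern";
    using (SqlConnection connection = new SqlConnection(CONNECTION_STRING))
    {
        // Dapper maps the anonymous object's property to the @pattern parameter.
        return connection.Query<Sales>(sqlQuery, new { pattern = "%" + itemType + "%" }).ToList();
    }
}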
My code for Elasticsearch.
Here I search the data that I indexed earlier with Elasticsearch:
public static List<Sales> ConfigureES(string inputText)
{
List<Sales> salesReports = new List<Sales>();
// 1. Connection URL's elastic search
var listOfUrls = new Uri[]
{
// here we can set multiple connection URLs...
new Uri("http://localhost:9200/")
};
StaticConnectionPool connPool = new StaticConnectionPool(listOfUrls);
ConnectionSettings connSett = new ConnectionSettings(connPool);
ElasticClient eClient = new ElasticClient(connSett);
// var see = eClient.DeleteIndex(INDEX_NAME);
// check the connection health
var checkClusterHealth = eClient.ClusterHealth();
if (checkClusterHealth.ApiCall.Success && checkClusterHealth.IsValid)
{
// 2. check the index exist or not
var checkResult = eClient.IndexExists(INDEX_NAME);
if (!checkResult.Exists)
{
// Raise error to Index not avaliable
}
// Search particular text field
var searchResponse = eClient.Search<Sales>(s =>
s.Index(INDEX_NAME).From(0).Size(5000).Scroll("10m")
.Query(q => q.Match(m => m.Field(f => f.Region).Query(inputText))));
//var results = eClient.Scroll<Salesreport>("10m", searchResponse.ScrollId);
while (searchResponse.Documents.Any())
{
var res = searchResponse.Documents;
var sds = res.Cast<Sales>();
salesReports.AddRange(sds);
searchResponse = eClient.Scroll<Sales>("10m", searchResponse.ScrollId);
}
}
else
{
// fail log the exception further use
var exception = checkClusterHealth.OriginalException.ToString();
var debugException = checkClusterHealth.DebugInformation.ToString();
}
return salesReports;
}
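Part of the difference may simply be what each test measures: the Elasticsearch path scrolls every matching document back over HTTP, 5000 at a time, and materializes them all, while the SQL test reads rows straight from a local connection. If the goal is only to time the lookup itself, a count query avoids transferring the hits; a sketch assuming the same NEST client and index as above:

// Count matching documents without retrieving them.
var countResponse = eClient.Count<Sales>(c => c
    .Index(INDEX_NAME)
    .Query(q => q.Match(m => m.Field(f => f.Region).Query(inputText))));
long matches = countResponse.Count;

Note also that a Match query on an analyzed field and a SQL LIKE '%...%' are not equivalent queries, so the two timings compare different kinds of work.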
Where I index data into Elasticsearch:
public static string CONNECTION_STRING = string.Empty;
public static string INDEX_NAME = "elastic";
public static string INDEX_TYPE = "report4";
private static ElasticClient eClient;
static void Main(string[] args)
{
try
{
// read the config file ...
var configuration = new ConfigurationBuilder()
.SetBasePath(@"C:\Users\Celal\Desktop\ElasticSearch-master\ElasticSearch-master\ElasticSearchBGPJob\ElasticSearchBGPJob\ElasticSearchBGPJob")
.AddJsonFile("appsettings.json", false)
.Build();
CONNECTION_STRING = configuration.GetSection("DefaultConnection").Value;
if (string.IsNullOrEmpty(CONNECTION_STRING))
throw new ArgumentException("No connection string in appsettings.json");
// 1. Connection URL's elastic search
var listOfUrls =
// here we can set multiple connection URLs...
new Uri("http://localhost:9200/");
ConnectionSettings connSett = new ConnectionSettings(listOfUrls);
eClient = new ElasticClient(connSett);
// var see = eClient.DeleteIndex(INDEX_NAME);
var createIndexDescriptor = new CreateIndexDescriptor(INDEX_NAME).Mappings(ms => ms.Map<Sales>(m => m.AutoMap()));
// check the connection health
var checkClusterHealth = eClient.ClusterHealth();
if (checkClusterHealth.ApiCall.Success && checkClusterHealth.IsValid)
{
// 2. check the index exist or not
var checkResult = eClient.IndexExists(INDEX_NAME);
if (!checkResult.Exists)
{
var createIndexResponse = eClient.CreateIndex(createIndexDescriptor);
if (createIndexResponse.ApiCall.Success && createIndexResponse.IsValid)
{
// index is created successfully....
}
else
{
// fail log the exception further use
var exception = createIndexResponse.OriginalException.ToString();
var debugException = createIndexResponse.DebugInformation.ToString();
}
}
// 3. get the last documet id of index
var lastRecordResponse = eClient.Search<Sales>(s => s
.Index(INDEX_NAME)
.Type(INDEX_TYPE)
.From(0)
.Size(1).Sort(sr => sr.Descending(f => f.Order_ID)));
if (lastRecordResponse.ApiCall.Success && lastRecordResponse.IsValid)
{
Console.WriteLine("Start " + DateTime.Now);
long salesRecordId = 0;
var listofrecords = new List<Sales>();
if (lastRecordResponse.Documents.Count >= 1)
{
var obj = lastRecordResponse.Documents;
foreach (var item in obj)
{
salesRecordId = item.Order_ID;
}
listofrecords = GetAllRecords(salesRecordId);
}
else
{
listofrecords = GetAllRecords(salesRecordId);
}
Console.WriteLine("END " + DateTime.Now);
// Insert the data into document format corresponding index...
if (listofrecords.Count > 0)
{
Console.WriteLine("===== START========= " + DateTime.Now);
BulkInsertData(listofrecords, eClient).Wait();
Console.WriteLine("===== END========= " + DateTime.Now);
}
}
else
{
// fail log the exception further use
var exception = lastRecordResponse.OriginalException.ToString();
var debugException = lastRecordResponse.DebugInformation.ToString();
}
}
else
{
// fail log the exception further use
var exception = checkClusterHealth.OriginalException.ToString();
var debugException = checkClusterHealth.DebugInformation.ToString();
}
Console.WriteLine("Hello World!");
Console.ReadLine();
}
catch (Exception ex)
{
Console.WriteLine(ex.ToString());
Console.ReadLine();
}
}
public static List<Sales> GetAllRecords(long LastSalesId)
{
List<Sales> salesReports = new List<Sales>();
string sqlQuery = String.Format(@"SELECT * FROM dbo.Sales where Order_ID > {0} ", LastSalesId);
using (SqlConnection connection = new SqlConnection(CONNECTION_STRING))
{
connection.Open();
using (SqlCommand command = new SqlCommand(sqlQuery, connection))
{
command.CommandTimeout = 1000;
using (SqlDataReader dataReader = command.ExecuteReader())
{
if (dataReader.HasRows)
{
while (dataReader.Read())
{
Sales global = new Sales()
{
Order_ID=Convert.ToInt32(dataReader["Order_ID"]),
Region=Convert.ToString(dataReader["Region"]),
Country = Convert.ToString(dataReader["Country"]),
Total_Cost = (decimal)Convert.ToDouble(dataReader["Total_Cost"]),
Total_Revenue = Convert.ToString(dataReader["Total_Revenue"]),
Item_Type = Convert.ToString(dataReader["Item_Type"])
};
salesReports.Add(global);
}
}
}
}
connection.Close();
}
return salesReports;
}
static async Task BulkInsertData(List<Sales> ListofData, ElasticClient Eclient)
{
try
{
var splitTheLargeList = ChunkBy(ListofData);
var test = splitTheLargeList.LastOrDefault();
foreach (var item in splitTheLargeList)
{
var bulkResponse = await Eclient.BulkAsync(b => b
.Index(INDEX_NAME)
// .Type(INDEX_TYPE)
.IndexMany(item));
if (bulkResponse.ApiCall.Success && bulkResponse.IsValid)
{
// success fully inserted...
}
else
{
// fail log the exception further use
var exception = bulkResponse.OriginalException.ToString();
var debugException = bulkResponse.DebugInformation.ToString();
}
}
}
catch (Exception ex)
{
Console.WriteLine(ex.InnerException.ToString());
}
}
public static List<List<T>> ChunkBy<T>(List<T> source, int chunkSize = 1000)
{
return source
.Select((x, i) => new { Index = i, Value = x })
.GroupBy(x => x.Index / chunkSize)
.Select(x => x.Select(v => v.Value).ToList())
.ToList();
}
public static IEnumerable<List<T>> SplitList<T>(List<T> ListofData, int listSize = 1000)
{
for (int i = 0; i < ListofData.Count; i += listSize)
{
yield return ListofData.GetRange(i, Math.Min(listSize, ListofData.Count - i));
}
}

Code to Insert Image into SQL Server

I want to insert an image into the SQL Server database from my Windows Forms application.
This question looks like it was trying to ask what I wanted to find out, but it was closed:
Insert image into SQL Server
Here is the code I used to do that.
Modify this code as needed for the table you are going to use by viewing the table design in SQL Server Management Studio:
public static void InsertImage(int inventoryID, int businessID, FileInfo file, string sqlConnection)
{
var list = new List<byte>();
using (var stream = file.Open(FileMode.Open))
{
var data = new byte[stream.Length];
stream.Read(data, 0, data.Length);
list.AddRange(data);
}
var bmp = System.Drawing.Image.FromFile(file.FullName, true);
using (var conn = new SqlConnection(sqlConnection))
{
conn.Open();
var imageId = -1;
var sqlSelect = "SELECT [ImageId] FROM [dbo].[ImageTable] WHERE [InventoryId]=@InventoryId;";
using (var cmd = new SqlCommand(sqlSelect, conn))
{
cmd.Parameters.Add("@InventoryId", System.Data.SqlDbType.Int).Value = inventoryID;
using (var r = cmd.ExecuteReader())
{
if (r.Read())
{
var o = r["ImageId"];
if ((o != null) && (o != DBNull.Value))
{
imageId = (int)o;
}
}
}
}
if (imageId == -1)
{
var sqlCmd = "INSERT INTO [dbo].[ImageTable] " +
"([InventoryId], [ImageFileName], [ImageSize], [ImageWidth], [ImageHeight], [ImageBytes]) " +
"VALUES " +
"(@InventoryId, @ImageFileName, @ImageSize, @ImageWidth, @ImageHeight, @ImageBytes); ";
using (var cmd = new SqlCommand(sqlCmd, conn))
{
cmd.Parameters.Add("@InventoryId", System.Data.SqlDbType.Int).Value = inventoryID;
cmd.Parameters.Add("@ImageFileName", System.Data.SqlDbType.VarChar, 255).Value = file.Name;
cmd.Parameters.Add("@ImageSize", System.Data.SqlDbType.Int).Value = list.Count;
cmd.Parameters.Add("@ImageWidth", System.Data.SqlDbType.SmallInt).Value = bmp.Width;
cmd.Parameters.Add("@ImageHeight", System.Data.SqlDbType.SmallInt).Value = bmp.Height;
cmd.Parameters.Add("@ImageBytes", System.Data.SqlDbType.VarBinary, -1).Value = list.ToArray();
cmd.ExecuteNonQuery();
}
}
}
}
To run/test the code, I created this helper method:
public static string[] GetImages(string fullFolderPath, string searchPattern)
{
var list = new List<String>();
if (Directory.Exists(fullFolderPath))
{
if (String.IsNullOrEmpty(searchPattern))
{
searchPattern = "*.jpg";
}
var dir = new DirectoryInfo(fullFolderPath);
var files = dir.GetFiles(searchPattern);
for (int i = 0; i < files.Length; i++)
{
InsertImage(i + 1, 1, files[i], _sqlConnection);
list.Add(files[i].FullName);
}
}
return list.ToArray();
}
Now, running it from my Console Application is a simple, single call:
static void Main(string[] args)
{
var list = GetImages(@"C:\inetpub\wwwroot\Ads", "*.jpg");
}
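
For completeness, reading an image back out follows the same pattern in reverse; a sketch assuming the ImageTable schema used above:

public static System.Drawing.Image GetImage(int inventoryID, string sqlConnection)
{
    using (var conn = new SqlConnection(sqlConnection))
    {
        conn.Open();
        var sql = "SELECT [ImageBytes] FROM [dbo].[ImageTable] WHERE [InventoryId]=@InventoryId;";
        using (var cmd = new SqlCommand(sql, conn))
        {
            cmd.Parameters.Add("@InventoryId", System.Data.SqlDbType.Int).Value = inventoryID;
            var bytes = cmd.ExecuteScalar() as byte[];
            if (bytes == null) return null;
            // GDI+ requires the stream to stay alive for the lifetime of the Image,
            // so the MemoryStream is intentionally not disposed here.
            var ms = new System.IO.MemoryStream(bytes);
            return System.Drawing.Image.FromStream(ms);
        }
    }
}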
