I am using HangFire to run a job. I have a method that retrieves data from more than 50 SQL Servers.
I want HangFire to run each location separately by LocationID, and if one location fails I want the job to run again for that location after x minutes.
I also want to see the job's log on the HangFire Dashboard.
My job method:
public void SyncCompInfo()
{
List<Location> locations;
using (var db = new CompInfoDemoEntities())
{
locations = db.Locations.Where(x => x.IsActive).ToList();
}
foreach (var location in locations)
{
try
{
using (var _db = new CompInfoDemoEntities())
{
_log.Info("Getting CompoInfo data from location: " + location.StoreName);
_syncLogService.Add("Getting CompoInfo data from location: " + location.StoreName);
var responses = new List<CompInfoResponse>();
var compInfos = _db.IMS_CompInfo.Where(x => x.LocationId == location.Id).ToList();
using (var cnn = new SqlConnection(location.ConnectionString))
{
var sql = "select * from IMS_CompInfo;";
var sqlCmd = new SqlCommand(sql, cnn);
cnn.Open();
using (SqlDataReader rdr = sqlCmd.ExecuteReader())
{
while (rdr.Read())
{
var item = new CompInfoResponse();
item.Id = int.Parse(rdr["Id"].ToString());
item.ClientID = rdr["ClientID"].ToString();
item.LicenceID = rdr["LicenceID"].ToString();
item.POSCode = rdr["POSCode"].ToString();
item.logiPOS_Version = rdr["logiPOS_Version"].ToString();
if (rdr["LastLoginDate"] != null)
{
item.LastLoginDate = DateTime.Parse(rdr["LastLoginDate"].ToString());
}
item.ComputerName = rdr["ComputerName"].ToString();
if (rdr["BootTime"] != null)
{
item.BootTime = DateTime.Parse(rdr["BootTime"].ToString());
}
item.Domain = rdr["Domain"].ToString();
item.Manufacturer = rdr["Manufacturer"].ToString();
item.Model = rdr["Model"].ToString();
item.Memory = rdr["Memory"].ToString();
item.OS = rdr["OS"].ToString();
item.Build = rdr["Build"].ToString();
item.CPU = rdr["CPU"].ToString();
item.ProcArchitecture = rdr["ProcArchitecture"].ToString();
item.IP1 = rdr["IP1"].ToString();
item.MAC1 = rdr["MAC1"].ToString();
if (rdr["LastModifiedDate"] != null)
{
item.LastModifiedDate = DateTime.Parse(rdr["LastModifiedDate"].ToString());
}
if (rdr["Tag"] != null)
{
item.Tag = int.Parse(rdr["Tag"].ToString());
}
item.Application = rdr["Application"].ToString();
responses.Add(item);
}
}
}
// ... merge "responses" into the local IMS_CompInfo data here (remainder of the method omitted) ...
}
}
catch (Exception ex)
{
_log.Info("Failed to get CompInfo data from location " + location.StoreName + ": " + ex.Message);
}
}
}
What you seem to need is something like this: a method you call upon startup, which loops over the locations and enqueues a job for each location.
I oversimplified things (for example, by making the methods static), but I think most of the idea is there.
Also have a look at Hangfire recurring tasks; they may be better suited to your needs than tasks fired upon application startup (see the sketch after the code below).
public void CalledUponStartup()
{
List<Location> locations;
using (var db = new CompInfoDemoEntities())
{
locations = db.Locations.Where(x => x.IsActive).ToList();
}
foreach (var location in locations)
{
BackgroundJob.Enqueue(() => SyncCompInfo(location.Id));
}
}
public static void SyncCompInfo(int locationId)
{
try
{
using (var _db = new CompInfoDemoEntities())
{
var location = _db.Locations.FirstOrDefault(x => x.Id == locationId);
_log.Info("Getting CompInfo data from location: " + location.StoreName);
_syncLogService.Add("Getting CompInfo data from location: " + location.StoreName);
var responses = new List<CompInfoResponse>();
var compInfos = _db.IMS_CompInfo.Where(x => x.LocationId == location.Id).ToList();
using (var cnn = new SqlConnection(location.ConnectionString))
{
var sql = "select * from IMS_CompInfo;";
var sqlCmd = new SqlCommand(sql, cnn);
cnn.Open();
using (SqlDataReader rdr = sqlCmd.ExecuteReader())
{
while (rdr.Read())
{
var item = new CompInfoResponse();
item.Id = int.Parse(rdr["Id"].ToString());
item.ClientID = rdr["ClientID"].ToString();
item.LicenceID = rdr["LicenceID"].ToString();
item.POSCode = rdr["POSCode"].ToString();
item.logiPOS_Version = rdr["logiPOS_Version"].ToString();
if (rdr["LastLoginDate"] != null)
{
item.LastLoginDate = DateTime.Parse(rdr["LastLoginDate"].ToString());
}
item.ComputerName = rdr["ComputerName"].ToString();
if (rdr["BootTime"] != null)
{
item.BootTime = DateTime.Parse(rdr["BootTime"].ToString());
}
item.Domain = rdr["Domain"].ToString();
item.Manufacturer = rdr["Manufacturer"].ToString();
item.Model = rdr["Model"].ToString();
item.Memory = rdr["Memory"].ToString();
item.OS = rdr["OS"].ToString();
item.Build = rdr["Build"].ToString();
item.CPU = rdr["CPU"].ToString();
item.ProcArchitecture = rdr["ProcArchitecture"].ToString();
item.IP1 = rdr["IP1"].ToString();
item.MAC1 = rdr["MAC1"].ToString();
if (rdr["LastModifiedDate"] != null)
{
item.LastModifiedDate = DateTime.Parse(rdr["LastModifiedDate"].ToString());
}
if (rdr["Tag"] != null)
{
item.Tag = int.Parse(rdr["Tag"].ToString());
}
item.Application = rdr["Application"].ToString();
responses.Add(item);
}
}
}
// ... merge "responses" into the local IMS_CompInfo data here (remainder of the method omitted) ...
}
}
catch (Exception ex)
{
_log.Info("Failed to sync location " + locationId + ": " + ex.Message);
}
}
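For the retry-after-x-minutes requirement and for running the sync on a schedule instead of only at startup, Hangfire's AutomaticRetry attribute and the RecurringJob API cover both, and failed or retried jobs (with their exception details) show up on the dashboard automatically. The sketch below is illustrative only: the "SyncCompInfo-" job id naming and the hourly cron are assumptions, and DelaysInSeconds needs Hangfire 1.7 or later. If you also want your own _log.Info lines visible per job on the dashboard, the third-party Hangfire.Console extension can do that, but it is a separate package.
// Sketch only: retry a failed location sync after a fixed delay and run each location on a schedule.
[AutomaticRetry(Attempts = 5, DelaysInSeconds = new[] { 300 })] // up to 5 retries, 5 minutes apart (Hangfire 1.7+)
public static void SyncCompInfo(int locationId)
{
    // same body as above
}

public void CalledUponStartup()
{
    List<Location> locations;
    using (var db = new CompInfoDemoEntities())
    {
        locations = db.Locations.Where(x => x.IsActive).ToList();
    }
    foreach (var location in locations)
    {
        // One recurring job per location; "SyncCompInfo-{id}" is just an example naming scheme.
        RecurringJob.AddOrUpdate(
            "SyncCompInfo-" + location.Id,
            () => SyncCompInfo(location.Id),
            Cron.Hourly());
    }
}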
I am comparing the speed of SQL Server with Elasticsearch. I have 1.5 million records, but the SQL query runs faster than the Elasticsearch search, and I don't understand why.
Why does the LIKE query come back faster in SQL?
My code for SQL:
public static List<Sales> GetAllRecords(string itemType)
{
List<Sales> salesReports = new List<Sales>();
string sqlQuery = String.Format(#"SELECT * FROM dbo.Sales where Region like '%{0}%'", itemType);
using (SqlConnection connection = new SqlConnection(CONNECTION_STRING))
{
var result = connection.Query<Sales>(sqlQuery);
foreach (var item in result)
{
Sales global = new Sales()
{
Region = item.Region,
Country = item.Country,
Item_Type=item.Item_Type,
Order_Date=item.Order_Date,
Order_ID = item.Order_ID,
Order_Priority=item.Order_Priority,
Sales_Channel=item.Sales_Channel,
Ship_Date = item.Ship_Date,
Total_Cost=item.Total_Cost,
Total_Profit=item.Total_Profit,
Total_Revenue=item.Total_Revenue,
Units_Sold=item.Units_Sold,
Unit_Cost=item.Unit_Cost,
Unit_Price = item.Unit_Price
};
salesReports.Add(global);
}
return salesReports;
}
}
My code for Elasticsearch.
Here I search the data that I indexed earlier:
public static List<Sales> ConfigureES(string inputText)
{
List<Sales> salesReports = new List<Sales>();
// 1. Connection URLs for Elasticsearch
var listOfUrls = new Uri[]
{
// here we can set multiple connection URLs...
new Uri("http://localhost:9200/")
};
StaticConnectionPool connPool = new StaticConnectionPool(listOfUrls);
ConnectionSettings connSett = new ConnectionSettings(connPool);
ElasticClient eClient = new ElasticClient(connSett);
// var see = eClient.DeleteIndex(INDEX_NAME);
// check the connection health
var checkClusterHealth = eClient.ClusterHealth();
if (checkClusterHealth.ApiCall.Success && checkClusterHealth.IsValid)
{
// 2. check whether the index exists
var checkResult = eClient.IndexExists(INDEX_NAME);
if (!checkResult.Exists)
{
// Raise an error: index not available
}
// Search particular text field
var searchResponse = eClient.Search<Sales>(s =>
s.Index(INDEX_NAME).From(0).Size(5000).Scroll("10m")
.Query(q => q.Match(m => m.Field(f => f.Region).Query(inputText))));
//var results = eClient.Scroll<Salesreport>("10m", searchResponse.ScrollId);
while (searchResponse.Documents.Any())
{
var res = searchResponse.Documents;
var sds = res.Cast<Sales>();
salesReports.AddRange(sds);
searchResponse = eClient.Scroll<Sales>("10m", searchResponse.ScrollId);
}
}
else
{
// failure: log the exception for later use
var exception = checkClusterHealth.OriginalException.ToString();
var debugException = checkClusterHealth.DebugInformation.ToString();
}
return salesReports;
}
Here is where I index the data into Elasticsearch:
public static string CONNECTION_STRING = string.Empty;
public static string INDEX_NAME = "elastic";
public static string INDEX_TYPE = "report4";
private static ElasticClient eClient;
static void Main(string[] args)
{
try
{
// read the config file ...
var configuration = new ConfigurationBuilder()
.SetBasePath(#"C:\Users\Celal\Desktop\ElasticSearch-master\ElasticSearch-master\ElasticSearchBGPJob\ElasticSearchBGPJob\ElasticSearchBGPJob")
.AddJsonFile("appsettings.json", false)
.Build();
CONNECTION_STRING = configuration.GetSection("DefaultConnection").Value;
if (string.IsNullOrEmpty(CONNECTION_STRING))
throw new ArgumentException("No connection string in appsettings.json");
// 1. Connection URLs for Elasticsearch
var listOfUrls =
// here we can set multiple connection URLs...
new Uri("http://localhost:9200/");
ConnectionSettings connSett = new ConnectionSettings(listOfUrls);
eClient = new ElasticClient(connSett);
// var see = eClient.DeleteIndex(INDEX_NAME);
var createIndexDescriptor = new CreateIndexDescriptor(INDEX_NAME).Mappings(ms => ms.Map<Sales>(m => m.AutoMap()));
// check the connection health
var checkClusterHealth = eClient.ClusterHealth();
if (checkClusterHealth.ApiCall.Success && checkClusterHealth.IsValid)
{
// 2. check whether the index exists
var checkResult = eClient.IndexExists(INDEX_NAME);
if (!checkResult.Exists)
{
var createIndexResponse = eClient.CreateIndex(createIndexDescriptor);
if (createIndexResponse.ApiCall.Success && createIndexResponse.IsValid)
{
// index is created successfully....
}
else
{
// failure: log the exception for later use
var exception = createIndexResponse.OriginalException.ToString();
var debugException = createIndexResponse.DebugInformation.ToString();
}
}
// 3. get the last document id in the index
var lastRecordResponse = eClient.Search<Sales>(s => s
.Index(INDEX_NAME)
.Type(INDEX_TYPE)
.From(0)
.Size(1).Sort(sr => sr.Descending(f => f.Order_ID)));
if (lastRecordResponse.ApiCall.Success && lastRecordResponse.IsValid)
{
Console.WriteLine("Start " + DateTime.Now);
long salesRecordId = 0;
var listofrecords = new List<Sales>();
if (lastRecordResponse.Documents.Count >= 1)
{
var obj = lastRecordResponse.Documents;
foreach (var item in obj)
{
salesRecordId = item.Order_ID;
}
listofrecords = GetAllRecords(salesRecordId);
}
else
{
listofrecords = GetAllRecords(salesRecordId);
}
Console.WriteLine("END " + DateTime.Now);
// Insert the data as documents into the corresponding index...
if (listofrecords.Count > 0)
{
Console.WriteLine("===== START========= " + DateTime.Now);
BulkInsertData(listofrecords, eClient).Wait();
Console.WriteLine("===== END========= " + DateTime.Now);
}
}
else
{
// failure: log the exception for later use
var exception = lastRecordResponse.OriginalException.ToString();
var debugException = lastRecordResponse.DebugInformation.ToString();
}
}
else
{
// failure: log the exception for later use
var exception = checkClusterHealth.OriginalException.ToString();
var debugException = checkClusterHealth.DebugInformation.ToString();
}
Console.WriteLine("Hello World!");
Console.ReadLine();
}
catch (Exception ex)
{
Console.WriteLine(ex.ToString());
Console.ReadLine();
}
}
public static List<Sales> GetAllRecords(long LastSalesId)
{
List<Sales> salesReports = new List<Sales>();
string sqlQuery = String.Format(#"SELECT * FROM dbo.Sales where Order_ID > {0} ", LastSalesId);
using (SqlConnection connection = new SqlConnection(CONNECTION_STRING))
{
connection.Open();
using (SqlCommand command = new SqlCommand(sqlQuery, connection))
{
command.CommandTimeout = 1000;
using (SqlDataReader dataReader = command.ExecuteReader())
{
if (dataReader.HasRows)
{
while (dataReader.Read())
{
Sales global = new Sales()
{
Order_ID=Convert.ToInt32(dataReader["Order_ID"]),
Region=Convert.ToString(dataReader["Region"]),
Country = Convert.ToString(dataReader["Country"]),
Total_Cost = (decimal)Convert.ToDouble(dataReader["Total_Cost"]),
Total_Revenue = Convert.ToString(dataReader["Total_Revenue"]),
Item_Type = Convert.ToString(dataReader["Item_Type"])
};
salesReports.Add(global);
}
}
}
}
connection.Close();
}
return salesReports;
}
static async Task BulkInsertData(List<Sales> ListofData, ElasticClient Eclient)
{
try
{
var splitTheLargeList = ChunkBy(ListofData);
var test = splitTheLargeList.LastOrDefault();
foreach (var item in splitTheLargeList)
{
var bulkResponse = await Eclient.BulkAsync(b => b
.Index(INDEX_NAME)
// .Type(INDEX_TYPE)
.IndexMany(item));
if (bulkResponse.ApiCall.Success && bulkResponse.IsValid)
{
// successfully inserted...
}
else
{
// failure: log the exception for later use
var exception = bulkResponse.OriginalException.ToString();
var debugException = bulkResponse.DebugInformation.ToString();
}
}
}
catch (Exception ex)
{
Console.WriteLine(ex.InnerException.ToString());
}
}
public static List<List<T>> ChunkBy<T>(List<T> source, int chunkSize = 1000)
{
return source
.Select((x, i) => new { Index = i, Value = x })
.GroupBy(x => x.Index / chunkSize)
.Select(x => x.Select(v => v.Value).ToList())
.ToList();
}
public static IEnumerable<List<T>> SplitList<T>(List<T> ListofData, int listSize = 1000)
{
for (int i = 0; i < ListofData.Count; i += listSize)
{
yield return ListofData.GetRange(i, Math.Min(listSize, ListofData.Count - i));
}
}
I need to refactor the code below so that the deleted_at logic is outside the foreach (var app in data) loop. I tried to create the guids list and add GUIDs to it, but it isn't working: model.resources is inside the loop, and it still deletes all the apps.
I need the deleted_at logic outside the loop because I'm trying to mark as deleted all the apps that are in the database but are not in the new data I'm receiving from the API.
If you have a better approach to my code, I would love to see it. Thank you.
public async Task GetBuilds()
{
var data = new List<GetBuildTempClass>();
var guids = new List<GetBuildTempClass>();
using (var scope = _scopeFactory.CreateScope())
{
var _DBcontext = scope.ServiceProvider.GetRequiredService<PCFStatusContexts>();
foreach (var app in data)
{
var request = new HttpRequestMessage(HttpMethod.Get,
"apps/" + app.AppGuid + "/builds?per_page=200&order_by=updated_at");
var response = await _client_SB.SendAsync(request);
var json = await response.Content.ReadAsStringAsync();
BuildsClass.BuildsRootObject model =
JsonConvert.DeserializeObject<BuildsClass.BuildsRootObject>(json);
foreach (var item in model.resources)
{
var x = _DBcontext.Builds.FirstOrDefault(o =>
o.Guid == Guid.Parse(item.guid));
if (x == null)
{
_DBcontext.Builds.Add(new Builds
{
Guid = Guid.Parse(item.guid),
State = item.state,
CreatedAt = item.created_at,
UpdatedAt = item.updated_at,
Error = item.error,
CreatedByGuid = Guid.Parse(item.created_by.guid),
CreatedByName = item.created_by.name,
CreatedByEmail = item.created_by.email,
AppGuid = app.AppGuid,
AppName = app.AppName,
Foundation = 2,
Timestamp = DateTime.Now
});
}
else if (x.UpdatedAt != item.updated_at)
{
x.State = item.state;
x.UpdatedAt = item.updated_at;
x.Timestamp = DateTime.Now;
}
var sqlresult = new GetBuildTempClass
{
AppGuid = Guid.Parse(item.guid)
};
guids.Add(sqlresult);
}
//var guids = model.resources.Select(r => Guid.Parse(r.guid));
var builds = _DBcontext.Builds.Where(o =>
guids.Contains(o.Guid) == false &&
o.Foundation == 2 && o.DeletedAt == null);
foreach (var build_item in builds)
{
build_item.DeletedAt = DateTime.Now;
}
}
await _DBcontext.SaveChangesAsync();
}
}
I got it working. I added a var guids = new List<Guid>(); list to store the data,
added guids.Add(Guid.Parse(item.guid)); to populate it, and finally moved var builds = _DBcontext.Builds.Where(o => guids.Contains(o.Guid) == false && o.Foundation == 2 && o.DeletedAt == null); outside the loop.
If anyone has a better suggestion, please add a new answer.
public async Task GetBuilds() {
var data = new List<GetBuildTempClass>();
var guids = new List<Guid>();
using (var scope = _scopeFactory.CreateScope()) {
var _DBcontext = scope.ServiceProvider.GetRequiredService<PCFStatusContexts>();
foreach (var app in data) {
var request = new HttpRequestMessage(HttpMethod.Get, "apps/" + app.AppGuid + "/builds?per_page=200&order_by=updated_at");
var response = await _client_SB.SendAsync(request);
var json = await response.Content.ReadAsStringAsync();
BuildsClass.BuildsRootObject model = JsonConvert.DeserializeObject<BuildsClass.BuildsRootObject>(json);
foreach (var item in model.resources) {
var x = _DBcontext.Builds.FirstOrDefault(o => o.Guid == Guid.Parse(item.guid));
if (x == null) {
_DBcontext.Builds.Add(new Builds {
Guid = Guid.Parse(item.guid),
State = item.state,
CreatedAt = item.created_at,
UpdatedAt = item.updated_at,
Error = item.error,
CreatedByGuid = Guid.Parse(item.created_by.guid),
CreatedByName = item.created_by.name,
CreatedByEmail = item.created_by.email,
AppGuid = app.AppGuid,
AppName = app.AppName,
Foundation = 2,
Timestamp = DateTime.Now
});
}
else if (x.UpdatedAt != item.updated_at) {
x.State = item.state;
x.UpdatedAt = item.updated_at;
x.Timestamp = DateTime.Now;
}
guids.Add(Guid.Parse(item.guid));
}
}
var builds = _DBcontext.Builds.Where(o => guids.Contains(o.Guid) == false && o.Foundation == 2 && o.DeletedAt == null);
foreach(var build_item in builds) {
build_item.DeletedAt = DateTime.Now;
}
await _DBcontext.SaveChangesAsync();
}
}
I have created a Windows service scheduler which runs every 24 hours. When I started the service the first time, it inserted data into the database and ran perfectly fine.
But on the second run the service stops, whereas I want the service to run continually.
I have also created a scheduler to run it every minute, but the data only inserts when the time matches.
public void StartService(object e)
{
try {
//ExtractorEventLog("0", null);
List<PracticeInformation> getServiceTime = GetpracticeInformationList();
var gettime = (from t in getServiceTime
select new PracticeInformation
{
extractor_start_time = t.extractor_start_time
}).OrderByDescending(x => x.id).ToList().FirstOrDefault();
DateTime SetdateTime = DateTime.Parse(ConfigurationManager.AppSettings["ScheduledTime"]);
if (gettime != null)
{
SetdateTime = DateTime.Parse(gettime.extractor_start_time.ToString());
}
try
{
if (SetdateTime.ToString("HH:mm") == DateTime.Now.ToString("HH:mm"))
{
ExtractorEventLog(SetdateTime.ToString("HH:mm") + " " + DateTime.Now.ToString("HH:mm"), null);
string strFile = #"C:\requiredFilesForDDP\Temp" + DateTime.Now.Ticks.ToString() + ".csv";
if (connm == "")
{
GetConnectionString();
}
using (OdbcConnection conn = new OdbcConnection(connm))
{
//ExtractorEventLog(connm, null);
conn.Open();
#region Get Hospital data
var id = 0;
try
{
List<PracticeInformation> practiceList = getServiceTime;
var practiceName = practiceList[0].name;
id = practiceList[0].id;
List<PracticeInformationModel> HospitalAll = new List<PracticeInformationModel>();
using (OdbcCommand com = new OdbcCommand(Common.cmdGetHospital, conn))
{
using (OdbcDataReader readerHospital = com.ExecuteReader())
{
HospitalAll = DataReaderMapToList<PracticeInformationModel>(readerHospital);
List<PracticeInformationModel> objPracticeInformationmodel = (from pi in HospitalAll
select new PracticeInformationModel
{
practice_name = pi.practice_name,
phone = pi.phone
}
).ToList();
var getPracticeName = practiceList.Select(x => new { x.name }).ToList();
List<PracticeInformationModel> objpracticelist = objPracticeInformationmodel.Where(x => !getPracticeName.Any(y => y.name == x.practice_name)).ToList();
DataTable PracticeInformationDT = ToDataTable(objpracticelist); // To save PracticeInformation table
using (MySqlConnection sqConn = new MySqlConnection(Common.connectionStringDB))
{
using (SqlBulkCopy blkcopyPi = new SqlBulkCopy(sqConn.ConnectionString, SqlBulkCopyOptions.KeepNulls))
{
CreateCSVfile(PracticeInformationDT, strFile);
if (!File.Exists(strFile))
{
FileStream fs = new FileStream(strFile, FileMode.Create, FileAccess.Write);
fs.Close();
fs.Dispose();
}
if (sqConn.State == ConnectionState.Open)
{
sqConn.Close();
}
sqConn.Open();
MySqlBulkLoader bcp1 = new MySqlBulkLoader(sqConn);
bcp1.TableName = "PracticeInformation"; //Create PracticeInformation table into MYSQL database...
bcp1.FieldTerminator = ",";
bcp1.LineTerminator = "\r\n";
bcp1.FileName = strFile;
bcp1.NumberOfLinesToSkip = 0;
bcp1.Load();
try
{
File.Delete(strFile);
}
catch (Exception ex)
{
ExceptionLog(ex.Message, ex.GetType().Name.ToString(), ex.StackTrace, id);
}
}
readerHospital.Close();
}
}
}
#endregion
#region Get Provider Data From Dentrix
List<Provider> ProviderAll = new List<Provider>();
using (OdbcCommand com = new OdbcCommand(Common.cmdGetProvider, conn))
{
using (OdbcDataReader readerProvider = com.ExecuteReader())
{
ProviderAll = DataReaderMapToList<Provider>(readerProvider);
List<Provider> objProvider = (from pro in ProviderAll
select new Provider
{
provider_id = pro.provider_id,
first_name = pro.first_name,
last_name = pro.last_name,
work_phone = pro.work_phone
}).ToList();
}
}
#endregion
#region Add Appointment
using (OdbcCommand com = new OdbcCommand(Common.cmdText, conn))
{
using (OdbcDataReader reader = com.ExecuteReader())
{
List<AllModalFromApplointment> AppointmentListAll = new List<AllModalFromApplointment>();
AppointmentListAll = DataReaderMapToList<AllModalFromApplointment>(reader);
List<Alert> patientalert_id = new List<Alert>();
#region Get alert data
MySqlConnection connection = null;
List<string> target = new List<string>();
EventLog(Common.Get_Pre_Med_Alerts);
ExtractorEventLog(Common.Get_Pre_Med_Alerts, id.ToString());
int add = 0;
int alert_count = 0;
//string connectionStringDB = "server=192.168.0.1;user=dentrix;database=db_dentrix;password=dentrix";
connection = new MySqlConnection(Common.connectionStringDB);
foreach (var i in AppointmentListAll.Select(x => x.patient_id).ToList())
{
if (i != 0)
{
OdbcCommand ODBCCommand = new OdbcCommand("{call admin.sp_getpatientmedalerts (?)}", conn);
ODBCCommand.CommandType = CommandType.StoredProcedure;
ODBCCommand.Parameters.AddWithValue("@patient_guid", i); //4898-11738
// ExecuteReader alone runs the stored procedure; the extra ExecuteNonQuery call executed it twice
OdbcDataReader a = ODBCCommand.ExecuteReader();
DataTable getAlert = new DataTable();
if (a.HasRows)
{
target.Add(i.ToString());
alert_count = add + 1;
getAlert.Load(a);
a.Close();
}
}
}
ExtractorEventLog(target.Count() + " " + Common.No_Of_Pre_Med_Alert, null);
#endregion
#region
List<AppointmentDetail> objAppointment = (from app in AppointmentListAll
join pro in ProviderAll on app.provider_id equals pro.provider_id
select new AppointmentDetail
{
patient_id = Convert.ToInt64(app.patient_guid),
appointment_id = Convert.ToInt64(app.appointment_id),
appointment_timestamp = Convert.ToString(app.appointment_date.AddHours(app.start_hour).AddMinutes(app.start_minute)),
patient_first_name = app.patient_name.Split(',')[0],
patient_last_name = app.patient_name.Split(',')[1],
patient_phone_number = app.patient_phone,
red_cross_flag = target.Any(x => x.ToString().Contains(app.patient_id.ToString())) ? "Y" : "N",
dr_name = pro.first_name.Trim() + ' ' + pro.last_name.Trim(),
dr_phone = HospitalAll[0].phone,
hospital_name = HospitalAll[0].practice_name,
appointment_type = null,
fees = 0,
status = null
//createdDate = DateTime.UtcNow
}).ToList();
// Restrict duplicate records
ExtractorEventLog(Common.Data_Inserting, id.ToString());
List<AppointmentDetail> usethe = GetAppointmentList();
var query = usethe.Select(x => new { x.appointment_id }).ToList();
List<AppointmentDetail> xyz = objAppointment.Where(x => !query.Any(y => y.appointment_id == x.appointment_id)).ToList();
if (xyz != null && xyz.Count != 0)
{
var from = xyz.First().appointment_timestamp;
var to = xyz.Last().appointment_timestamp;
ExtractorEventLog(Common.Get_Appointment_Date + from + "and" + to, id.ToString());
}
else
{
EventLog("No appointmet present");
}
int appointment_count = xyz.Count();
DataTable appointmentDT = ToDataTable(xyz); // To save appointment table
using (MySqlConnection sqConn = new MySqlConnection(Common.connectionStringDB))
{
using (SqlBulkCopy blkcopy = new SqlBulkCopy(sqConn.ConnectionString, SqlBulkCopyOptions.KeepNulls))
{
CreateCSVfile(appointmentDT, strFile);
if (!File.Exists(strFile))
{
FileStream fs = new FileStream(strFile, FileMode.Create, FileAccess.Write);
fs.Close();
fs.Dispose();
}
if (sqConn.State == ConnectionState.Open)
{
sqConn.Close();
}
sqConn.Open();
MySqlBulkLoader bcp1 = new MySqlBulkLoader(sqConn);
bcp1.TableName = "appointment";
//bcp1.TableName = "Appointment"; //Create Appointment table into MYSQL database.
bcp1.FieldTerminator = ",";
bcp1.LineTerminator = "\r\n";
bcp1.FileName = strFile;
bcp1.NumberOfLinesToSkip = 0;
bcp1.Load();
try
{
File.Delete(strFile);
}
catch (Exception ex)
{
ExtractorEventLog("appointment-ex", null);
ExceptionLog(ex.Message, ex.GetType().Name.ToString(), ex.StackTrace, id);
}
ExtractorEventLog(appointment_count + " " + Common.No_Of_Appointment_Retrive, id.ToString());
}
ExtractorEventLog(Common.Done, id.ToString());
this.ScheduleService();
}
#endregion
}
}
}
catch (Exception ex)
{
ExtractorEventLog("a-ex", null);
ExceptionLog(ex.Message, ex.GetType().Name.ToString(), ex.StackTrace, id);
}
}
}
else{
this.ScheduleService();
}
}
catch (Exception ex)
{
ExtractorEventLog(ex.Message +"b-ex", null);
//ExceptionLog(ex.Message, ex.GetType().Name.ToString(), ex.StackTrace, id);
}
//EventLog("this ScheduleService()");
// this.ScheduleService();
}
catch(Exception ex)
{
ExtractorEventLog("zz-ex", null);
}
}
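One thing that stands out in the code above is that this.ScheduleService() is only called on the success path and in the final else branch; none of the catch blocks schedule the next run, so the first exception leaves the service idle, which would explain it stopping after the first successful run. Below is a minimal sketch of a pattern that always re-arms the timer. It assumes ScheduleService() is (or can be) a one-shot System.Threading.Timer that calls StartService again; DoExtraction is only a placeholder name for the existing body of StartService.
// Sketch only: keep scheduling in one place and re-arm it on every path, including failures.
private System.Threading.Timer _timer;

public void StartService(object e)
{
    try
    {
        DoExtraction(); // placeholder for the existing extraction logic shown above
    }
    catch (Exception ex)
    {
        ExtractorEventLog(ex.Message, null); // reuse the existing logging helper
    }
    finally
    {
        ScheduleService(); // always schedule the next run, even after an exception
    }
}

public void ScheduleService()
{
    // One-shot timer: fires once after a minute; StartService re-arms it via the finally block.
    _timer = new System.Threading.Timer(StartService, null,
        TimeSpan.FromMinutes(1), System.Threading.Timeout.InfiniteTimeSpan);
}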
I have created a SQLite database in my Windows app, and now I want to fetch the data from it. This is the code I used to insert the data into the database:
var dbpath = Path.Combine(Windows.Storage.ApplicationData.Current.LocalFolder.Path, "scrapbook.sqlite");
using (var db = new SQLite.SQLiteConnection(dbpath))
{
// Create the tables if they don't exist
var bg = listAlbumContainer[0] as AlbumContainer;
var albumbg = bg.BackgroundImageName;
var sk = listAlbumContainer[0].listAlbumContainer;
var _audio = listAlbumContainer[0].listAlbumContainer5;
var _video = listAlbumContainer[0].listAlbumContainer3;
var wa = listAlbumContainer[0].listAlbumContainer4;
var ci = listAlbumContainer[0].listAlbumContainer2;
var gi = listAlbumContainer[0].listAlbumContainer1;
string snodesticker = Serializeanddeserializwhelper.Serialize(sk);
string nodegi = Serializeanddeserializwhelper.Serialize(gi);
string nodeci = Serializeanddeserializwhelper.Serialize(ci);
string nodewa = Serializeanddeserializwhelper.Serialize(wa);
string nodevideo = Serializeanddeserializwhelper.Serialize(_video);
string nodeaudio = Serializeanddeserializwhelper.Serialize(_audio);
var TittledataInsertCheck = db.Table<Title>().Where(x => x.ALBUM_TITLE.Contains(nametxt.Text)).FirstOrDefault();
if (TittledataInsertCheck == null)
{
db.Insert(new Title()
{
ALBUM_TITLE = nametxt.Text
});
var TittledataInsert = db.Table<Title>().Where(x => x.ALBUM_TITLE.Contains(nametxt.Text)).FirstOrDefault();
if (TittledataInsert != null)
{
db.Insert(new PAGE()
{
PAGE_BACKGROUND = albumbg,
TITLE_ID = TittledataInsert.ID
});
}
}
else
{
await new MessageDialog("Tittle Already Found Please change tittle").ShowAsync();
return;
}
var PagedataInsert = db.Table<PAGE>().Where(x => x.PAGE_BACKGROUND.Contains(albumbg)).FirstOrDefault();
if (PagedataInsert != null)
{
db.Insert(new CONTENT
{
PAGE_ID = PagedataInsert.ID,
STICKERS = snodesticker,
AUDIO = nodeaudio,
VIDEO = nodevideo,
GALLERY_IMAGES = nodegi,
CAMERA_IMAGES = nodeci,
WOARD_ART = nodewa
});
}
db.Commit();
db.Dispose();
db.Close();
var line = new MessageDialog("Records Inserted");
await line.ShowAsync();
}
Is this the right way to insert the data?
I have a canvas on which I set a background image, and on this background there are images, video, and audio.
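Reading the data back with sqlite-net uses the same Table<T>() queries as the duplicate checks above. The following is only a sketch: it assumes the Title, PAGE, and CONTENT classes shown in the insert code, with ID primary keys, and that the serialized columns are turned back into objects with the counterpart of the Serialize helper.
var dbpath = Path.Combine(Windows.Storage.ApplicationData.Current.LocalFolder.Path, "scrapbook.sqlite");
using (var db = new SQLite.SQLiteConnection(dbpath))
{
    // Find the album by its title, then walk down to its page and content rows.
    var title = db.Table<Title>().Where(x => x.ALBUM_TITLE == nametxt.Text).FirstOrDefault();
    if (title == null) return;

    var page = db.Table<PAGE>().Where(x => x.TITLE_ID == title.ID).FirstOrDefault();
    if (page == null) return;

    var albumBackground = page.PAGE_BACKGROUND;

    var content = db.Table<CONTENT>().Where(x => x.PAGE_ID == page.ID).FirstOrDefault();
    if (content != null)
    {
        // content.STICKERS, content.AUDIO, content.VIDEO, etc. hold the serialized strings;
        // deserialize them with the counterpart of Serializeanddeserializwhelper.Serialize.
    }
}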
I want to insert an image into the SQL Server database from my Windows Forms application.
This question looks like it was trying to ask what I wanted to find out, but it was closed:
Insert image into SQL Server
Here is the code I used to do that.
Modify this code as needed for the table you are going to use, by viewing the design of your database in SQL Server Management Studio:
public static void InsertImage(int inventoryID, int businessID, FileInfo file, string sqlConnection)
{
var list = new List<byte>();
using (var stream = file.Open(FileMode.Open))
{
var data = new byte[stream.Length];
stream.Read(data, 0, data.Length);
list.AddRange(data);
}
var bmp = System.Drawing.Image.FromFile(file.FullName, true);
using (var conn = new SqlConnection(sqlConnection))
{
conn.Open();
var imageId = -1;
var sqlSelect = "SELECT [ImageId] FROM [dbo].[ImageTable] WHERE [InventoryId]=#InventoryId;";
using (var cmd = new SqlCommand(sqlSelect, conn))
{
cmd.Parameters.Add("#InventoryId", System.Data.SqlDbType.Int).Value = inventoryID;
using (var r = cmd.ExecuteReader())
{
if (r.Read())
{
var o = r["ImageId"];
if ((o != null) && (o != DBNull.Value))
{
imageId = (int)o;
}
}
}
}
if (imageId == -1)
{
var sqlCmd = "INSERT INTO [dbo].[ImageTable] " +
"([InventoryId], [ImageFileName], [ImageSize], [ImageWidth], [ImageHeight], [ImageBytes]) " +
"VALUES " +
"(#InventoryId, #ImageFileName, #ImageSize, #ImageWidth, #ImageHeight, #ImageBytes); ";
using (var cmd = new SqlCommand(sqlCmd, conn))
{
cmd.Parameters.Add("#InventoryId", System.Data.SqlDbType.Int).Value = inventoryID;
cmd.Parameters.Add("#ImageFileName", System.Data.SqlDbType.VarChar, 255).Value = file.Name;
cmd.Parameters.Add("#ImageSize", System.Data.SqlDbType.Int).Value = list.Count;
cmd.Parameters.Add("#ImageWidth", System.Data.SqlDbType.SmallInt).Value = bmp.Width;
cmd.Parameters.Add("#ImageHeight", System.Data.SqlDbType.SmallInt).Value = bmp.Height;
cmd.Parameters.Add("#ImageBytes", System.Data.SqlDbType.VarBinary, -1).Value = list.ToArray();
cmd.ExecuteNonQuery();
}
}
}
}
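For reference, the table that the INSERT above targets would look roughly like the one created below. This is only a sketch of an assumed schema, derived from the column names and SqlDbType parameters used in InsertImage; the real ImageTable design in your database may differ.
public static void EnsureImageTable(string sqlConnection)
{
    // Assumed schema matching the columns referenced by InsertImage; adjust types and sizes to your real table.
    const string createSql =
        "IF OBJECT_ID('dbo.ImageTable', 'U') IS NULL " +
        "CREATE TABLE dbo.ImageTable (" +
        "  ImageId INT IDENTITY(1,1) PRIMARY KEY," +
        "  InventoryId INT NOT NULL," +
        "  ImageFileName VARCHAR(255) NOT NULL," +
        "  ImageSize INT NOT NULL," +
        "  ImageWidth SMALLINT NOT NULL," +
        "  ImageHeight SMALLINT NOT NULL," +
        "  ImageBytes VARBINARY(MAX) NOT NULL);";

    using (var conn = new SqlConnection(sqlConnection))
    using (var cmd = new SqlCommand(createSql, conn))
    {
        conn.Open();
        cmd.ExecuteNonQuery();
    }
}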
To run/test the code, I created this helper method:
public static string[] GetImages(string fullFolderPath, string searchPattern)
{
var list = new List<String>();
if (Directory.Exists(fullFolderPath))
{
if (String.IsNullOrEmpty(searchPattern))
{
searchPattern = "*.jpg";
}
var dir = new DirectoryInfo(fullFolderPath);
var files = dir.GetFiles(searchPattern);
for (int i = 0; i < files.Length; i++)
{
InsertImage(i + 1, 1, files[i], _sqlConnection);
list.Add(files[i].FullName);
}
}
return list.ToArray();
}
Now, running it from my Console Application is a simple, single call:
static void Main(string[] args)
{
var list = GetImages(#"C:\inetpub\wwwroot\Ads", "*.jpg");
}