How to convert Handsontable data back into C# class - c#

I am using an AJAX POST to send my column headers and data from a Handsontable back to an .ashx handler.
$.ajax({
type: 'POST',
url: "Scripts/SaveExcelData.ashx",
contentType: "application/json; charset=utf-8",
data: JSON.stringify({"columns": hot.getColHeader(), "rows": hot.getData()}),
success: function (data) {}
});
Currently I am using the following to deserialize the request, but I have not been able to come up with anything that converts the rows into an array of DataBaseRow class objects.
var jsonString = String.Empty;
context.Request.InputStream.Position = 0;
using (var inputStream = new StreamReader(context.Request.InputStream))
{
jsonString = inputStream.ReadToEnd();
var results = JsonConvert.DeserializeObject<dynamic>(jsonString);
var columns = results.columns;
var rows = results.rows;
//use columns here to convert rows into DataBaseRow class
}
columns looks like: {["Col1","Col2","Col3"]}
rows looks like: {[["Val1","Val2","Val3"],["Val1","Val2","Val3"],["Val1","Val2","Val3"]]}
How can I do this?
UPDATE
Instead of trying to convert the dynamic class into the DataBaseRow class, I found I could actually just manually loop through the array values and write them into new instances of the DataBaseRow class.
using (DBEntities edmx = new DBEntities())
{
foreach (var row in rows)
{
DataBaseRow dbr = new DataBaseRow();
edmx.DataBaseRow.Add(dbr);
dbr.LoadedTime = DateTime.Now;
for (int i = 0; i < row.Count; i++)
{
string colval = row[i].ToString();
string colname = columns[i].ToString();
switch (colname)
{
case "Col1":
dbr.DBCol1 = colval;
break;
case "Col2":
dbr.DBCol2 = colval;
break;
case "Col3":
dbr.DBCol3 = colval;
break;
}
}
}
edmx.SaveChanges();
}
This works, but is very slow (see comment for timings). Is there a faster/better way to process this data? (If it matters, I actually have 14 columns that I'm mapping in the switch.)

So the technical answer to my question can be found in the Update I added (just reference the dynamic objects as arrays, don't try to convert them).
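That said, if a typed model is still preferred, a minimal sketch (my own, not from the original code) would deserialize straight into a small DTO instead of dynamic; it assumes the posted JSON shape shown above and uses Newtonsoft.Json and System.Collections.Generic, which are already in use here:
// hypothetical DTO matching the posted JSON: {"columns": [...], "rows": [[...], ...]}
public class HandsontablePayload
{
public List<string> columns { get; set; }
public List<List<object>> rows { get; set; }
}
// deserialize into the typed DTO instead of dynamic
var payload = JsonConvert.DeserializeObject<HandsontablePayload>(jsonString);
var columns = payload.columns;
var rows = payload.rows;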
However, it seems Entity Framework handles saving large datasets poorly. This can be sped up by grouping the saves into chunks and recreating the context for every chunk (see https://stackoverflow.com/a/5942176/266592), as in the sketch below.
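A rough sketch of that chunking idea, with an illustrative batch size; this is my adaptation, not the exact code from the linked answer, and it assumes DBEntities is an EF6 DbContext:
const int batchSize = 1000; // illustrative value
DBEntities edmx = new DBEntities();
edmx.Configuration.AutoDetectChangesEnabled = false; // cheaper Adds while bulk loading
for (int r = 0; r < rows.Count; r++)
{
DataBaseRow dbr = new DataBaseRow();
// ... populate dbr from rows[r] and columns exactly as in the switch above ...
edmx.DataBaseRow.Add(dbr);
if ((r + 1) % batchSize == 0)
{
edmx.SaveChanges();
edmx.Dispose();
edmx = new DBEntities(); // recreate the context so the change tracker stays small
edmx.Configuration.AutoDetectChangesEnabled = false;
}
}
edmx.SaveChanges();
edmx.Dispose();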
I ended up rewriting this to insert the values into a DataTable and then using SqlBulkCopy to save the records to the database.
var jsonString = String.Empty;
context.Request.InputStream.Position = 0;
using (var inputStream = new StreamReader(context.Request.InputStream))
{
jsonString = inputStream.ReadToEnd();
var results = JsonConvert.DeserializeObject<dynamic>(jsonString);
var columns = results.columns;
var rows = results.rows;
var dt = new DataTable();
for (int i = 0; i < columns.Count; i++)
{
dt.Columns.Add(columns[i].ToString());
}
foreach (var row in rows)
{
var datarow = dt.NewRow();
for (int i = 0; i < row.Count; i++)
{
datarow[i] = row[i];
}
dt.Rows.Add(datarow);
}
using (var connection = new SqlConnection(ConnectionString))
{
SqlTransaction transaction = null;
connection.Open();
try
{
transaction = connection.BeginTransaction();
using (var sqlBulkCopy = new SqlBulkCopy(connection, SqlBulkCopyOptions.TableLock, transaction))
{
sqlBulkCopy.DestinationTableName = "TABLENAME";
sqlBulkCopy.BatchSize = 100000;
sqlBulkCopy.BulkCopyTimeout = 0;
foreach (DataColumn col in dt.Columns)
{
sqlBulkCopy.ColumnMappings.Add(col.ColumnName, col.ColumnName);
}
sqlBulkCopy.WriteToServer(dt);
}
transaction.Commit();
}
catch (Exception)
{
// roll back the transaction if the bulk copy fails (note: the exception is swallowed here)
transaction.Rollback();
}
}
}

Related

csvHelper Put values from different lines into one field in Database

I have a .csv file structured like so (the first row is the header row); for each SomeID I need to add the NetCharges together (or subtract if the code calls for it) and put each item into its own column by the SomeCode column.
Here's the file I receive:
SomeID,OrderNumber,Code,NetCharge,Total
23473,30388,LI,126.0000,132.00
96021, 000111, LI, 130.00, 126.00
23473,30388,FU,6.0000,132.00
4571A,10452,LI,4100.0000, 4325.0000
4571A,10452,FU,150.00,4325.0000
4571A,10452,DT,75.00,4325.0000
I need to insert the data to my sql table which is structured like this. This is what I'm aiming for:
ID OrderNumber LICode LICodeValue FUCode FUCodeValue DTCode DTCodeValue Total
23473 30388 LI 126.000 FU 6.0000 NULL NULL 132.0000
4571A 10452 LI 4100.0000 FU 150.0000 DT 75.00 4325.0000
My SomeID will not always be grouped together like the 4571A id is. I basically need to iterate over this file and create one record for each SomeID. I cannot seem to find a way to do this with CsvHelper. I'm using C# and CsvHelper. I have tried this so far, but I cannot get back to the SomeID after passing on to the next one:
using (var reader = new StreamReader( @"C:\testFiles\some.csv" ))
using (var csv = new CsvReader( reader, CultureInfo.InvariantCulture ))
{
var badRecords = new List<string>();
var isRecordBad = false;
csv.Configuration.HasHeaderRecord = true;
csv.Configuration.HeaderValidated = null;
csv.Configuration.IgnoreBlankLines = true;
csv.Configuration.Delimiter = ",";
csv.Configuration.BadDataFound = context =>
{
isRecordBad = true;
badRecords.Add( context.RawRecord );
};
csv.Configuration.MissingFieldFound = ( s, i, context ) =>
{
isRecordBad = true;
badRecords.Add( context.RawRecord );
};
List<DataFile> dataFile = csv.GetRecords<DataFile>().ToList();
//initialize variable
string lastSomeId = "";
if (!isRecordBad)
{
foreach (var item in dataFile)
{
// check if its same record
if (lastSomeId != item.SomeID)
{
MyClass someClass = new MyClass();
lastSomeId = item.SomeID;
//decimal? LI = 0;//was going to use these as vars for calculations not sure I need them???
//decimal? DSC = 0;
//decimal? FU = 0;
someClass.Id = lastSomeId;
someClass.OrdNum = item.OrderNumber;
if (item.Code == "LI")
{
someClass.LICode = item.Code;
someClass.LICodeValue = item.NetCharge;
}
if (item.Code == "DT")
{
someClass.DTCode = item.Code;
someClass.DTCodeValue = item.NetCharge;
}
if (item.Code == "FU")
{
someClass.FUCode = item.Code;
someClass.FUCodeValue = item.NetCharge;
}
someClass.Total = (someClass.LICodeValue + someClass.FUCodeValue);
//check for other values to calculate
//insert record to DB
}
else
{
//Insert into db after maipulation of values
}
}
}
isRecordBad = false;
}//END Using
Any clues would be greatly appreciated. Thank you in advance.
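One way to approach this (a sketch only, assuming DataFile exposes SomeID, OrderNumber, Code, NetCharge and Total properties as named in the question, and that using System.Linq is in scope) is to read all the records and then group them with LINQ, instead of tracking the previous SomeID by hand:
var grouped = dataFile
.GroupBy(d => new { d.SomeID, d.OrderNumber })
.Select(g => new MyClass
{
Id = g.Key.SomeID,
OrdNum = g.Key.OrderNumber,
LICode = g.Any(x => x.Code == "LI") ? "LI" : null,
LICodeValue = g.Where(x => x.Code == "LI").Sum(x => x.NetCharge),
FUCode = g.Any(x => x.Code == "FU") ? "FU" : null,
FUCodeValue = g.Where(x => x.Code == "FU").Sum(x => x.NetCharge),
DTCode = g.Any(x => x.Code == "DT") ? "DT" : null,
DTCodeValue = g.Where(x => x.Code == "DT").Sum(x => x.NetCharge),
Total = g.First().Total
})
.ToList();
// each element of grouped is now one combined record per SomeID/OrderNumber, ready to insert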

How to improve performance of CSV upload via datatable

I have a working solution for uploading a CSV file. Currently, I use the IFormCollection for a user to upload multiple CSV files from a view.
The CSV files are saved as a temp file as follows:
List<string> fileLocations = new List<string>();
foreach (var formFile in files)
{
filePath = Path.GetTempFileName();
if (formFile.Length > 0)
{
using (var stream = new FileStream(filePath, FileMode.Create))
{
await formFile.CopyToAsync(stream);
}
}
fileLocations.Add(filePath);
}
I send the list of file locations to another method (just below). I loop through the file locations and stream the data from the temp files; I then use a DataTable and SqlBulkCopy to insert the data. I currently upload between 50 and 200 files at a time, and each file is around 330 KB. Inserting a hundred files, which is around 30-35 MB of data, takes around 6 minutes.
public void SplitCsvData(string fileLocation, Guid uid)
{
MetaDataModel MetaDatas;
List<RawDataModel> RawDatas;
var reader = new StreamReader(File.OpenRead(fileLocation));
List<string> listRows = new List<string>();
while (!reader.EndOfStream)
{
listRows.Add(reader.ReadLine());
}
var metaData = new List<string>();
var rawData = new List<string>();
foreach (var row in listRows)
{
var rowName = row.Split(',')[0];
bool parsed = int.TryParse(rowName, out int result);
if (parsed == false)
{
metaData.Add(row);
}
else
{
rawData.Add(row);
}
}
//Assigns the vertical header name and value to the object by splitting string
RawDatas = GetRawData.SplitRawData(rawData);
SaveRawData(RawDatas);
MetaDatas = GetMetaData.SplitRawData(rawData);
SaveRawData(RawDatas);
}
This code then passes the objects to the following code to create the DataTable and insert the data.
private DataTable CreateRawDataTable
{
get
{
var dt = new DataTable();
dt.Columns.Add("Id", typeof(int));
dt.Columns.Add("SerialNumber", typeof(string));
dt.Columns.Add("ReadingNumber", typeof(int));
dt.Columns.Add("ReadingDate", typeof(string));
dt.Columns.Add("ReadingTime", typeof(string));
dt.Columns.Add("RunTime", typeof(string));
dt.Columns.Add("Temperature", typeof(double));
dt.Columns.Add("ProjectGuid", typeof(Guid));
dt.Columns.Add("CombineDateTime", typeof(string));
return dt;
}
}
public void SaveRawData(List<RawDataModel> data)
{
DataTable dt = CreateRawDataTable;
var count = data.Count;
for (var i = 1; i < count; i++)
{
DataRow row = dt.NewRow();
row["Id"] = data[i].Id;
row["ProjectGuid"] = data[i].ProjectGuid;
row["SerialNumber"] = data[i].SerialNumber;
row["ReadingNumber"] = data[i].ReadingNumber;
row["ReadingDate"] = data[i].ReadingDate;
row["ReadingTime"] = data[i].ReadingTime;
row["CombineDateTime"] = data[i].CombineDateTime;
row["RunTime"] = data[i].RunTime;
row["Temperature"] = data[i].Temperature;
dt.Rows.Add(row);
}
using (var conn = new SqlConnection(connectionString))
{
conn.Open();
using (SqlTransaction tr = conn.BeginTransaction())
{
using (var sqlBulk = new SqlBulkCopy(conn, SqlBulkCopyOptions.Default, tr))
{
sqlBulk.BatchSize = 1000;
sqlBulk.DestinationTableName = "RawData";
sqlBulk.WriteToServer(dt);
}
tr.Commit();
}
}
}
Is there another way to do this, or a better way to improve performance, so that the upload time is reduced? It can take a long time, and I am seeing ever-increasing memory use, up to around 500 MB.
TIA
You can improve performance by removing the DataTable and reading from the input stream directly.
SqlBulkCopy has a WriteToServer overload that accepts an IDataReader instead of an entire DataTable.
CsvHelper can parse CSV files using a StreamReader as input. It provides CsvDataReader as an IDataReader implementation on top of the CSV data. This allows reading directly from the input stream and writing to SqlBulkCopy.
The following method will read from an IFormFile, parse the stream using CsvHelper and use the CSV's fields to configure a SqlBulkCopy instance:
public async Task ToTable(IFormFile file, string table)
{
using (var stream = file.OpenReadStream())
using (var tx = new StreamReader(stream))
using (var reader = new CsvReader(tx))
using (var rd = new CsvDataReader(reader))
{
var headers = reader.Context.HeaderRecord;
var bcp = new SqlBulkCopy(_connection)
{
DestinationTableName = table
};
//Assume the file headers and table fields have the same names
foreach(var header in headers)
{
bcp.ColumnMappings.Add(header, header);
}
await bcp.WriteToServerAsync(rd);
}
}
This way nothing is ever written to a temp file or cached in memory. The uploaded files are parsed and written to the database directly.
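For illustration only, a hypothetical ASP.NET Core action that calls the method above for each uploaded file (the action signature and table name are assumptions, not from the original post):
[HttpPost]
public async Task<IActionResult> UploadCsvFiles(ICollection<IFormFile> files)
{
foreach (var file in files)
{
// parse each uploaded CSV and bulk copy it straight into the target table
await ToTable(file, "RawData");
}
return Ok();
}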
In addition to @Panagiotis's answer, why don't you interleave your file processing with the file upload? Wrap your file processing logic in an async method, change the loop to a Parallel.ForEach, and process each file as it arrives instead of waiting for all of them.
private static readonly object listLock = new Object(); // only once at class level
List<string> fileLocations = new List<string>();
Parallel.ForEach(files, async (formFile) =>
{
var filePath = Path.GetTempFileName();
if (formFile.Length > 0)
{
using (var stream = new FileStream(filePath, FileMode.Create))
{
await formFile.CopyToAsync(stream);
}
await ProcessFileInToDbAsync(filePath);
}
// Added lock for thread safety of the List
lock (listLock)
{
fileLocations.Add(filePath);
}
});
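One caveat: awaiting inside a Parallel.ForEach body turns the lambda into async void, so the loop can return before the copies finish. A safer sketch of the same idea (my adaptation, reusing the hypothetical ProcessFileInToDbAsync from above) uses Task.WhenAll:
var tasks = files.Select(async formFile =>
{
var filePath = Path.GetTempFileName();
if (formFile.Length > 0)
{
using (var stream = new FileStream(filePath, FileMode.Create))
{
await formFile.CopyToAsync(stream);
}
await ProcessFileInToDbAsync(filePath);
}
return filePath;
});
var fileLocations = (await Task.WhenAll(tasks)).ToList();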
Thanks to @Panagiotis Kanavos, I was able to work out what to do. Firstly, the way I was calling the methods was leaving them in memory. The CSV file I have is in two parts, vertical metadata and then the usual horizontal information, so I needed to split it into two. Saving the uploads as temp files was also causing overhead. It has gone from taking 5-6 minutes to now taking a minute, which for 100 files containing 8,500 rows isn't bad, I suppose.
Calling the method:
public async Task<IActionResult> UploadCsvFiles(ICollection<IFormFile> files, IFormCollection fc)
{
foreach (var f in files)
{
var getData = new GetData(_configuration);
await getData.SplitCsvData(f, uid);
}
return whatever;
}
This is the method doing the splitting:
public async Task SplitCsvData(IFormFile file, string uid)
{
var data = string.Empty;
var m = new List<string>();
var r = new List<string>();
var records = new List<string>();
using (var stream = file.OpenReadStream())
using (var reader = new StreamReader(stream))
{
while (!reader.EndOfStream)
{
var line = reader.ReadLine();
var header = line.Split(',')[0].ToString();
bool parsed = int.TryParse(header, out int result);
if (!parsed)
{
m.Add(line);
}
else
{
r.Add(line);
}
}
}
//TODO: Validation
//This splits the list into the Meta data model. This is just a single object, with static fields.
var metaData = SplitCsvMetaData.SplitMetaData(m, uid);
DataTable dtm = CreateMetaData(metaData);
var serialNumber = metaData.LoggerId;
await SaveMetaData("MetaData", dtm);
//
var lrd = new List<RawDataModel>();
foreach (string row in r)
{
lrd.Add(new RawDataModel
{
Id = 0,
SerialNumber = serialNumber,
ReadingNumber = Convert.ToInt32(row.Split(',')[0]),
ReadingDate = Convert.ToDateTime(row.Split(',')[1]).ToString("yyyy-MM-dd"),
ReadingTime = Convert.ToDateTime(row.Split(',')[2]).ToString("HH:mm:ss"),
RunTime = row.Split(',')[3].ToString(),
Temperature = Convert.ToDouble(row.Split(',')[4]),
ProjectGuid = uid.ToString(),
CombineDateTime = Convert.ToDateTime(row.Split(',')[1] + " " + row.Split(',')[2]).ToString("yyyy-MM-dd HH:mm:ss")
});
}
await SaveRawData("RawData", lrd);
}
I then use a DataTable for the metadata (which takes 20 seconds for 100 files), as I map the field names to the columns.
public async Task SaveMetaData(string table, DataTable dt)
{
using (SqlBulkCopy sqlBulk = new SqlBulkCopy(_configuration.GetConnectionString("DefaultConnection"), SqlBulkCopyOptions.Default))
{
sqlBulk.DestinationTableName = table;
await sqlBulk.WriteToServerAsync(dt);
}
}
I then use FastMember for the large raw data part, which is more like a traditional CSV.
public async Task SaveRawData(string table, IEnumerable<LogTagRawDataModel> lrd)
{
using (SqlBulkCopy sqlBulk = new SqlBulkCopy(_configuration.GetConnectionString("DefaultConnection"), SqlBulkCopyOptions.Default))
using (var reader = ObjectReader.Create(lrd, "Id","SerialNumber", "ReadingNumber", "ReadingDate", "ReadingTime", "RunTime", "Temperature", "ProjectGuid", "CombineDateTime"))
{
sqlBulk.DestinationTableName = table;
await sqlBulk.WriteToServerAsync(reader);
}
}
I am sure this can be improved on, but for now, this works really well.

C# Reading CSV file with SQL conditions

I am using the CsvHelper library to read a CSV file, and I can successfully read the file with it. However, I cannot use a SQL-like condition to filter values. How can I do that without using SQL Server? I am really stuck on it.
It was very easy with the Pandas and pandasql libraries in Python, but it seems much harder in C#.
My Code:
public static void Main(string[] args)
{
var fileInfo = new FileInfo(@"filePath");
using (TextReader reader = fileInfo.OpenText())
using (var csvReader = new CsvReader(reader))
{
csvReader.Configuration.Delimiter = ",";
csvReader.Configuration.HasHeaderRecord = false;
csvReader.Configuration.IgnoreQuotes = true;
csvReader.Configuration.TrimFields = true;
csvReader.Configuration.WillThrowOnMissingField = false;
while (csvReader.Read())
{
var myStrinVar = csvReader.GetField<string>(0);
Console.Write(myStrinVar); //SELECT * FROM table...
}
}
}
I would suggest using LINQ to filter your results.
https://msdn.microsoft.com/en-us/library/bb397906.aspx
Say you have some class MyClass that you can deserialize the lines in your file into.
For example:
public class MyClass
{
public int ID { get; set; }
}
var records = csv.GetRecords<MyClass>().ToList();
var filtered = records.Where(r => r.ID >= 10);
That example is a bit contrived but you can use any boolean expression you like in the where clause.
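For instance, a compound filter roughly equivalent to a SQL WHERE clause could look like this (the Name property is illustrative and not part of the MyClass above):
// SELECT * FROM table WHERE ID >= 10 AND Name LIKE 'A%'
var filtered = records.Where(r => r.ID >= 10 && r.Name.StartsWith("A"));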
I know this is too late for the OP, but the issue with the accepted answer is that you have to read the entire result set into memory, which may not be tenable for large files. Also, you can extend the code below to get the top N rows without having to read the entire CSV if you find matches early in the file.
public static void Main(string[] args)
{
var fileInfo = new FileInfo(@"filePath");
var where = ""; //Code to set up where clause part of query goes here
using (TextReader reader = fileInfo.OpenText())
using (var csvReader = new CsvReader(reader))
{
csvReader.Configuration.Delimiter = ",";
csvReader.Configuration.HasHeaderRecord = false;
csvReader.Configuration.IgnoreQuotes = true;
csvReader.Configuration.TrimFields = true;
csvReader.Configuration.WillThrowOnMissingField = false;
DataTable dt = null;
while (csvReader.Read())
{
//Use the first row to initialize the columns.
if (dt == null)
{
dt = new DataTable();
for (var i = 0; i < csvReader.FieldCount; i++)
{
var fieldType = csvReader.GetFieldType(i);
DataColumn dc;
if (fieldType.IsNullableType())
{
dc = new DataColumn(csvReader.GetName(i), Nullable.GetUnderlyingType(fieldType));
dc.AllowDBNull = true;
}
else
dc = new DataColumn(csvReader.GetName(i), csvReader.GetFieldType(i));
dt.Columns.Add(dc);
}
}
//Map DataReader to DataRow
var newRow = dt.Rows.Add();
foreach(DataColumn col in dt.Columns)
{
newRow[col.ColumnName] = csvReader[col.ColumnName];
}
//Create a temporary DataView and filter it with the where clause.
DataView dv = new DataView(dt);
dv.RowFilter = where;
var data = dv.Count > 0 ? dv[0] : null;
if(data != null)
{
//Row in here matches your where clause.
//Code to read this row or do something with it.
}
//Empty the temporary data table.
dt.Rows.Clear();
}
}
}
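The where string in this snippet uses DataView.RowFilter syntax rather than full SQL; for example (column names are illustrative), the setup line near the top could be:
// RowFilter supports comparisons, AND/OR, LIKE and IN; string literals go in single quotes
var where = "Col1 = 'SomeValue' AND Col3 > 5";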

Set "NULL" in Empty cells in CsvReader

Here is my code:
using (System.Net.WebResponse tmpRes = tmpReq.GetResponse())
{
using (System.IO.Stream tmpStream = tmpRes.GetResponseStream())
{
using (System.IO.TextReader tmpReader = new System.IO.StreamReader(tmpStream))
{
string fileContents = tmpReader.ReadToEnd();
for (int i = 0; i < fileContents.Length; i++)
{
if (fileContents[i] == "")
{
fileContents[i] = "null";
}
}
using (Stream s = GenerateStreamFromString(fileContents))
{}
}
}
}
This shows the error "Cannot implicitly convert type 'string' to 'char'". Is there any other way to set "NULL" in empty fields with CsvReader?
You are not using the CsvReader at all. You are also not splitting the string by your delimiter to get "cells". However, you can load a DataTable from the CsvReader and modify that.
Here's an example presuming tab as delimiter:
var tblCSV = new DataTable();
using (System.Net.WebResponse tmpRes = tmpReq.GetResponse())
using (System.IO.Stream tmpStream = tmpRes.GetResponseStream())
using (System.IO.TextReader tmpReader = new System.IO.StreamReader(tmpStream))
using (var csv = new CsvReader(tmpReader, true, '\t', '"', '\0', '\0', ValueTrimmingOptions.All))
{
csv.MissingFieldAction = MissingFieldAction.ParseError;
csv.DefaultParseErrorAction = ParseErrorAction.RaiseEvent;
csv.ParseError += csv_ParseError;
csv.SkipEmptyLines = true;
// load into DataTable
tblCSV.Load(csv, LoadOption.OverwriteChanges, csvTable_FillError);
}
Now loop the rows and columns and modify them accordingly:
foreach(DataRow row in tblCSV.Rows)
{
foreach(DataColumn col in tblCSV.Columns)
{
if(string.IsNullOrWhiteSpace(row.Field<string>(col)))
row.SetField(col, "null");
}
}
Update related to your comment:
I want to add a NULL value in empty cells in the SQL database when the CSV data is saved to the database. Is there any other way?
You could simply use the loop above to update your table instead:
using (var con = new SqlConnection("ConnectionString"))
{
con.Open();
foreach (DataRow row in tblCSV.Rows)
{
using (var cmd = new SqlCommand("INSERT INTO table(Col1,Col2) VALUES (@Col1,@Col2);", con))
{
string col1 = row.Field<string>("Col1");
if (string.IsNullOrWhiteSpace(col1))
col1 = null;
string col2 = row.Field<string>("Col2");
if (string.IsNullOrWhiteSpace(col2))
col2 = null;
cmd.Parameters.AddWithValue("#col1", col1);
cmd.Parameters.AddWithValue("#col2", col2);
int inserted = cmd.ExecuteNonQuery();
}
}
}
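One caveat with the loop above: AddWithValue with a null string leaves the parameter value unset, so SQL Server reports the parameter as not supplied instead of inserting NULL. Coalescing to DBNull.Value avoids that, e.g.:
// send SQL NULL explicitly when the string is null
cmd.Parameters.AddWithValue("@Col1", (object)col1 ?? DBNull.Value);
cmd.Parameters.AddWithValue("@Col2", (object)col2 ?? DBNull.Value);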

Printing Json data as Html Table

I am using a PageMethod to retrieve table data in JSON format using the following C# code:
[WebMethod]
public static string GetJson2()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
JsonWriter jsonWriter = new JsonTextWriter(sw);
try
{
string connstr = "server=localhost;user=root;database=cm_users;port=3306;password=root";
MySqlConnection conn = new MySqlConnection(connstr);
conn.Open();
string sql = "select * from users";
MySqlCommand cmd = new MySqlCommand(sql, conn);
MySqlDataReader reader = cmd.ExecuteReader();
while (reader.Read())
{
int fieldcount = reader.FieldCount; // count how many columns are in the row
object[] values = new object[fieldcount]; // storage for column values
reader.GetValues(values); // extract the values in each column
jsonWriter.WriteStartObject();
for (int index = 0; index < fieldcount; index++)
{ // iterate through all columns
jsonWriter.WritePropertyName(reader.GetName(index)); // column name
jsonWriter.WriteValue(values[index]); // value in column
}
jsonWriter.WriteEndObject();
}
reader.Close();
}
catch (MySqlException mySqlException)
{ // exception
return mySqlException + "error";
}
// END of method
// the above method returns sb and another uses it to return as HTTP Response...
string jsonString = sb.ToString();
return jsonString;
}
Now I am catching the output of the method in an HTML page using JavaScript.
Using an Ajax call in JavaScript, I am consuming the returned string, which is in JSON format.
function getUsers() {
$.ajax({
type: "POST",
url: "http://{address}:8078/Default.aspx/GetJson2",
data: "{}",
contentType: "application/json",
dataType: "json",
success: function (msg) {
$("#Result").text(msg.d);
var myTable1 = '';
myTable1 += '<table id="myTable1" cellspacing=0 cellpadding=2 border=1>';
myTable1 += "<tr><td><b>ID</b></td><td><b>UserName</b></td><td><b>Password</b></td><td><b>Email</b></td></tr>";
$.each(msg, function(i,v){
alert(i + v);
myTable1 += "<tr><td>" + v.id + "</td><td>" + v.username + "</td><td>" + v.password + "</td><td>" + v.Email + "</td></tr>";
});
$("#user_tb1").html(myTable1) ;
},
error: function () {
alert("error");
}
});
};
I am getting the JSON string as
{"id":1,"username":"karthik","password":"karthik","Email":"karthikdheeraj@gmail.com"}{"id":2,"username":"Lohith","password":"Lohith","Email":"lohith@cnonymn.com"}
and the HTML as a table structure in which each cell is filled with "undefined".
What might be the issue in the above code?
It looks like the JSON being retrieved from the server is incorrect; it's not an array of objects.
The correct format should be:
[
{"id":1,"username":"karthik","password":"karthik","Email":"karthikdheeraj#gmail.com"},
{"id":2,"username":"Lohith","password":"Lohith","Email":"lohith#cnonymn.com"}
]
Here's a plnkr showing your table filling code working with correctly formatted data
There's something up with the JSON that you are getting back. The proper format needs to be:
var json = [{
"id": 1,
"username": "karthik",
"password": "karthik",
"Email": "karthikdheeraj#gmail.com"
}, {
"id": 2,
"username": "Lohith",
"password": "Lohith",
"Email": "lohith#cnonymn.com"
}];
Below is a fiddle I created showing that the loop now alerts the username properly.
http://jsfiddle.net/77YBq/
After more investigation:
To drill further into this issue: I believe the root of your JSON problem, if the JsonWriter documentation is correct, is that your server code needs to have
jsonWriter.WriteStartArray(); // Starts Json Array notation;
// This is your existing code
//================================================================================
while (reader.Read())
{
int fieldcount = reader.FieldCount; // count how many columns are in the row
object[] values = new object[fieldcount]; // storage for column values
reader.GetValues(values); // extract the values in each column
jsonWriter.WriteStartObject();
for (int index = 0; index < fieldcount; index++)
{ // iterate through all columns
jsonWriter.WritePropertyName(reader.GetName(index)); // column name
jsonWriter.WriteValue(values[index]); // value in column
}
jsonWriter.WriteEndObject();
}
reader.Close();
//================================================================================
// End of your existing code
jsonWriter.WriteEndArray(); // Ends Json Array notation;
The JsonTextWriter is not intended to be used in the way you are using it.
You should take advantage of a serialization library so that you aren't writing code to serialize JSON.
Here is a solution that uses JSON.NET.
Include the package at http://json.codeplex.com/ in your solution.
Add this using statement to your file:
using Newtonsoft.Json;
Add a class to map your records to.
public class User{
... properties here
}
[WebMethod]
public static string GetJson2()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
JsonWriter jsonWriter = new JsonTextWriter(sw);
var users = new List<User>();
try
{
string connstr = "server=localhost;user=root;database=cm_users;port=3306;password=root";
MySqlConnection conn = new MySqlConnection(connstr);
conn.Open();
string sql = "select * from users";
MySqlCommand cmd = new MySqlCommand(sql, conn);
MySqlDataReader reader = cmd.ExecuteReader();
while (reader.Read())
{
int fieldcount = reader.FieldCount; // count how many columns are in the row
object[] values = new object[fieldcount]; // storage for column values
reader.GetValues(values); // extract the values in each column
users.Add(new User { id = reader["id"], username = reader["username"] .. });
}
reader.Close();
}
catch (MySqlException mySqlException)
{ // exception
return mySqlException + "error";
}
return JsonConvert.SerializeObject(users);
}
You should also consider naming your properties id, username, etc. as Id, Username, etc. so that you follow the standard C# naming conventions.
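A small sketch of what that could look like while keeping the JSON property names the JavaScript above already expects (this uses Json.NET's JsonProperty attribute; the class shape is inferred from the columns shown earlier):
public class User
{
[JsonProperty("id")]
public int Id { get; set; }
[JsonProperty("username")]
public string Username { get; set; }
[JsonProperty("password")]
public string Password { get; set; }
[JsonProperty("Email")]
public string Email { get; set; }
}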
