I add rows to the configs table like this, in whatever form I want:
DataRow row2 = DataAccess.Instance.configs.NewRow();
row2["Nome"] = "abrirficheiro";
row2["Valor"] = Convert.ToString(Variables.abrirficheiro);
DataAccess.Instance.configs.Rows.Add(row2);
but I want to keep the row at index 0 (the first one) untouched. Any idea how it's done?
Edit: I've been working on it for the past hour, still unsuccessful.
The table class code looks like this:
public sealed class DataAccess
{
    static readonly DataAccess instance = new DataAccess();

    //** add tables here
    public DataTable configs { get; private set; }

    // Explicit static constructor to tell C# compiler
    // not to mark type as beforefieldinit
    static DataAccess()
    {
    }

    DataAccess()
    {
        this.configs = new DataTable("configs");
        // add the table instance itself, not a new empty table named "configs"
        TabelasSET.Tables.Add(configs);
        configs.Columns.Add("Nome");
        configs.Columns.Add("Valor");
    }

    public static DataAccess Instance
    {
        get
        {
            return instance;
        }
    }
}
Solved: I modified the code this way:
DataRow row2 = DataAccess.Instance.configs.NewRow();
row2["Nome"] = "abrirficheiro";
row2["Valor"] = Convert.ToString(Variables.abrirficheiro);
DataAccess.Instance.configs.Rows.InsertAt(row2, 2);
DataAccess.Instance.configs.Rows.RemoveAt(1);
//second set
DataRow row3 = DataAccess.Instance.configs.NewRow();
row3["Nome"] = "mantergravacao";
row3["Valor"] = Convert.ToString(Variables.mantergravacao);
DataAccess.Instance.configs.Rows.InsertAt(row3, 3);
DataAccess.Instance.configs.Rows.RemoveAt(2);
And so on. Basically, you've got to insert first, then remove. I'm sure there are other ways, but this has been working flawlessly.
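For completeness, a tidier route (a sketch with a hypothetical helper name, not tested against your project) is to update the existing row in place when the Nome already exists, which never disturbs row 0 or any other index:

static void UpsertConfig(DataTable table, string nome, string valor, int index)
{
    // update in place if a row with this Nome already exists; indexes stay untouched
    foreach (DataRow existing in table.Rows)
    {
        if ((string)existing["Nome"] == nome)
        {
            existing["Valor"] = valor;
            return;
        }
    }
    // otherwise insert a fresh row at the requested position;
    // rows at or after that index shift down by one
    DataRow row = table.NewRow();
    row["Nome"] = nome;
    row["Valor"] = valor;
    table.Rows.InsertAt(row, index);
}

Called as, e.g., UpsertConfig(DataAccess.Instance.configs, "abrirficheiro", Convert.ToString(Variables.abrirficheiro), 1);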
Try this one:
DataAccess.Instance.configs.Rows.InsertAt(row2, 1);
DataRow row2 = DataAccess.Instance.configs.NewRow();
row2["Nome"] = "abc";
row2["Valor"] = "123";
DataAccess.Instance.configs.Rows.InsertAt(row2, 1);

DataRow row3 = DataAccess.Instance.configs.NewRow();
row3["Nome"] = "zyx";
row3["Valor"] = "789";
DataAccess.Instance.configs.Rows.InsertAt(row3, 2);
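Rows.InsertAt places the new row at the given index and shifts the row currently there (and everything after it) down by one, so the row at index 0 is never touched as long as you insert at index 1 or higher.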
Related
I'm trying to generate a dynamic LINQ query based on a DataTable returned to me. The column names in the DataTable will change, but I will know which ones I want to total and which ones I want to group by.
I can get this to work with loops and by writing the output to a variable, then recasting the parts back into a DataTable, but I'm hoping there is a more elegant way of doing this.
// C# (pseudocode from the question, tidied up)
DataTable dt = new DataTable();
dt.Columns.Add("DynamicData1");
dt.Columns.Add("DynamicData2");
dt.Columns.Add("DynamicCount");
In this case the columns are LastName, FirstName, Age. I want to total ages by the LastName, FirstName columns (yes, both in the group by). So one of my parameters would specify GroupBy = LastName, FirstName and another TotalBy = Age. The next query may return different column names.
// rows (pseudocode)
dt.Rows.Add("Smith", "John", 10);
dt.Rows.Add("Smith", "John", 11);
dt.Rows.Add("Smith", "Sarah", 8);
Given these different potential column names, I'm looking to generate a LINQ query that creates a generic group-by and total output.
Result:
LastName, FirstName, AgeTotal
Smith, John, 21
Smith, Sarah, 8
If you use a simple converter to LINQ you can achieve that easily.
Here is some quick data generation I did for the sample:
// create dummy table
var dt = new DataTable();
dt.Columns.Add("LastName", typeof(string));
dt.Columns.Add("FirstName", typeof(string));
dt.Columns.Add("Age", typeof(int));
// action to create the records easily
var addData = new Action<string, string, int>((ln, fn, age) =>
{
    var dr = dt.NewRow();
    dr["LastName"] = ln;
    dr["FirstName"] = fn;
    dr["Age"] = age;
    dt.Rows.Add(dr);
});
// add 3 datarows records
addData("Smith", "John", 10);
addData("Smith", "John", 11);
addData("Smith", "Sarah", 8);
This is how to use my simple transformation class:
// create a LINQ version of the table
var lqTable = new LinqTable(dt);
// make the group-by query
var groupByNames = lqTable.Rows.GroupBy(row => row["LastName"].ToString() + "-" + row["FirstName"].ToString()).ToList();
// for each group create a brand new LinqRow
var linqRows = groupByNames.Select(grp =>
{
    // get all items, so we can use the first item for the last and first name and sum the age at the same time
    var items = grp.ToList();
    // return a new LinqRow
    return new LinqRow()
    {
        Fields = new List<LinqField>()
        {
            new LinqField("LastName", items[0]["LastName"].ToString()),
            new LinqField("FirstName", items[0]["FirstName"].ToString()),
            new LinqField("Age", items.Sum(item => Convert.ToInt32(item["Age"]))),
        }
    };
}).ToList();
// create a new LinqTable, since it handles the DataTable format, and transform it directly
var finalTable = new LinqTable() { Rows = linqRows }.AsDataTable();
And finally, here are the custom classes that are used:
public class LinqTable
{
    public LinqTable()
    {
    }

    public LinqTable(DataTable sourceTable)
    {
        LoadFromTable(sourceTable);
    }

    public List<LinqRow> Rows = new List<LinqRow>();

    public List<string> Columns
    {
        get
        {
            var columns = new List<string>();
            if (Rows != null && Rows.Count > 0)
            {
                Rows[0].Fields.ForEach(field => columns.Add(field.Name));
            }
            return columns;
        }
    }

    public void LoadFromTable(DataTable sourceTable)
    {
        sourceTable.Rows.Cast<DataRow>().ToList().ForEach(row => Rows.Add(new LinqRow(row)));
    }

    public DataTable AsDataTable()
    {
        var dt = new DataTable("data");
        if (Rows != null && Rows.Count > 0)
        {
            Rows[0].Fields.ForEach(field =>
            {
                dt.Columns.Add(field.Name, field.DataType);
            });
            Rows.ForEach(row =>
            {
                var dr = dt.NewRow();
                row.Fields.ForEach(field => dr[field.Name] = field.Value);
                dt.Rows.Add(dr);
            });
        }
        return dt;
    }
}
public class LinqRow
{
    public List<LinqField> Fields = new List<LinqField>();

    public LinqRow()
    {
    }

    public LinqRow(DataRow sourceRow)
    {
        sourceRow.Table.Columns.Cast<DataColumn>().ToList().ForEach(col => Fields.Add(new LinqField(col.ColumnName, sourceRow[col], col.DataType)));
    }

    public object this[int index]
    {
        get
        {
            return Fields[index].Value;
        }
        set
        {
            Fields[index].Value = value;
        }
    }

    public object this[string name]
    {
        get
        {
            return Fields.Find(f => f.Name == name).Value;
        }
        set
        {
            var fieldIndex = Fields.FindIndex(f => f.Name == name);
            if (fieldIndex >= 0)
            {
                Fields[fieldIndex].Value = value;
            }
        }
    }

    public DataTable AsSingleRowDataTable()
    {
        var dt = new DataTable("data");
        if (Fields != null && Fields.Count > 0)
        {
            Fields.ForEach(field =>
            {
                dt.Columns.Add(field.Name, field.DataType);
            });
            var dr = dt.NewRow();
            Fields.ForEach(field => dr[field.Name] = field.Value);
            dt.Rows.Add(dr);
        }
        return dt;
    }
}
public class LinqField
{
    public Type DataType;
    public object Value;
    public string Name;

    public LinqField(string name, object value, Type dataType)
    {
        DataType = dataType;
        Value = value;
        Name = name;
    }

    public LinqField(string name, object value)
    {
        DataType = value.GetType();
        Value = value;
        Name = name;
    }

    public override string ToString()
    {
        return Value.ToString();
    }
}
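Note that the group-by columns are hard-coded in the query above; to cover the dynamic part of the question, you could build the grouping key from a string[] of column names instead, e.g. row => string.Join("-", groupBy.Select(c => row[c].ToString())).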
I think I'd just use a dictionary:
public Dictionary<string, int> GroupTot(DataTable dt, string[] groupBy, string tot)
{
    var d = new Dictionary<string, int>();
    foreach (DataRow ro in dt.Rows)
    {
        string key = "";
        foreach (string col in groupBy)
            key += (string)ro[col] + '\n';
        if (!d.ContainsKey(key))
            d[key] = 0;
        d[key] += (int)ro[tot];
    }
    return d;
}
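A quick usage sketch (my addition; the dictionary key is the group columns joined by '\n'):

var totals = GroupTot(dt, new[] { "LastName", "FirstName" }, "Age");
foreach (var pair in totals)
{
    // split the composite key back apart for display
    var parts = pair.Key.TrimEnd('\n').Split('\n');
    Console.WriteLine(parts[0] + ", " + parts[1] + " = " + pair.Value);
}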
If you want the total on each row, we could get cute and create a column that is an array of one int instead of an int:
public void GroupTot(DataTable dt, string[] groupBy, string tot)
{
    var d = new Dictionary<string, int>();
    var dc = dt.Columns.Add("Total_" + tot, typeof(int[]));
    foreach (DataRow ro in dt.Rows)
    {
        string key = "";
        foreach (string col in groupBy)
            key += (string)ro[col] + '\n'; // build a grouping key from first and last name
        if (!d.ContainsKey(key))           // have we seen this name pair before?
            d[key] = new int[1];           // no we haven't; set up a tracker for this first+last name's total
        d[key][0] += (int)ro[tot];         // add to the total
        ro[dc] = d[key];                   // link the row to the total tracker
    }
}
At the end of the operation every row will have an array of one int in the "Total_age" column that represents the total for that First+Last name. The reason I used int[] rather than int is that int is a value type, whereas int[] is a reference type. As the table is being iterated, each row gets assigned a reference to an int[], so rows with the same First+Last name end up with their int[] references pointing to the same object in memory, and incrementing a later one increments all the earlier ones too (every "John Smith" row's total column holds a reference to the same int[]). If we'd made the column an int type, every row would hold a different counter, because every time we say ro[dc] = d[key] it would copy the current value of d[key]'s int into ro[dc]'s int. Any reference type would do for this trick to work, but value types wouldn't. If you wanted your column to be a value type, you'd have to iterate the table again, or keep two dictionaries, one mapping DataRow -> total, and iterate its keys, assigning the totals back into the rows.
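If you do want the column to be a plain int, the two-pass variant hinted at above might look like this (a sketch under the same assumptions as the code above):

public void GroupTotTwoPass(DataTable dt, string[] groupBy, string tot)
{
    var totals = new Dictionary<string, int>();
    var dc = dt.Columns.Add("Total_" + tot, typeof(int));
    // first pass: accumulate the totals per composite key
    foreach (DataRow ro in dt.Rows)
    {
        string key = "";
        foreach (string col in groupBy)
            key += (string)ro[col] + '\n';
        if (!totals.ContainsKey(key))
            totals[key] = 0;
        totals[key] += (int)ro[tot];
    }
    // second pass: copy the finished totals into the value-type column
    foreach (DataRow ro in dt.Rows)
    {
        string key = "";
        foreach (string col in groupBy)
            key += (string)ro[col] + '\n';
        ro[dc] = totals[key];
    }
}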
I have a DataGridView used for displaying the data for a custom class, foo.
public class foo
{
    public string header { get; set; }
    public int value { get; set; }
    public bool isHidden { get; set; }
}
foo gets read from the database, processed from a JSON string, and then inserted into the DataGridView for display to the user.
List<foo> listOfFoo = new List<foo>();
DataTable fooTable = new DataTable();
myDataGridView.DataSource = fooTable;
private void getFoo()
{
    JavaScriptSerializer js = new JavaScriptSerializer();
    string getData = myWebClient.DownloadString(/*url of server*/);
    Dictionary<string, object> fooArg = (Dictionary<string, object>)js.DeserializeObject(getData);
    // named "foos" so the local doesn't shadow the type name foo
    object[] foos = (object[])fooArg["foo"];
    for (int i = 0; i < foos.Length; i++)
    {
        Dictionary<string, object> fooStep = (Dictionary<string, object>)foos[i];
        foo temp = new foo();
        temp.header = (string)fooStep["header"];
        temp.value = (int)fooStep["value"];
        temp.isHidden = (bool)fooStep["isHidden"];
        listOfFoo.Add(temp);
    }
    drawFoo();
}
private void drawFoo()
{
    fooTable.Rows.Clear();
    for (int i = 0; i < listOfFoo.Count; i++)
    {
        // myDataGridView has two columns, header and value. isHidden is not shown in the table itself
        foo f = listOfFoo[i];
        fooTable.Rows.Add(f.header, f.value);
    }
}
The problem I'm having is that I'm using the index of the row to preserve the order of the foos (the DataGridView is not sortable). If I modified my drawFoo function as below, would the indexing be preserved? I have an insert function that inserts a new foo into the list below the one currently selected in the DataGridView. Would that mess up the indexing, considering that when I post listOfFoo back to the server it has to be in the appropriate order? Is there a better way to handle this? I'm open to suggestions. Thanks, people smarter than I am!
private void drawFoo()
{
    fooTable.Rows.Clear();
    for (int q = 0; q < listOfFoo.Count; q++)
    {
        // myDataGridView has two columns, header and value. isHidden is not shown in the table itself
        foo i = listOfFoo.ElementAt(q);
        fooTable.Rows.Add(i.header, i.value);
        if (i.isHidden)
            myDataGridView.Rows[q].Visible = false;
    }
}
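For what it's worth, since the grid is redrawn entirely from listOfFoo, order is preserved as long as every change goes through the list first and drawFoo runs afterwards. The insert you describe could look roughly like this (a sketch with a hypothetical method name, assuming grid row indexes map one-to-one to list indexes, which holds because hidden rows still occupy an index):

private void insertFooBelowSelection(foo newFoo)
{
    // rows are drawn in list order, so the selected grid row index
    // is also the index of the selected foo in the list
    int selectedIndex = myDataGridView.CurrentCell.RowIndex;
    listOfFoo.Insert(selectedIndex + 1, newFoo);
    drawFoo(); // redraw so grid indexes and list indexes line up again
}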
Suppose I have a simple class, and list of its instances:
public class Class1
{
    public int a;
    public int b;
}

List<Class1> l = new List<Class1>();
l.AddRange(new[] { new Class1 { a = 1, b = 2 }, new Class1 { a = 3, b = 4 } });
How do I bind a DataGridView with columns for a and b to it?
I've found answers, but not any that work. This should be simple, but I can't figure it out.
Assuming that you have columns colA and colB, set their DataPropertyName property and then set the grid's DataSource:
colA.DataPropertyName = "a";
colB.DataPropertyName = "b";
grid.DataSource = l;
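One caveat (my addition): Windows Forms data binding only sees public properties, not public fields, so as posted Class1 would bind nothing. Making a and b properties fixes that, and a BindingList<Class1> additionally makes later insertions show up automatically:

public class Class1
{
    // auto-properties instead of the original public fields, so binding can see them
    public int a { get; set; }
    public int b { get; set; }
}

// System.ComponentModel.BindingList raises list-changed events the grid listens to
var list = new BindingList<Class1>
{
    new Class1 { a = 1, b = 2 },
    new Class1 { a = 3, b = 4 },
};
grid.AutoGenerateColumns = false; // colA/colB were mapped by hand above
grid.DataSource = list;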
You can do it like this:
DataTable dt = new DataTable();
dt.Columns.Add("a");
dt.Columns.Add("b");
foreach (var v in l)
{
    dt.Rows.Add(v.a, v.b);
}
dgv.DataSource = dt;
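Keep in mind this copies the values into a detached DataTable, so edits made in the grid won't flow back into your List<Class1>; it is display-only.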
I have a CSV that is delivered to my application from various sources. The CSV will always have the same number of columns, and the header values for the columns will always be the same.
However, the columns may not always be in the same order.
Day 1 CSV may look like this
ID,FirstName,LastName,Email
1,John,Lennon,jlennon@applerecords.com
2,Paul,McCartney,macca@applerecords.com
Day 2 CSV may look like this
Email,FirstName,ID,LastName
resident1@friarpark.com,George,3,Harrison
ringo@allstarrband.com,Ringo,4,Starr
I want to read in the header row for each file and have a simple mechanism for associating each "column" of data with the associated property I have defined in my class.
I know I can use selection statements to figure it out, but that seems like a "bad" way to handle it.
Is there a simple way to map "columns" to properties using a dictionary or class at runtime?
Use a Dictionary to map column heading text to column position.
Hard-code mapping of column heading text to object property.
Example:
// Parse first line of text to add column heading strings and positions to your dictionary
...
// Parse data row into an array, indexed by column position
...
// Assign data to object properties
x.ID = row[myDictionary["ID"]];
x.FirstName = row[myDictionary["FirstName"]];
...
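Fleshed out, that approach might look like the following (a sketch; Person and the file handling are stand-ins for your own types):

class Person
{
    public string ID { get; set; }
    public string FirstName { get; set; }
    public string LastName { get; set; }
    public string Email { get; set; }
}

static List<Person> LoadPeople(string path)
{
    var lines = File.ReadAllLines(path);

    // map column heading text to column position using the header row
    var myDictionary = new Dictionary<string, int>();
    var headers = lines[0].Split(',');
    for (int i = 0; i < headers.Length; i++)
        myDictionary[headers[i]] = i;

    // hard-coded mapping of heading text to object property
    var result = new List<Person>();
    foreach (var line in lines.Skip(1))
    {
        var row = line.Split(',');
        result.Add(new Person
        {
            ID = row[myDictionary["ID"]],
            FirstName = row[myDictionary["FirstName"]],
            LastName = row[myDictionary["LastName"]],
            Email = row[myDictionary["Email"]],
        });
    }
    return result;
}

(This assumes using System.Collections.Generic, System.IO and System.Linq, and that no field contains an embedded comma; a real CSV parser handles the quoting cases.)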
You don't need a design pattern for this purpose.
http://www.codeproject.com/Articles/9258/A-Fast-CSV-Reader
I have used this reader; it is pretty good, and it offers indexing such as row["firstname"] or row["id"], which you can use to parse the fields and create your objects.
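If I remember its API correctly, usage looks roughly like this (a sketch; CsvReader comes from the LumenWorks library behind that article, and the constructor's second argument says the file has a header row):

using (var csv = new CsvReader(new StreamReader("day2.csv"), true))
{
    while (csv.ReadNextRecord())
    {
        // fields are addressed by header name, so the column order doesn't matter
        string id = csv["ID"];
        string firstName = csv["FirstName"];
        // ... build your object from the remaining fields here
    }
}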
I have parsed both CSV files using Microsoft.VisualBasic.FileIO.TextFieldParser and populated a DataTable from them:
DataTable dt;

private void button1_Click(object sender, EventArgs e)
{
    dt = new DataTable();
    ParseCSVFile("day1.csv");
    ParseCSVFile("day2.csv");
    dataGridView1.DataSource = dt;
}

private void ParseCSVFile(string sFileName)
{
    var dIndex = new Dictionary<string, int>();
    using (TextFieldParser csvReader = new TextFieldParser(sFileName))
    {
        csvReader.Delimiters = new string[] { "," };
        var colFields = csvReader.ReadFields();
        for (int i = 0; i < colFields.Length; i++)
        {
            string sColField = colFields[i];
            if (sColField != string.Empty)
            {
                dIndex.Add(sColField, i);
                if (!dt.Columns.Contains(sColField))
                    dt.Columns.Add(sColField);
            }
        }
        while (!csvReader.EndOfData)
        {
            string[] fieldData = csvReader.ReadFields();
            if (fieldData.Length > 0)
            {
                DataRow dr = dt.NewRow();
                foreach (var kvp in dIndex)
                {
                    int iVal = kvp.Value;
                    if (iVal < fieldData.Length)
                        dr[kvp.Key] = fieldData[iVal];
                }
                dt.Rows.Add(dr);
            }
        }
    }
}
day1.csv and day2.csv are as given in the question. dataGridView1 then shows all four rows, with each value under the correct column regardless of the column order in the source file.
Here is a simple generic method that will take a CSV file (broken into string[]) and create from it a list of objects. The assumption is that the object properties will have the same name as the headers. If this is not the case you might look into the DataMemberAttribute property and modify accordingly.
private static List<T> ProcessCSVFile<T>(string[] lines)
{
    List<T> list = new List<T>();
    Type type = typeof(T);
    string[] headerArray = lines[0].Split(new char[] { ',' });
    PropertyInfo[] properties = new PropertyInfo[headerArray.Length];
    for (int prop = 0; prop < properties.Length; prop++)
    {
        properties[prop] = type.GetProperty(headerArray[prop]);
    }
    for (int count = 1; count < lines.Length; count++)
    {
        string[] valueArray = lines[count].Split(new char[] { ',' });
        T t = Activator.CreateInstance<T>();
        list.Add(t);
        for (int value = 0; value < valueArray.Length; value++)
        {
            properties[value].SetValue(t, valueArray[value], null);
        }
    }
    return list;
}
Now, in order to use it, just pass your file formatted as an array of strings. Let's say the class you want to read into looks like this:
class Music
{
    public string ID { get; set; }
    public string FirstName { get; set; }
    public string LastName { get; set; }
    public string Email { get; set; }
}
So you can call this:
List<Music> newlist = ProcessCSVFile<Music>(list.ToArray());
...and everything gets done with one call.
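One thing to be aware of (my note): SetValue is handed the raw string from the file, so this only works as long as the target properties are strings, as in Music above. For typed properties you would convert first, along the lines of properties[value].SetValue(t, Convert.ChangeType(valueArray[value], properties[value].PropertyType), null);.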
By way of background to this question: I want to bind a "code made" DataTable to an asp:GridView. To persist this table I implement the ISerializable interface. The table is displayed correctly, but on postback for sorting the rows, an InvalidCastException is thrown because elements in row.ItemArray changed from type double to type string.
This is the class. Note: two constructors, one for constructing the DataTable initially and one for constructing the table in the deserialisation process:
[global::System.Serializable()]
[global::System.Xml.Serialization.XmlSchemaProviderAttribute("GetTypedTableSchema")]
public partial class HeatMapVisualisationDataTable : DataTable, System.Runtime.Serialization.ISerializable
{
    #region members
    public double _valueMin { get; private set; }
    public double _valueMax { get; private set; }
    #endregion members

    public HeatMapVisualisationDataTable(XemlExperimentHeatMapDataTable data)
        : base("result", "http://gmd.mpimp-golm.mpg.de/HeatMap")
    {
        this.Columns.Add(new DataColumn("metabolite", typeof(Guid)));
        this.Columns.Add(new DataColumn("name", typeof(string)));
        DataColumn[] PrimaryKeyColumns = new DataColumn[1];
        PrimaryKeyColumns[0] = this.Columns["metabolite"];
        this.PrimaryKey = PrimaryKeyColumns;
        SortedDictionary<int, DataColumn> headers = new SortedDictionary<int, DataColumn>();
        foreach (var item in data.AsParallel().AsEnumerable().Select(x => x.ObservationPointId).Distinct().OrderBy(x => x))
        {
            DataColumn dc = this.Columns.Add(item.ToString(), typeof(Double));
            headers.Add(item, dc);
        }
        foreach (var item in data)
        {
            DataRow tmpRow = base.Rows.Find(item.MetaboliteId);
            if (tmpRow == null)
            {
                tmpRow = base.Rows.Add(new object[] { item.MetaboliteId });
                tmpRow["name"] = item.MetaboliteName;
            }
            tmpRow[headers[item.ObservationPointId]] = item.value;
        }
        this.AcceptChanges();
        _valueMax = data.AsParallel().AsUnordered().Max(x => x.value);
        _valueMin = data.AsParallel().AsUnordered().Min(x => x.value);
    }

    public HeatMapVisualisationDataTable(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext ctxt) :
        base(info, ctxt)
    {
        _valueMin = (double)info.GetValue("valueMin", typeof(double));
        _valueMax = (double)info.GetValue("valueMax", typeof(double));
    }

    public override void GetObjectData(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext ctxt)
    {
        base.GetObjectData(info, ctxt);
        info.AddValue("valueMin", _valueMin);
        info.AddValue("valueMax", _valueMax);
    }
}
As I can observe by reading the schema in the SerializationInfo in the constructor HeatMapVisualisationDataTable(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext ctxt), the table is correctly serialised and comes back correctly from the "store" in terms of the serialised XML schema. However, looking at the constructed base class in this constructor, all columns that were of type double before serialisation are now of type string. What am I doing wrong?
Update 1: I could also reproduce this issue by changing the numeric datatype from double to float or decimal. The primary key column of type Guid is deserialised into the correct type.
Update 2: In my opinion the described behaviour is a follow-up of some DataTable.ReadXml() issues where the DataTypes of columns get replaced by String.
Thanks,
Jan
As a kind of solution, circumventing DataTable's serialisation into XML and the subsequent recreation of the DataTable from XML using ReadXml, I used this MSDN article on Binary Serialization of ADO.NET Objects. The trick is to store the information about column names and column types in some extra ArrayLists and put those into the serialisation. Here are my current implementations of the ISerializable interface:
public HeatMapVisualisationDataTable(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext ctxt)
    : base()
{
    _valueMin = info.GetDouble("valueMin");
    _valueMax = info.GetDouble("valueMax");
    System.Collections.ArrayList colNames = (System.Collections.ArrayList)info.GetValue("colNames", typeof(System.Collections.ArrayList));
    System.Collections.ArrayList colTypes = (System.Collections.ArrayList)info.GetValue("colTypes", typeof(System.Collections.ArrayList));
    System.Collections.ArrayList dataRows = (System.Collections.ArrayList)info.GetValue("dataRows", typeof(System.Collections.ArrayList));
    // Add columns
    for (int i = 0; i < colNames.Count; i++)
    {
        DataColumn col = new DataColumn(colNames[i].ToString(), Type.GetType(colTypes[i].ToString()));
        this.Columns.Add(col);
    }
    // Add rows
    for (int i = 0; i < dataRows.Count; i++)
    {
        DataRow row = this.Rows.Add((object[])dataRows[i]);
    }
    this.AcceptChanges();
}
public override void GetObjectData(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext ctxt)
{
    System.Collections.ArrayList colNames = new System.Collections.ArrayList();
    System.Collections.ArrayList colTypes = new System.Collections.ArrayList();
    System.Collections.ArrayList dataRows = new System.Collections.ArrayList();
    foreach (DataColumn col in this.Columns)
    {
        colNames.Add(col.ColumnName);
        colTypes.Add(col.DataType.FullName);
    }
    foreach (DataRow row in this.Rows)
    {
        dataRows.Add(row.ItemArray);
    }
    info.AddValue("colNames", colNames);
    info.AddValue("colTypes", colTypes);
    info.AddValue("dataRows", dataRows);
    info.AddValue("valueMin", _valueMin);
    info.AddValue("valueMax", _valueMax);
}
Note that I don't call DataTable's GetObjectData(...) and deserialisation constructor any more. However, this is not the answer to my original posting; it is just a workaround... So the question "Why does ReadXml() change the DataTypes?" is still open!
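For what it's worth, the behaviour described in Update 2 matches how ReadXml works: unless the XML carries an inline schema, ReadXml has to infer the column types and falls back to string for everything. A minimal sketch (my addition, not the original persistence path) showing that writing the schema along with the data preserves the types:

using System;
using System.Data;
using System.IO;

var table = new DataTable("result");
table.Columns.Add("metabolite", typeof(Guid));
table.Columns.Add("value", typeof(double));

var sw = new StringWriter();
// WriteSchema embeds the XSD, including each column's DataType
table.WriteXml(sw, XmlWriteMode.WriteSchema);

var restored = new DataTable();
restored.ReadXml(new StringReader(sw.ToString()));
// restored.Columns["value"].DataType is typeof(double) again;
// without WriteSchema it would come back as typeof(string)

Whether the GridView's view-state serialisation path can be made to include that schema is another matter, which is presumably why the binary workaround above was needed.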