How can I check whether two classes are added as members of each other?
class Team
{
    Driver driver { get; set; }
    Driver codriver { get; set; }
}
class Driver
{
    Team parentTeam { get; set; }
}
I have used the following code to get the properties:
PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance);
foreach (PropertyInfo property in properties)
{
////if (!isParent) //need to find this.
//{
object propertyValue = property.GetValue(obj);
if (propertyValue != null)
{
// Get the deep clone of the field in the original object and assign the clone to the field in the new object.
property.SetValue(copiedObject, CloneProcedure(propertyValue));
}
//}
}
When iterating over the second class, I want to skip the property whose type is the first class.
Note: Some of you may feel my classes are declared incorrectly but this is a legacy system and there is no chance of me restructuring the classes.
I have tried DeclaringType: property.DeclaringType returns a value, but obj.GetType().DeclaringType is null.
By the looks of it you are deep cloning, and you don't actually want to skip a parent Type, you just don't want the same instance to generate multiple clones.
What you could do is keep a Dictionary<object, object> that keeps references of previously cloned instances:
object CloneProcedure(object o, Dictionary<object, object> cloned)
{
object clone;
if (cloned.TryGetValue(o, out clone))
{
// this object has been cloned earlier, return reference to that clone
return clone;
}
clone = Activator.CreateInstance(o.GetType());
cloned[o] = clone;
PropertyInfo[] properties = ...
foreach ...
{
...
property.SetValue(clone, CloneProcedure(propertyValue, cloned));
}
return clone;
}
This ensures that no object is ever cloned multiple times, and if multiple properties point to the same instance, the clones will also point to the same cloned instance.
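For illustration, here is a minimal sketch using the Team/Driver classes from the question (it assumes the CloneProcedure above and that the properties are public):
var team = new Team();
team.driver = new Driver { parentTeam = team };
team.codriver = new Driver { parentTeam = team };
var clonedTeam = (Team)CloneProcedure(team, new Dictionary<object, object>());
// The cycle is preserved: the cloned driver points back at the cloned team,
// not at the original team and not at a second copy of it.
Console.WriteLine(ReferenceEquals(clonedTeam.driver.parentTeam, clonedTeam)); // True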
Try this
PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance);
foreach (PropertyInfo property in properties)
{
if (property.PropertyType != typeof(Team)) // skip properties whose type is the parent (Team)
{
object propertyValue = property.GetValue(obj);
if (propertyValue != null)
{
// Get the deep clone of the field in the original object and assign the clone to the field in the new object.
property.SetValue(copiedObject, CloneProcedure(propertyValue));
}
}
}
If you want to allow for inheritance, you can use Type.IsAssignableFrom
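For example, the exact-type comparison above could become an assignability check, so that subclasses of the parent are skipped too (a sketch; Team stands for the parent type from the question):
// Skip the property when its type is Team or anything derived from Team.
if (!typeof(Team).IsAssignableFrom(property.PropertyType))
{
    object propertyValue = property.GetValue(obj);
    // ... clone as before
}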
I am posting my final deep-copy utility in case it benefits anyone:
public static class DeepCloneHelper
{
private static string[] _excludedPropertyNames = null;
/// <summary>
/// Get the deep clone of an object.
/// </summary>
/// <typeparam name="T">The type of the source.</typeparam>
/// <param name="source">It is the object used to deep clone.</param>
/// <param name="propertyNames"></param>
/// <returns>Return the deep clone.</returns>
public static T DeepClone<T>(T source, string[] propertyNames = null)
{
if (source == null)
{
throw new ArgumentNullException("Object is null");
}
if (propertyNames != null) { _excludedPropertyNames = propertyNames; }
return (T)CloneProcedure(source, new Dictionary<object, object>());
// return target;
}
/// <summary>
/// The method implements deep clone using reflection.
/// </summary>
/// <param name="source">It is the object used to deep clone.</param>
/// <param name="cloned"></param>
/// <returns>Return the deep clone.</returns>
private static object CloneProcedure(Object source, Dictionary<object, object> cloned)
{
if (source == null)
{
return null;
}
object clone;
if (cloned.TryGetValue(source, out clone))
{
// this object has been cloned earlier, return reference to that clone
return clone;
}
Type type = source.GetType();
// If the type of object is the value type, we will always get a new object when
// the original object is assigned to another variable. So if the type of the
// object is primitive or enum, we just return the object. We will process the
// struct type subsequently because the struct type may contain the reference
// fields.
// If the string variables contain the same chars, they always refer to the same
// string in the heap. So if the type of the object is string, we also return the
// object.
if (type.IsPrimitive || type.IsEnum || type == typeof(string))
{
return source;
}
// If the type of the object is the Array, we use the CreateInstance method to get
// a new instance of the array. We also process recursively this method in the
// elements of the original array because the type of the element may be the reference
// type.
else if (type.IsArray)
{
Type typeElement = type.GetElementType();
var array = source as Array;
Array copiedArray = Array.CreateInstance(typeElement, array.Length);
cloned[source] = copiedArray;
for (int i = 0; i < array.Length; i++)
{
// Get the deep clone of the element in the original array and assign the
// clone to the new array.
copiedArray.SetValue(CloneProcedure(array.GetValue(i), cloned), i);
}
return copiedArray;
}
else if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(List<>))
{
if (typeof(IList).IsAssignableFrom(type))
{
var collection = (IList)Activator.CreateInstance(type);
cloned[source] = collection;
foreach (var element in source as IEnumerable)
{
collection.Add(CloneProcedure(element, cloned));
}
return collection;
}
else if (type.IsGenericType)
{
var objectType = type.GetGenericArguments().Single();
if (typeof(IList<>).MakeGenericType(objectType).IsAssignableFrom(type) ||
typeof(ISet<>).MakeGenericType(objectType).IsAssignableFrom(type))
{
var collection = Activator.CreateInstance(type);
cloned[source] = collection;
var addMethod = collection.GetType().GetMethod("Add");
foreach (var element in source as IEnumerable)
{
addMethod.Invoke(collection, new[] { CloneProcedure(element, cloned) });
}
return collection;
}
}
return source;
}
// If the type of the object is class or struct, it may contain the reference fields,
// so we use reflection and process recursively this method in the fields of the object
// to get the deep clone of the object.
// We use Type.IsValueType method here because there is no way to indicate directly whether
// the Type is a struct type.
else if (type.IsClass || type.IsValueType)
{
object copiedObject = Activator.CreateInstance(source.GetType());
cloned[source] = copiedObject;
// Get all PropertyInfo.
PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance);
foreach (PropertyInfo property in properties)
{
if (_excludedPropertyNames == null || !_excludedPropertyNames.Contains(property.Name))
{
object propertyValue = property.GetValue(source);
if (propertyValue != null && property.CanWrite && property.GetSetMethod() != null)
{
// Get the deep clone of the field in the original object and assign the
// clone to the field in the new object.
property.SetValue(copiedObject, CloneProcedure(propertyValue, cloned));
}
}
}
return copiedObject;
}
else
{
throw new ArgumentException("The object is unknown type");
}
}
}
Reference: https://code.msdn.microsoft.com/windowsdesktop/CSDeepCloneObject-8a53311e
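A brief usage sketch for the helper above (the excluded property name is just an example, and GetDriver() is a hypothetical source of an object to clone):
// Clone a Driver but skip its parentTeam property (hypothetical example).
Driver original = GetDriver();
Driver copy = DeepCloneHelper.DeepClone(original, new[] { "parentTeam" });

// Or clone everything:
Driver fullCopy = DeepCloneHelper.DeepClone(original);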
I store SQL results into a dynamic List, which has an underlying DapperRow type. I am trying to serialize/deserialize this List, at which point XmlSerializer complains:
There was an error generating the XML document. ---> System.InvalidOperationException: To be XML serializable, types which inherit from IEnumerable must have an implementation of Add(System.Object) at all levels of their inheritance hierarchy. Dapper.SqlMapper+DapperRow does not implement Add(System.Object).
Is there a way around this (besides the obvious casting the results to my own concrete object), or is it possible to somehow make DapperRow objects conform to System.Xml.XMLserializer constraints?
It states my result array is System.Collections.Generic.List<dynamic> {System.Collections.Generic.List<object>}
Opening the array it says each object is of type object {Dapper.SqlMapper.DapperRow}
I think XML is having issues because DapperRows are now basically IDictionary<string, object> (I cannot use anything but System.Xml.XmlSerializer, so don't suggest an alternative).
I just want to be able to turn a List<dynamic> that I get from Dapper and serialize and deserialize correctly using System.Xml.XmlSerializer
I was able to get something at least serializable by using a serializable dictionary: http://weblogs.asp.net/pwelter34/444961
var results = conn.Query<dynamic>(sql, param);
var resultSet = new List<SerializableDictionary<string, object>>();
foreach (IDictionary<string, object> row in results)
{
var dict = new SerializableDictionary<string, object>();
foreach (var pair in row)
{
dict.Add(pair.Key, pair.Value);
}
resultSet.Add(dict);
}
It's ugly, so I hope more elegant solutions come up.
is it possible to somehow make DapperRow objects conform to System.Xml.XMLserializer constraints?
I don't think that this is possible. The DapperRow class is private and it does not have a parameterless constructor.
However, you might be able to use other means to fix your problem.
I suggest that you put your complex serialization logic behind an extension method. This way your original code will stay clean.
I also suggest the following model for storing the data (although you can choose not to use it and use your own logic behind the extension method):
public class Cell
{
public string Name { get; set; }
public object Value { get; set; }
public Cell(){}
public Cell(KeyValuePair<string, object> kvp)
{
Name = kvp.Key;
Value = kvp.Value;
}
}
public class Row : List<Cell>
{
public Row(){}
public Row(IEnumerable<Cell> cells)
: base(cells){}
}
public class Rows : List<Row>
{
public Rows(){}
public Rows(IEnumerable<Row> rows )
:base(rows){}
}
And then the extension method should look something like this:
public static class Extensions
{
public static void Serialize(this IEnumerable<dynamic> enumerable, Stream stream)
{
var rows =
new Rows(
enumerable
.Cast<IEnumerable<KeyValuePair<string, object>>>()
.Select(row =>
new Row(row.Select(cell => new Cell(cell)))));
XmlSerializer serializer = new XmlSerializer(typeof(Rows));
serializer.Serialize(stream, rows);
}
}
Then you would be able to use this code:
var result = connection.Query("SELECT * From Customers");
var memory_stream = new MemoryStream();
result.Serialize(memory_stream);
See how this code is very small because all the complex logic is moved to the extension method.
The model that I suggested also allows for deserialization; just make sure that you use the correct type (e.g. Rows), like this:
XmlSerializer serializer = new XmlSerializer(typeof(Rows));
Rows rows = (Rows)serializer.Deserialize(stream);
You can also have an extension method that just converts the Dapper result set to the Rows type, and handle the serialization of Rows yourself. Such an extension method should look something like this:
public static Rows ToRows(this IEnumerable<dynamic> enumerable)
{
return
new Rows(
enumerable
.Cast<IEnumerable<KeyValuePair<string, object>>>()
.Select(row =>
new Row(row.Select(cell => new Cell(cell)))));
}
And then use it like this:
var rows = connection.Query("SELECT * From Customers").ToRows();
XmlSerializer serializer = new XmlSerializer(typeof(Rows));
serializer.Serialize(stream, rows);
Firstly, decorate the DapperResultSet with a [Serializable] attribute. Also create a constructor and, in it, assign Rows an empty List<object>. Use this modified code (it also contains the modified interface implementation):
[Serializable]
public class DapperResultSet : IEnumerable<object>
{
public List<object> Rows { get; set; }
public void Add(dynamic o)
{
Rows.Add(o);
}
public DapperResultSet()
{
Rows = new List<object>();
}
public IEnumerator<object> GetEnumerator()
{
return Rows.GetEnumerator();
}
System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
}
Next in your event handler (or where you would like to do the serialization):
var results = conn.Query<dynamic>(sql, param);
var r = new DapperResultSet();
foreach (var row in results)
{
r.Add(row);
}
//Here is the serialization part:
XmlSerializer xs = new XmlSerializer(typeof(DapperResultSet));
using (var fs = new FileStream("Serialized.xml", FileMode.Create)) //Change path if necessary
{
xs.Serialize(fs, r);
}
For Deserialization,
XmlSerializer xs = new XmlSerializer(typeof(DapperResultSet));
using (var fs = new FileStream("Serialized.xml", FileMode.Open))
{
DapperResultSet d_DRS = (DapperResultSet)xs.Deserialize(fs); //Deserialized
}
Challenging request, because Dapper is not designed to be serializable. But let's see what can be done.
The first decision is easy - we need to implement IXmlSerializable. The question is how.
Serialization is not a big deal, since we have the field names and values. So we could use an approach similar to the SerializableDictionary<TKey, TValue> you mentioned. However, it relies heavily on typeof(TKey) and typeof(TValue). We have no problem with the key (it's a string), but the type of the value is object. As I mentioned, it's not a problem to write an object value as XML. The problem is deserialization: at that point, all we have is a string and no clue what that string represents. This means we need to store some metadata in order to deserialize correctly. Of course there are many ways to do that, but I decided to store the field names and types separately at the beginning, and then the items with values only.
Putting it all together, here is what I ended up with:
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq.Expressions;
using System.Reflection;
using System.Xml;
using System.Xml.Schema;
using System.Xml.Serialization;
namespace Samples
{
public class DapperResultSet : IXmlSerializable
{
static readonly Type TableType;
static readonly Type RowType;
static readonly Func<object, string[]> GetFieldNames;
static readonly Func<object, object[]> GetFieldValues;
static readonly Func<string[], object> CreateTable;
static readonly Func<object, object[], object> CreateRow;
static DapperResultSet()
{
TableType = typeof(Dapper.SqlMapper).GetNestedType("DapperTable", BindingFlags.NonPublic);
RowType = typeof(Dapper.SqlMapper).GetNestedType("DapperRow", BindingFlags.NonPublic);
// string[] GetFieldNames(object row)
{
var row = Expression.Parameter(typeof(object), "row");
var expr = Expression.Lambda<Func<object, string[]>>(
Expression.Field(Expression.Field(Expression.Convert(row, RowType), "table"), "fieldNames"),
row);
GetFieldNames = expr.Compile();
}
// object[] GetFieldValues(object row)
{
var row = Expression.Parameter(typeof(object), "row");
var expr = Expression.Lambda<Func<object, object[]>>(
Expression.Field(Expression.Convert(row, RowType), "values"),
row);
GetFieldValues = expr.Compile();
}
// object CreateTable(string[] fieldNames)
{
var fieldNames = Expression.Parameter(typeof(string[]), "fieldNames");
var expr = Expression.Lambda<Func<string[], object>>(
Expression.New(TableType.GetConstructor(new[] { typeof(string[]) }), fieldNames),
fieldNames);
CreateTable = expr.Compile();
}
// object CreateRow(object table, object[] values)
{
var table = Expression.Parameter(typeof(object), "table");
var values = Expression.Parameter(typeof(object[]), "values");
var expr = Expression.Lambda<Func<object, object[], object>>(
Expression.New(RowType.GetConstructor(new[] { TableType, typeof(object[]) }),
Expression.Convert(table, TableType), values),
table, values);
CreateRow = expr.Compile();
}
}
static readonly dynamic[] emptyItems = new dynamic[0];
public IReadOnlyList<dynamic> Items { get; private set; }
public DapperResultSet()
{
Items = emptyItems;
}
public DapperResultSet(IEnumerable<dynamic> source)
{
if (source == null) throw new ArgumentNullException("source");
Items = source as IReadOnlyList<dynamic> ?? new List<dynamic>(source);
}
XmlSchema IXmlSerializable.GetSchema() { return null; }
void IXmlSerializable.WriteXml(XmlWriter writer)
{
if (Items.Count == 0) return;
// Determine field names and types
var fieldNames = GetFieldNames((object)Items[0]);
var fieldTypes = new TypeCode[fieldNames.Length];
for (int count = 0, i = 0; i < Items.Count; i++)
{
var values = GetFieldValues((object)Items[i]);
for (int c = 0; c < fieldTypes.Length; c++)
{
if (fieldTypes[c] == TypeCode.Empty && values[c] != null)
{
fieldTypes[c] = Type.GetTypeCode(values[c].GetType());
if (++count >= fieldTypes.Length) break;
}
}
}
// Write fields
writer.WriteStartElement("Fields");
writer.WriteAttributeString("Count", XmlConvert.ToString(fieldNames.Length));
for (int i = 0; i < fieldNames.Length; i++)
{
writer.WriteStartElement("Field");
writer.WriteAttributeString("Name", fieldNames[i]);
writer.WriteAttributeString("Type", XmlConvert.ToString((int)fieldTypes[i]));
writer.WriteEndElement();
}
writer.WriteEndElement();
// Write items
writer.WriteStartElement("Items");
writer.WriteAttributeString("Count", XmlConvert.ToString(Items.Count));
foreach (IDictionary<string, object> item in Items)
{
writer.WriteStartElement("Item");
foreach (var entry in item)
{
writer.WriteStartAttribute(entry.Key);
writer.WriteValue(entry.Value);
writer.WriteEndAttribute();
}
writer.WriteEndElement();
}
writer.WriteEndElement();
}
void IXmlSerializable.ReadXml(XmlReader reader)
{
reader.MoveToContent();
bool isEmptyElement = reader.IsEmptyElement;
reader.ReadStartElement(); // Container
if (isEmptyElement) return;
// Read fields
int fieldCount = XmlConvert.ToInt32(reader.GetAttribute("Count"));
reader.ReadStartElement("Fields");
var fieldNames = new string[fieldCount];
var fieldTypes = new TypeCode[fieldCount];
var fieldIndexByName = new Dictionary<string, int>(fieldCount);
for (int c = 0; c < fieldCount; c++)
{
fieldNames[c] = reader.GetAttribute("Name");
fieldTypes[c] = (TypeCode)XmlConvert.ToInt32(reader.GetAttribute("Type"));
fieldIndexByName.Add(fieldNames[c], c);
reader.ReadStartElement("Field");
}
reader.ReadEndElement();
// Read items
int itemCount = XmlConvert.ToInt32(reader.GetAttribute("Count"));
reader.ReadStartElement("Items");
var items = new List<dynamic>(itemCount);
var table = CreateTable(fieldNames);
for (int i = 0; i < itemCount; i++)
{
var values = new object[fieldCount];
if (reader.MoveToFirstAttribute())
{
do
{
var fieldName = reader.Name;
var fieldIndex = fieldIndexByName[fieldName];
values[fieldIndex] = Convert.ChangeType(reader.Value, fieldTypes[fieldIndex], CultureInfo.InvariantCulture);
}
while (reader.MoveToNextAttribute());
}
reader.ReadStartElement("Item");
var item = CreateRow(table, values);
items.Add(item);
}
reader.ReadEndElement(); // Items
reader.ReadEndElement(); // Container
Items = items;
}
}
}
Some things would have been easier if we could modify the Dapper source code, but I assume you don't want to do that.
And here is a sample usage:
static void Test(IEnumerable<dynamic> source)
{
var stream = new MemoryStream();
var sourceSet = new DapperResultSet(source);
var serializer = new XmlSerializer(typeof(DapperResultSet));
serializer.Serialize(stream, sourceSet);
stream.Position = 0;
var reader = new StreamReader(stream);
var xml = reader.ReadToEnd();
stream.Position = 0;
var deserializer = new XmlSerializer(typeof(DapperResultSet));
var target = ((DapperResultSet)deserializer.Deserialize(stream)).Items;
}
My question is shown in the code below.
I have classes like these:
public class MainCS
{
public int A;
public int B;
public int C;
public int D;
}
public class Sub1
{
public int A;
public int B;
public int C;
}
public void MethodA(Sub1 model)
{
MainCS mdata = new MainCS() { A = model.A, B = model.B, C = model.C };
// is there a way to directly cast class Sub1 into MainCS like that
mdata = (MainCS) model;
}
Use JSON serialization and deserialization:
using Newtonsoft.Json;
Class1 obj1 = new Class1();
Class2 obj2 = JsonConvert.DeserializeObject<Class2>(JsonConvert.SerializeObject(obj1));
Or:
public class Class1
{
public static explicit operator Class2(Class1 obj)
{
return JsonConvert.DeserializeObject<Class2>(JsonConvert.SerializeObject(obj));
}
}
Which then allows you to do something like
Class1 obj1 = new Class1();
Class2 obj2 = (Class2)obj1;
You have already defined the conversion, you just need to take it one step further if you would like to be able to cast. For example:
public class sub1
{
public int a;
public int b;
public int c;
public static explicit operator maincs(sub1 obj)
{
maincs output = new maincs() { a = obj.a, b = obj.b, c = obj.c };
return output;
}
}
Which then allows you to do something like
static void Main()
{
sub1 mySub = new sub1();
maincs myMain = (maincs)mySub;
}
What he wants to say is:
"If you have two classes which share most of the same properties you can cast an object from class a to class b and automatically make the system understand the assignment via the shared property names?"
Option 1: Use reflection
Disadvantage : It's gonna slow you down more than you think.
Option 2: Make one class derive from the other, the first one with the common properties and the other an extension of it.
Disadvantage: Coupled! If you're doing that for two layers in your application, then the two layers will be coupled!
Let there be:
class customer
{
public string firstname { get; set; }
public string lastname { get; set; }
public int age { get; set; }
}
class employee
{
public string firstname { get; set; }
public int age { get; set; }
}
Now here is an extension for Object type:
public static T Cast<T>(this Object myobj)
{
Type objectType = myobj.GetType();
Type target = typeof(T);
var x = Activator.CreateInstance(target, false);
var z = from source in objectType.GetMembers().ToList()
where source.MemberType == MemberTypes.Property select source ;
var d = from source in target.GetMembers().ToList()
where source.MemberType == MemberTypes.Property select source;
List<MemberInfo> members = d.Where(memberInfo => z.Select(c => c.Name)
.ToList().Contains(memberInfo.Name)).ToList();
PropertyInfo propertyInfo;
object value;
foreach (var memberInfo in members)
{
propertyInfo = typeof(T).GetProperty(memberInfo.Name);
value = myobj.GetType().GetProperty(memberInfo.Name).GetValue(myobj,null);
propertyInfo.SetValue(x,value,null);
}
return (T)x;
}
Now you use it like this:
static void Main(string[] args)
{
var cus = new customer();
cus.firstname = "John";
cus.age = 3;
employee emp = cus.Cast<employee>();
}
The Cast method finds the properties common to the two types and does the assignment automatically.
You could change your class structure to:
public class maincs : sub1
{
public int d;
}
public class sub1
{
public int a;
public int b;
public int c;
}
Then you could keep a list of sub1 and cast some of them to maincs.
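A short sketch of what that looks like in practice (a mixed list of sub1 instances, some of which are really maincs):
var items = new List<sub1> { new sub1(), new maincs { a = 1, b = 2, c = 3, d = 4 } };
foreach (var item in items)
{
    var main = item as maincs; // null for plain sub1 instances
    if (main != null)
    {
        Console.WriteLine(main.d);
    }
}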
Using the following code, you can copy any class object to another class object, copying all properties that have the same name and type.
public class CopyClass
{
/// <summary>
/// Copy an object to destination object, only matching fields will be copied
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="sourceObject">An object with matching fields of the destination object</param>
/// <param name="destObject">Destination object, must already be created</param>
public static void CopyObject<T>(object sourceObject, ref T destObject)
{
// If either the source, or destination is null, return
if (sourceObject == null || destObject == null)
return;
// Get the type of each object
Type sourceType = sourceObject.GetType();
Type targetType = destObject.GetType();
// Loop through the source properties
foreach (PropertyInfo p in sourceType.GetProperties())
{
// Get the matching property in the destination object
PropertyInfo targetObj = targetType.GetProperty(p.Name);
// If there is none, skip
if (targetObj == null)
continue;
// Set the value in the destination
targetObj.SetValue(destObject, p.GetValue(sourceObject, null), null);
}
}
}
Call the method like this:
ClassA objA = new ClassA();
ClassB objB = new ClassB();
CopyClass.CopyObject(objA, ref objB);
It will copy objA into objB.
You can provide an explicit overload for the cast operator:
public static explicit operator maincs(sub1 val)
{
var ret = new maincs() { a = val.a, b = val.b, c = val.c };
return ret;
}
Another option would be to use an interface that has the a, b, and c properties and implement the interface on both of the classes. Then just have the parameter type to methoda be the interface instead of the class.
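A sketch of that interface-based alternative (the interface name IAbc is made up; note an interface cannot contain fields, so a, b and c become properties here):
public interface IAbc
{
    int a { get; set; }
    int b { get; set; }
    int c { get; set; }
}

public class sub1 : IAbc { public int a { get; set; } public int b { get; set; } public int c { get; set; } }
public class maincs : IAbc { public int a { get; set; } public int b { get; set; } public int c { get; set; } public int d { get; set; } }

// methoda now accepts either class through the shared interface.
public void methoda(IAbc model)
{
    // work with model.a, model.b and model.c
}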
Using this code, you can copy any class object to another class object, for properties with the same name and type.
// Requires a reference to System.Web.Extensions and: using System.Web.Script.Serialization;
JavaScriptSerializer serializer = new JavaScriptSerializer();
string serializeString = serializer.Serialize(objectEntity);
objectViewModel objVM = serializer.Deserialize<objectViewModel>(serializeString);
There are some great answers here; I just wanted to add a little bit of type checking, as we cannot assume that properties with the same name are of the same type. Here is my offering, which extends the previous, very excellent answer, as I had a few little glitches with it.
In this version I have allowed for the consumer to specify fields to be excluded, and also by default to exclude any database / model specific related properties.
public static T Transform<T>(this object myobj, string excludeFields = null)
{
// Compose a list of unwanted members
if (string.IsNullOrWhiteSpace(excludeFields))
excludeFields = string.Empty;
excludeFields = !string.IsNullOrEmpty(excludeFields) ? excludeFields + "," : excludeFields;
excludeFields += $"{nameof(DBTable.ID)},{nameof(DBTable.InstanceID)},{nameof(AuditableBase.CreatedBy)},{nameof(AuditableBase.CreatedByID)},{nameof(AuditableBase.CreatedOn)}";
var objectType = myobj.GetType();
var targetType = typeof(T);
var targetInstance = Activator.CreateInstance(targetType, false);
// Find common members by name
var sourceMembers = from source in objectType.GetMembers().ToList()
where source.MemberType == MemberTypes.Property
select source;
var targetMembers = from source in targetType.GetMembers().ToList()
where source.MemberType == MemberTypes.Property
select source;
var commonMembers = targetMembers.Where(memberInfo => sourceMembers.Select(c => c.Name)
.ToList().Contains(memberInfo.Name)).ToList();
// Remove unwanted members
commonMembers.RemoveWhere(x => x.Name.InList(excludeFields));
foreach (var memberInfo in commonMembers)
{
if (!((PropertyInfo)memberInfo).CanWrite) continue;
var targetProperty = typeof(T).GetProperty(memberInfo.Name);
if (targetProperty == null) continue;
var sourceProperty = myobj.GetType().GetProperty(memberInfo.Name);
if (sourceProperty == null) continue;
// Check source and target types are the same
if (sourceProperty.PropertyType.Name != targetProperty.PropertyType.Name) continue;
var value = myobj.GetType().GetProperty(memberInfo.Name)?.GetValue(myobj, null);
if (value == null) continue;
// Set the value
targetProperty.SetValue(targetInstance, value, null);
}
return (T)targetInstance;
}
I tried to use the Cast Extension (see https://stackoverflow.com/users/247402/stacker) in a situation where the Target Type contains a Property that is not present in the Source Type. It did not work, I'm not sure why. I refactored to the following extension that did work for my situation:
public static T Casting<T>(this Object source)
{
Type sourceType = source.GetType();
Type targetType = typeof(T);
var target = Activator.CreateInstance(targetType, false);
var sourceMembers = sourceType.GetMembers()
.Where(x => x.MemberType == MemberTypes.Property)
.ToList();
var targetMembers = targetType.GetMembers()
.Where(x => x.MemberType == MemberTypes.Property)
.ToList();
var members = targetMembers
.Where(x => sourceMembers
.Select(y => y.Name)
.Contains(x.Name));
PropertyInfo propertyInfo;
object value;
foreach (var memberInfo in members)
{
propertyInfo = typeof(T).GetProperty(memberInfo.Name);
value = source.GetType().GetProperty(memberInfo.Name).GetValue(source, null);
propertyInfo.SetValue(target, value, null);
}
return (T)target;
}
Note that I changed the name of the extension as the Name Cast conflicts with results from Linq. Hat tip https://stackoverflow.com/users/2093880/usefulbee
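Usage is the same as with the original extension (a sketch, reusing the customer/employee classes from the earlier answer):
var cus = new customer { firstname = "John", age = 3 };
employee emp = cus.Casting<employee>();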
var obj = _account.Retrieve(Email, hash);
AccountInfoResponse accountInfoResponse = new AccountInfoResponse();
if (obj != null)
{
accountInfoResponse =
JsonConvert.
DeserializeObject<AccountInfoResponse>
(JsonConvert.SerializeObject(obj));
}
I developed a class, ObjectChanger, that contains the functions ConvertToJson, DeleteFromJson, AddToJson, and ConvertToObject. These functions convert a C# object to JSON, whose properties can then be removed or added as needed. Afterwards, the adjusted JSON object can simply be converted to a new object using the ConvertToObject function. In the sample code below, the class "AtoB" uses ObjectChanger in its GetAtoB() function:
using System.Collections.Generic;
using Newtonsoft.Json;
using Nancy.Json;
namespace YourNameSpace
{
public class A
{
public int num1 { get; set; }
public int num2 { get; set; }
public int num3 { get; set; }
}
public class B//remove num2 and add num4
{
public int num1 { get; set; }
public int num3 { get; set; }
public int num4 { get; set; }
}
/// <summary>
/// This class utilizes ObjectChanger to illustrate how
/// to convert object of type A to one of type B
/// by converting A to a Json Object, manipulating the JSON
/// and then converting it to object of type B
/// </summary>
public class AtoB
{
public dynamic GetAtoB()
{
A objectA = new A
{
num1 =1, num2 =2,num3 =3
};
//convert "objectA" to JSON object "jsonA"
dynamic jsonA = ObjectChanger.ConvertToJson(objectA);
//remove num2 from jsonA
ObjectChanger.DeleteFromJson(jsonA, "num2");
//add property num4 with value 4 to jsonA
ObjectChanger.AddToJson(jsonA, "num4", 4);
B objectB = ObjectChanger.ConvertToObject<B>(jsonA);
return objectB;
//note: Above DeleteFromJson not needed if the
//property(e.g "num2") doesn't exist in objectB
//the jsonA will still keep the num2 but when
//ConvertToObject is called the objectB will only get
//populated with the relevant fields.
}
}
public class ObjectChanger
{
/// <summary>
/// Converts a provided class to JsonObject
/// sample use: dynamic r = ObjectChanger.ConvertToJson(providedObj);
/// </summary>
public static dynamic ConvertToJson(dynamic providedObj)
{
JsonSerializerSettings jss = new JsonSerializerSettings();
jss.ReferenceLoopHandling = ReferenceLoopHandling.Ignore;
//https://stackoverflow.com/questions/7397207/json-net-error-self-referencing-loop-detected-for-type
return JsonConvert.DeserializeObject<System.Dynamic.ExpandoObject>
(JsonConvert.SerializeObject(providedObj,jss));
}
/// <summary>
/// Deletes Property from Json Object
/// sample use: dynamic r = ObjectChanger.ConvertToJson(providedObj);
/// ((IDictionary<string, object>)r).Remove("keyvalue");
/// </summary>
public static dynamic DeleteFromJson(dynamic providedObj, string keyvalue)
{
((IDictionary<string, object>)providedObj).Remove(keyvalue);
return providedObj;
}
/// <summary>
/// Adds Property to provided Json Object
/// </summary>
/// <param name="providedObj"></param>
/// <param name="key"></param>
/// <param name="keyvalue"></param>
/// <returns>Returns updated Object</returns>
public static dynamic AddToJson(dynamic providedObj, string key,
dynamic keyvalue)
{
((IDictionary<string, object>)providedObj).Add(key, keyvalue);
return providedObj;
}
/// <summary>
/// Converts provided object providedObj
/// to an object of type T
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="providedObj"></param>
/// <returns></returns>
public static T ConvertToObject<T>(dynamic providedObj)
{
var serializer = new JavaScriptSerializer();
var json = serializer.Serialize(providedObj);
var c = serializer.Deserialize<T>(json);
return c;
}
}
}
Background
Working in .NET 2.0 here, reflecting over lists in general. I was originally using t.IsAssignableFrom(typeof(IEnumerable)) to detect whether a property I was traversing supported the IEnumerable interface (and thus I could cast the object to it safely).
However this code was not evaluating to true when the object was a BindingList<T>.
Next
I tried to use t.IsSubclassOf(typeof(IEnumerable)) and didn't have any luck either.
Code
/// <summary>
/// Reflects an enumerable (not a list, bad name should be fixed later maybe?)
/// </summary>
/// <param name="o">The Object the property resides on.</param>
/// <param name="p">The Property We're reflecting on</param>
/// <param name="rla">The Attribute tagged to this property</param>
public void ReflectList(object o, PropertyInfo p, ReflectedListAttribute rla)
{
Type t = p.PropertyType;
//if (t.IsAssignableFrom(typeof(IEnumerable)))
if (t.IsSubclassOf(typeof(IEnumerable)))
{
IEnumerable e = p.GetValue(o, null) as IEnumerable;
int count = 0;
if (e != null)
{
foreach (object lo in e)
{
if (count >= rla.MaxRows)
break;
ReflectObject(lo, count);
count++;
}
}
}
}
The Intent
I want to basically tag lists i want to reflect through with the ReflectedListAttribute and call this function on the properties that has it. (Already Working)
Once inside this function, given the object the property resides on, and the PropertyInfo related, get the value of the property, cast it to an IEnumerable (assuming it's possible) and then iterate through each child and call ReflectObject(...) on the child with the count variable.
When you do the as IEnumerable cast and the variable is not null, you know that the object implements the IEnumerable interface.
You don't need this code:
Type t = p.PropertyType;
//if (t.IsAssignableFrom(typeof(IEnumerable)))
if (t.IsSubclassOf(typeof(IEnumerable)))
{
This would be enough:
public void ReflectList(object o, PropertyInfo p, ReflectedListAttribute rla)
{
IEnumerable e = p.GetValue(o, null) as IEnumerable;
int count = 0;
if (e != null)
{
foreach (object lo in e)
{
if (count >= rla.MaxRows)
break;
ReflectObject(lo, count);
count++;
}
}
}
From MSDN:
The IsSubclassOf method cannot be used to determine whether an interface derives from another interface, or whether a class implements an interface. Use the GetInterface method for that purpose.
Also, your use of IsAssignableFrom is reversed; you should use it like this:
typeof(IEnumerable).IsAssignableFrom(t)
This should return true if IEnumerable is implemented by t.
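A quick sketch of the difference (BindingList<T> lives in System.ComponentModel, IEnumerable in System.Collections):
// True: BindingList<string> implements IEnumerable.
bool implementsIEnumerable = typeof(IEnumerable).IsAssignableFrom(typeof(BindingList<string>));

// False: this asks whether an IEnumerable could be assigned to a BindingList<string> variable.
bool reversed = typeof(BindingList<string>).IsAssignableFrom(typeof(IEnumerable));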
Why do you need the if-statement at all?
You already did var e = ... as IEnumerable, and afterwards you just check that it is not null.
Isn't that enough?
These work. :)
A List is an extended Collection, so the tests for them are different. A Dictionary has two internal containers, hence a test of its own.
public static bool IsList(object obj)
{
System.Collections.IList list = obj as System.Collections.IList;
return list != null;
}
public static bool IsCollection(object obj)
{
System.Collections.ICollection coll = obj as System.Collections.ICollection;
return coll != null;
}
public static bool IsDictionary(object obj)
{
System.Collections.IDictionary dictionary = obj as System.Collections.IDictionary;
return dictionary != null;
}
Usage example -
if (IsDictionary(fieldObject)) //key-value type collection?
{
System.Collections.IDictionary dictionary = fieldObject as System.Collections.IDictionary;
foreach (object _ob in dictionary.Values)
{
//do work
}
// dictionary.Clear();
}
else //normal collection
{
if (IsCollection(fieldObject))
{
System.Collections.ICollection coll = fieldObject as System.Collections.ICollection;
foreach (object _ob in coll)
{
//do work
}
if (IsList(fieldObject))
{
//System.Collections.IList list = fieldObject as System.Collections.IList;
//list.Clear(); // <--- List's function, not Collection's.
}
}
}
I have a generic list of objects in C#, and wish to clone the list. The items within the list are cloneable, but there doesn't seem to be an option to do list.Clone().
Is there an easy way around this?
If your elements are value types, then you can just do:
List<YourType> newList = new List<YourType>(oldList);
However, if they are reference types and you want a deep copy (assuming your elements properly implement ICloneable), you could do something like this:
List<ICloneable> oldList = new List<ICloneable>();
List<ICloneable> newList = new List<ICloneable>(oldList.Count);
oldList.ForEach((item) =>
{
newList.Add((ICloneable)item.Clone());
});
Obviously, replace ICloneable in the above generics and cast with whatever your element type is that implements ICloneable.
If your element type doesn't support ICloneable but does have a copy-constructor, you could do this instead:
List<YourType> oldList = new List<YourType>();
List<YourType> newList = new List<YourType>(oldList.Count);
oldList.ForEach((item)=>
{
newList.Add(new YourType(item));
});
Personally, I would avoid ICloneable because of the need to guarantee a deep copy of all members. Instead, I'd suggest the copy-constructor or a factory method like YourType.CopyFrom(YourType itemToCopy) that returns a new instance of YourType.
Any of these options could be wrapped by a method (extension or otherwise).
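For instance, a sketch of the copy-constructor / factory-method idea (YourType and its members are placeholders):
public class YourType
{
    public string Name { get; set; }       // placeholder member
    public List<int> Scores { get; set; }  // placeholder member

    // Factory method that returns a new, deep-copied instance.
    public static YourType CopyFrom(YourType itemToCopy)
    {
        return new YourType
        {
            Name = itemToCopy.Name,
            Scores = new List<int>(itemToCopy.Scores)
        };
    }
}

// Cloning the list then becomes:
List<YourType> newList = oldList.ConvertAll(YourType.CopyFrom);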
You can use an extension method.
static class Extensions
{
public static IList<T> Clone<T>(this IList<T> listToClone) where T: ICloneable
{
return listToClone.Select(item => (T)item.Clone()).ToList();
}
}
For a shallow copy, you can instead use the GetRange method of the generic List class.
List<int> oldList = new List<int>( );
// Populate oldList...
List<int> newList = oldList.GetRange(0, oldList.Count);
Quoted from: Generics Recipes
public static object DeepClone(object obj)
{
object objResult = null;
using (var ms = new MemoryStream())
{
var bf = new BinaryFormatter();
bf.Serialize(ms, obj);
ms.Position = 0;
objResult = bf.Deserialize(ms);
}
return objResult;
}
This is one way to do it with C# and .NET 2.0. Your object must be marked [Serializable()]. The goal is to lose all references and build new ones.
To clone a list just call .ToList(). This creates a shallow copy.
Microsoft (R) Roslyn C# Compiler version 2.3.2.62116
Loading context from 'CSharpInteractive.rsp'.
Type "#help" for more information.
> var x = new List<int>() { 3, 4 };
> var y = x.ToList();
> x.Add(5)
> x
List<int>(3) { 3, 4, 5 }
> y
List<int>(2) { 3, 4 }
>
After a slight modification you can also clone:
public static T DeepClone<T>(T obj)
{
T objResult;
using (MemoryStream ms = new MemoryStream())
{
BinaryFormatter bf = new BinaryFormatter();
bf.Serialize(ms, obj);
ms.Position = 0;
objResult = (T)bf.Deserialize(ms);
}
return objResult;
}
Unless you need an actual clone of every single object inside your List<T>, the best way to clone a list is to create a new list with the old list as the collection parameter.
List<T> myList = ...;
List<T> cloneOfMyList = new List<T>(myList);
Changes to myList such as insert or remove will not affect cloneOfMyList and vice versa.
The actual objects the two Lists contain are still the same however.
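A short sketch of that last point, assuming the elements are reference types (Person here is a placeholder class with a Name property):
var person = new Person { Name = "Ann" };
var myList = new List<Person> { person };
var cloneOfMyList = new List<Person>(myList);

cloneOfMyList.Add(new Person { Name = "Cy" }); // myList still has 1 element
cloneOfMyList[0].Name = "Bob";                 // but the first element is shared...
Console.WriteLine(myList[0].Name);             // ...so this prints "Bob"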
Using AutoMapper (or whatever mapping lib you prefer) to clone is simple and a lot more maintainable.
Define your mapping:
Mapper.CreateMap<YourType, YourType>();
Do the magic:
YourTypeList.ConvertAll(Mapper.Map<YourType, YourType>);
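Note that newer AutoMapper versions dropped the static Mapper.CreateMap API; on those versions the instance-based configuration looks roughly like this (a sketch, not tied to a specific release):
var config = new MapperConfiguration(cfg => cfg.CreateMap<YourType, YourType>());
var mapper = config.CreateMapper();
List<YourType> clone = YourTypeList.ConvertAll(item => mapper.Map<YourType>(item));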
If you only care about value types...
And you know the type:
List<int> newList = new List<int>(oldList);
If you don't know the type before, you'll need a helper function:
List<T> Clone<T>(IEnumerable<T> oldList)
{
return new List<T>(oldList);
}
Then just:
List<string> myNewList = Clone(myOldList);
If you have already referenced Newtonsoft.Json in your project and your objects are serializable, you could always use:
List<T> newList = JsonConvert.DeserializeObject<List<T>>(JsonConvert.SerializeObject(listToCopy));
Possibly not the most efficient way to do it, but unless you're doing it 100s of 1000s of times you may not even notice the speed difference.
For a deep copy, ICloneable is the correct solution, but here's a similar approach using a copy constructor instead of the ICloneable interface.
public class Student
{
public Student(Student student)
{
FirstName = student.FirstName;
LastName = student.LastName;
}
public string FirstName { get; set; }
public string LastName { get; set; }
}
// wherever you have the list
List<Student> students;
// and then where you want to make a copy
List<Student> copy = students.Select(s => new Student(s)).ToList();
you'll need the following namespace where you make the copy:
using System.Linq;
you could also use a for loop instead of System.Linq, but Linq makes it concise and clean. Likewise you could do as other answers have suggested and make extension methods, etc., but none of that is necessary.
There is no need to flag classes as Serializable, and in our tests using the Newtonsoft JsonSerializer was even faster than using BinaryFormatter. With the extension methods below this is usable on every object.
Attention: private members are not cloned.
Standard .NET JavascriptSerializer option:
public static T DeepCopy<T>(this T value)
{
JavaScriptSerializer js = new JavaScriptSerializer();
string json = js.Serialize(value);
return js.Deserialize<T>(json);
}
Faster option using Newtonsoft JSON:
public static T DeepCopy<T>(this T value)
{
string json = JsonConvert.SerializeObject(value);
return JsonConvert.DeserializeObject<T>(json);
}
I'll be lucky if anybody ever reads this... but in order to not return a list of type object in my Clone methods, I created an interface:
public interface IMyCloneable<T>
{
T Clone();
}
Then I specified the extension:
public static List<T> Clone<T>(this List<T> listToClone) where T : IMyCloneable<T>
{
return listToClone.Select(item => (T)item.Clone()).ToList();
}
And here is an implementation of the interface in my A/V marking software. I wanted to have my Clone() method return a list of VidMark (while the ICloneable interface wanted my method to return a list of object):
public class VidMark : IMyCloneable<VidMark>
{
public long Beg { get; set; }
public long End { get; set; }
public string Desc { get; set; }
public int Rank { get; set; } = 0;
public VidMark Clone()
{
return (VidMark)this.MemberwiseClone();
}
}
And finally, the usage of the extension inside a class:
private List<VidMark> _VidMarks;
private List<VidMark> _UndoVidMarks;
//Other methods instantiate and fill the lists
private void SetUndoVidMarks()
{
_UndoVidMarks = _VidMarks.Clone();
}
Anybody like it? Any improvements?
public static Object CloneType(Object objtype)
{
Object lstfinal = new Object();
using (MemoryStream memStream = new MemoryStream())
{
BinaryFormatter binaryFormatter = new BinaryFormatter(null, new StreamingContext(StreamingContextStates.Clone));
binaryFormatter.Serialize(memStream, objtype); memStream.Seek(0, SeekOrigin.Begin);
lstfinal = binaryFormatter.Deserialize(memStream);
}
return lstfinal;
}
public class CloneableList<T> : List<T>, ICloneable where T : ICloneable
{
public object Clone()
{
var clone = new List<T>();
ForEach(item => clone.Add((T)item.Clone()));
return clone;
}
}
public List<TEntity> Clone<TEntity>(List<TEntity> o1List) where TEntity : class , new()
{
List<TEntity> retList = new List<TEntity>();
try
{
Type sourceType = typeof(TEntity);
foreach(var o1 in o1List)
{
TEntity o2 = new TEntity();
foreach (PropertyInfo propInfo in (sourceType.GetProperties()))
{
var val = propInfo.GetValue(o1, null);
propInfo.SetValue(o2, val);
}
retList.Add(o2);
}
return retList;
}
catch
{
return retList;
}
}
If you need a cloned list with the same capacity, you can try this:
public static List<T> Clone<T>(this List<T> oldList)
{
var newList = new List<T>(oldList.Capacity);
newList.AddRange(oldList);
return newList;
}
//try this
List<string> ListCopy= new List<string>(OldList);
//or try
List<T> ListCopy=OldList.ToList();
If I need a deep copy of a collection, my favorite approach is this:
public static IEnumerable<T> DeepCopy<T>(this IEnumerable<T> collectionToDeepCopy)
{
var serializedCollection = JsonConvert.SerializeObject(collectionToDeepCopy);
return JsonConvert.DeserializeObject<IEnumerable<T>>(serializedCollection);
}
You can use extension method:
namespace extension
{
public static class ext
{
public static List<double> clone(this List<double> t)
{
List<double> kop = new List<double>();
int x;
for (x = 0; x < t.Count; x++)
{
kop.Add(t[x]);
}
return kop;
}
};
}
You can clone all objects by copying their value-type members; for example, consider this class:
public class matrix
{
public List<List<double>> mat;
public int rows,cols;
public matrix clone()
{
// create new object
matrix copy = new matrix();
copy.mat = new List<List<double>>();
// firstly I can directly copy rows and cols because they are value types
copy.rows = this.rows;
copy.cols = this.cols;
// but now I can no t directly copy mat because it is not value type so
int x;
// I assume I have clone method for List<double>
for(x=0;x<this.mat.Count;x++)
{
copy.mat.Add(this.mat[x].clone());
}
// then mat is cloned
return copy; // and copy of original is returned
}
};
Note: if you do any change on copy (or clone) it will not affect the original object.
Using a cast may be helpful, in this case, for a shallow copy:
IList CloneList(IList list)
{
IList result;
result = (IList)Activator.CreateInstance(list.GetType());
foreach (object item in list) result.Add(item);
return result;
}
applied to generic list:
List<T> Clone<T>(List<T> argument) => (List<T>)CloneList(argument);
I use automapper to copy an object. I just setup a mapping that maps one object to itself. You can wrap this operation any way you like.
http://automapper.codeplex.com/
I've made my own extension which copies an ICollection of items that don't implement ICloneable (note this is a shallow copy):
static class CollectionExtensions
{
public static ICollection<T> Clone<T>(this ICollection<T> listToClone)
{
var array = new T[listToClone.Count];
listToClone.CopyTo(array,0);
return array.ToList();
}
}
You could also simply convert the list to an array using ToArray, and then clone the array using Array.Clone(...).
Depending on your needs, the methods included in the Array class could meet your needs.
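A minimal sketch of the ToArray/Array.Clone route (this is a shallow copy):
List<string> oldList = new List<string> { "a", "b", "c" };

// Array.Clone returns object, so cast back to the concrete array type.
string[] clonedArray = (string[])oldList.ToArray().Clone();
List<string> newList = new List<string>(clonedArray);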
The following code should transfer onto a list with minimal changes.
Basically it works by inserting a new random number from a greater range with each successive loop. If there exist numbers already that are the same or higher than it, shift those random numbers up one so they transfer into the new larger range of random indexes.
// Example Usage
int[] indexes = getRandomUniqueIndexArray(selectFrom.Length, toSet.Length);
for(int i = 0; i < toSet.Length; i++)
toSet[i] = selectFrom[indexes[i]];
private int[] getRandomUniqueIndexArray(int length, int count)
{
if(count > length || count < 1 || length < 1)
return new int[0];
int[] toReturn = new int[count];
if(count == length)
{
for(int i = 0; i < toReturn.Length; i++) toReturn[i] = i;
return toReturn;
}
Random r = new Random();
int startPos = count - 1;
for(int i = startPos; i >= 0; i--)
{
int index = r.Next(length - i);
for(int j = startPos; j > i; j--)
if(toReturn[j] >= index)
toReturn[j]++;
toReturn[i] = index;
}
return toReturn;
}
Another thing: you could use reflection. If you cache it properly, it'll clone 1,000,000 objects in 5.6 seconds (sadly, 16.4 seconds with inner objects).
[ProtoContract(ImplicitFields = ImplicitFields.AllPublic)]
public class Person
{
...
Job JobDescription
...
}
[ProtoContract(ImplicitFields = ImplicitFields.AllPublic)]
public class Job
{...
}
public static class CopyFactory
{
private static readonly Type stringType = typeof(string);
static readonly Dictionary<Type, PropertyInfo[]> ProperyList = new Dictionary<Type, PropertyInfo[]>();
private static readonly MethodInfo CreateCopyReflectionMethod;
static CopyFactory()
{
CreateCopyReflectionMethod = typeof(CopyFactory).GetMethod("CreateCopyReflection", BindingFlags.Static | BindingFlags.Public);
}
public static T CreateCopyReflection<T>(T source) where T : new()
{
var copyInstance = new T();
var sourceType = typeof(T);
PropertyInfo[] propList;
if (ProperyList.ContainsKey(sourceType))
propList = ProperyList[sourceType];
else
{
propList = sourceType.GetProperties(BindingFlags.Public | BindingFlags.Instance);
ProperyList.Add(sourceType, propList);
}
foreach (var prop in propList)
{
var value = prop.GetValue(source, null);
prop.SetValue(copyInstance,
value != null && prop.PropertyType.IsClass && prop.PropertyType != stringType ? CreateCopyReflectionMethod.MakeGenericMethod(prop.PropertyType).Invoke(null, new object[] { value }) : value, null);
}
return copyInstance;
}
}
I measured it in a simple way, by using the Stopwatch class.
var person = new Person
{
...
};
var personList = new List<Person>();
for (var i = 0; i < 1000000; i++)
{
personList.Add(person);
}
var watcher = new Stopwatch();
watcher.Start();
var copylist = personList.Select(CopyFactory.CreateCopyReflection).ToList();
watcher.Stop();
var elapsed = watcher.Elapsed;
RESULT: With inner object PersonInstance - 16.4, PersonInstance = null - 5.6
CopyFactory is just my test class where I have dozen of tests including usage of expression. You could implement this in another form in an extension or whatever. Don't forget about caching.
I didn't test serializing yet, but I doubt it would be an improvement with a million classes. I'll try something fast like protobuf/Newtonsoft.
P.S.: for the sake of reading simplicity, I only used auto-properties here. I could update with FieldInfo, or you can easily implement this on your own.
I recently tested the Protocol Buffers serializer with the DeepClone function out of the box. It wins with 4.2 seconds on a million simple objects; when it comes to inner objects, it wins with 7.4 seconds.
Serializer.DeepClone(personList);
SUMMARY: If you don't have access to the classes, then this will help. Otherwise it depends on the count of the objects. I think you could use reflection up to 10,000 objects (maybe a bit less), but for more than this the Protocol Buffers serializer will perform better.
There is a simple way to clone objects in C# using a JSON serializer and deserializer.
You can create an extension class:
using Newtonsoft.Json;
static class typeExtensions
{
public static T jsonCloneObject<T>(this T source)
{
string json = JsonConvert.SerializeObject(source);
return JsonConvert.DeserializeObject<T>(json);
}
}
To clone an object:
var clonedObj = originalObj.jsonCloneObject();
For a deep clone I use reflection as follows:
public List<T> CloneList<T>(IEnumerable<T> listToClone) {
Type listType = listToClone.GetType();
Type elementType = listType.GetGenericArguments()[0];
List<T> listCopy = new List<T>();
foreach (T item in listToClone) {
object itemCopy = Activator.CreateInstance(elementType);
foreach (PropertyInfo property in elementType.GetProperties()) {
elementType.GetProperty(property.Name).SetValue(itemCopy, property.GetValue(item));
}
listCopy.Add((T)itemCopy);
}
return listCopy;
}
You can use List or IEnumerable interchangeably.
You can use the List<T>.ConvertAll(Converter<T, T>) method to create a new list containing all the elements of the original list, and use a conversion function that returns the input value.
List<int> originalList = new List<int> { 1, 2, 3, 4, 5 };
List<int> clonedList = new List<int>(originalList.ConvertAll(x => x));