I would like to use a CSV file (exported from an Excel sheet) that lists old names and the corresponding new names, in order to rename files - replacing each old name with the new name specified for that row.
For example:
Find what → Replace With
C:\Users\Documents\Pump Station.doc → C:\Users\Documents\Awesome Pump Station.doc
C:\Users\Documents\Pump Selection.doc → C:\Users\Documents\Great Pump Selection.doc
C:\Users\Documents\Pump Sizing Calc.xlsx → C:\Users\Documents\Hectic Pump Sizing Calc.xlsx
I am very new to coding and I am having trouble finishing this off. This is what I have so far. I do not necessarily need the list user interface (which the code currently shows). Ultimately I would like to loop through the rows in my CSV file, check whether the old name specified exists and, if so, rename the file to the new name specified.
I really appreciate any help in advance and sorry for any rookie errors I may have made in my code below.
public class OldNew
{
public string oldFile { get; set; }
public string newFile { get; set; }
}
public static class OldNewService
{
public static List<OldNew> ReadFile(string filepath)
{
var lines = File.ReadAllLines(filepath);
var data = from l in lines.Skip(1)
let split = l.Split(',')
select new OldNew
{
oldFile = split[0],
newFile = split[1],
};
return data.ToList();
}
}
public partial class MainWindow : Window
{
public MainWindow()
{
InitializeComponent();
DataContext = OldNewService.ReadFile(@"C:\Users\cch\Documents\Batch Edit\Lookup Table.csv");
}
}
In my opinion, a better solution would be to use a plain old foreach and not a call to ToList().ForEach().
var lines = File.ReadAllLines(filepath);
var data = from l in lines.Skip(1)
let split = l.Split(',')
select new OldNew
{
oldFile = split[0],
newFile = split[1],
};
foreach(var f in data)
{
if (File.Exists(f.oldFile))
{
File.Move(f.oldFile, f.newFile);
}
}
See: http://blogs.msdn.com/b/ericlippert/archive/2009/05/18/foreach-vs-foreach.aspx for an explanation.
From what I understand, you want to rename the file to its new name if the old one exists. To do that from your list, try something like:
data.ForEach(d =>
{
if (!string.IsNullOrEmpty(d.oldFile) && File.Exists(d.oldFile))
{
File.Move(d.oldFile, d.newFile);
}
});
Wouldn't it make sense to rename the old filename if a new one exists?
Hope this helps.
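Putting the pieces together, a minimal end-to-end sketch (assuming a simple two-column CSV with a header row and no quoted commas inside the paths; paths containing commas would need a real CSV parser such as TextFieldParser):
// Read the old/new pairs and rename only the files that actually exist.
var renames = OldNewService.ReadFile(@"C:\Users\cch\Documents\Batch Edit\Lookup Table.csv");
foreach (var r in renames)
{
    // skip rows whose source file is missing, and don't clobber an existing target
    if (File.Exists(r.oldFile) && !File.Exists(r.newFile))
    {
        File.Move(r.oldFile, r.newFile);
    }
}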
Every time I try to save something to my created XML document, the path I use keeps being wrong.
Here is the code:
public string ToXml<T>(T obj, string path)
{
    var saveToXmlPath = Path.Combine(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location), path);
    // serialize as T (not ObservableCollection<object>) so the serializer matches the object passed in;
    // a StringWriter (rather than a StreamWriter) is what makes ToString() return the XML text
    var xmlSerializer = new XmlSerializer(typeof(T));
    using (var stringWriter = new StringWriter())
    {
        xmlSerializer.Serialize(stringWriter, obj);
        var xml = stringWriter.ToString();
        File.WriteAllText(saveToXmlPath, xml);
        return xml;
    }
}
public Constructor()
{
var temp = new ObservableCollection<Model> {
new Model { ID = 1, Name = "Name1" },
new Model { ID = 2, Name = "Name2" },
new Model { ID = 3, Name = "Name3" } };
ToXml(temp, @"Common\Assets\XML\XmlFile.xml");
}
It keeps saying that the path is wrong; it keeps adding \bin\Debug to the path.
Set a breakpoint on this line:
var saveToXmlPath = Path.Combine(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location), path);
What does it say?
Assembly.GetEntryAssembly().Location is adding /debug to your path.
Firstly, your "problem" has nothing to do with XML. You want to know why Assembly.GetEntryAssembly().Location is giving you ".../bin/Debug".
Secondly... from what little information you have actually provided, there is in fact no problem at all; just your misunderstanding. Assembly.GetEntryAssembly().Location will give you the location of the executing ".exe" file (your app). In this case, that would indeed be inside the "bin/Debug" folder by default. If you want the XML file written somewhere else, then it would be helpful if you would specify where you think that somewhere else should be.
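If you want the file somewhere predictable instead of bin\Debug, build the path from a folder you control. A minimal sketch (the "MyAppData" folder name here is illustrative, not from the original code):
// Write the XML under the user's Documents folder instead of next to the .exe.
var baseDir = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments);
var saveToXmlPath = Path.Combine(baseDir, "MyAppData", "XmlFile.xml");
Directory.CreateDirectory(Path.GetDirectoryName(saveToXmlPath)); // make sure the folder exists
using (var writer = new StreamWriter(saveToXmlPath))
{
    new XmlSerializer(typeof(ObservableCollection<Model>)).Serialize(writer, temp);
}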
I've hit a wall with this, so hopefully SO can help and I've not overlooked an obvious question answered previously. I'm trying to export data from a ListView (actually SQLite data that populates it via a list) to a new CSV file - no fancy file picker as yet, I just need to save the file locally (it's a Metro 8.1 app, but it's being deployed to a Surface 3, not RT). I've created a method based on examples I've found, but it doesn't seem to write the file (I've searched the local machine after attempting the export but found nothing). It compiles fine and I'm not hitting any exceptions when debugging. I'm also using FileHelpers 2.0, as I couldn't get the current version to install (VS 2015 Community). 'Candidate' is the class for the data source (DB/ListView).
Class:
using SQLite;
using FileHelpers;
namespace SolutionName.Model
{
[Table("Candidates")]
[DelimitedRecord(",")]
[IgnoreEmptyLines()]
[IgnoreFirst()]
public class Candidate
{
[PrimaryKey, AutoIncrement]
public int Id { get; set; }
public string Title { get; set; }
public string FirstName { get; set; }
public string LastName { get; set; }
public string Email { get; set; }
public string Phone { get; set; }
public string AreasInterest { get; set; }
} // end class Candidate
} // end namespace
Method (called by a button):
private void WriteCSVFile(List<Candidate> dataSource)
{
//filehelper object
FileHelperEngine engine = new FileHelperEngine(typeof(Candidate));
List<Candidate> csv = new List<Candidate>();
//convert any datasource to csv based object
foreach (var item in dataSource)
{
Candidate temp = new Candidate();
temp.Title = item.Title;
temp.FirstName = item.FirstName;
temp.LastName = item.LastName;
temp.Email = item.Email;
temp.Phone = item.Phone;
temp.AreasInterest = item.AreasInterest;
csv.Add(temp);
} // end foreach
//give file a name and header text
engine.HeaderText = "Title,FirstName,LastName,Email,Phone,AreaInterest";
//save file locally
engine.WriteFile("export.csv", csv);
} // end method WriteCSVFile
Any pointers would be appreciated.
Testing: Passed
Version 3.2: No issues
Version 2.2: No issues
Using either version of FileHelpers this works as expected. I threw the following code into a test console and it ran through perfectly, so my only suggestion is that you are either not passing it data, or attempting to write to a read-only or invalid location.
Do you see any exceptions in the Output tab of Visual Studio?
Have you confirmed you have data going into the dataSource parameter?
Have you confirmed the full path that you are writing the export.csv to?
Do you have the csv file open in Excel?
Note: having the CSV open in Excel places a full lock on the file, so you must exit Excel or close the file before you can write to it
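If you're unsure about the full path, a quick way to check (in the test-console context below) is to resolve the relative name yourself:
// Where would a relative "export.csv" actually land? GetFullPath resolves
// against the current working directory.
Console.WriteLine(Path.GetFullPath("export.csv"));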
Code:
static void TestMain2(string[] args)
{
List<Candidate> source = new List<Candidate>()
{
new Candidate() { Id = 1, Email = "test1@test.com", Title = "Mr", FirstName = "Fred", LastName = "Flintstone", AreasInterest = "Area1", Phone = "+44 1234 123123" },
new Candidate() { Id = 3, Email = "test2@test.com", Title = "Mr", FirstName = "Barney", LastName = "Rubble", AreasInterest = "Area2", Phone = "+44 1234 231231" },
new Candidate() { Id = 2, Email = "test3@test.com", Title = "Mrs", FirstName = "Wilma", LastName = "Flintstone", AreasInterest = "Area3", Phone = "+44 1234 312312" }
};
WriteCSVFile(source);
}
private static void WriteCSVFile(List<Candidate> dataSource)
{
//filehelper object
FileHelperEngine engine = new FileHelperEngine(typeof(Candidate));
List<Candidate> csv = new List<Candidate>();
//convert any datasource to csv based object
foreach (var item in dataSource)
{
Candidate temp = new Candidate();
temp.Title = item.Title;
temp.FirstName = item.FirstName;
temp.LastName = item.LastName;
temp.Email = item.Email;
temp.Phone = item.Phone;
temp.AreasInterest = item.AreasInterest;
csv.Add(temp);
} // end foreach
//give file a name and header text
engine.HeaderText = "Title,FirstName,LastName,Email,Phone,AreaInterest";
//save file locally
engine.WriteFile("export.csv", csv);
} // end method WriteCSVFile
CSV File
Title,FirstName,LastName,Email,Phone,AreaInterest
0,Mr,Fred,Flintstone,test1@test.com,+44 1234 123123,Area1
0,Mr,Barney,Rubble,test2@test.com,+44 1234 231231,Area2
0,Mrs,Wilma,Flintstone,test3@test.com,+44 1234 312312,Area3
Notes:
The ID column wasn't copied over so this was always zero, but that may just have been because of your sample code.
I believe it's recommended to use the generic FileHelperEngine<T> rather than passing typeof() to the non-generic base class, since this initialises the various methods/properties to work with T rather than plain object (see the sketch after these notes).
You can try downloading the source to FileHelpers and linking your project directly to the library to debug what's going on internally.
You did previously mention that you have a System.*.dll referencing problem; check that you are targeting the full .NET Framework and not a Client Profile, as that may cause the issue. I am not sure whether a Windows 8 universal app allows that, though.
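For reference, the generic engine mentioned above would look roughly like this (a sketch, assuming your FileHelpers version includes the generic FileHelperEngine<T>):
// The engine is typed to Candidate, so WriteFile takes the List<Candidate> directly.
var engine = new FileHelperEngine<Candidate>();
engine.HeaderText = "Title,FirstName,LastName,Email,Phone,AreaInterest";
engine.WriteFile("export.csv", csv);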
I'm working on messaging software, specifically the attachments part.
I can attach files, but when I try to add more, the new ones replace the old ones.
This is the code:
List<string> listaAnexos = new List<string>();
Archivo.Multiselect = true;
Archivo.ShowDialog();
int cAnex = 0;
string[] anexos = Archivo.FileNames;
foreach (string i in anexos)
{
listaAnexos.Add(i);
cAnex++;
}
lbAnexos.DataSource = listaAnexos;
txtCAnex.Text = cAnex.ToString();
Thanks
Assuming the above piece of code is called multiple times, you most likely need to declare listaAnexos outside of your method.
Every time you run the above method, you create a new instance of listaAnexos to add files to, which you then assign to lbAnexos.DataSource, overwriting whatever was in there before.
Declare listaAnexos as a class instance, instead of inside your method.
public class YourClass
{
private List<string> listaAnexos = new List<string>();
private void YourMethod()
{
Archivo.Multiselect = true;
Archivo.ShowDialog();
...
foreach (string i in anexos)
{
listaAnexos.Add(i);
...
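Put together, a fuller sketch (assuming a WinForms form with the OpenFileDialog Archivo, ListBox lbAnexos and TextBox txtCAnex from the question; AgregarAnexos is a hypothetical name for the button handler):
private readonly List<string> listaAnexos = new List<string>();

private void AgregarAnexos() // hypothetical method name
{
    Archivo.Multiselect = true;
    if (Archivo.ShowDialog() != DialogResult.OK) return;

    listaAnexos.AddRange(Archivo.FileNames);

    // re-assign the DataSource so the ListBox picks up the new items
    lbAnexos.DataSource = null;
    lbAnexos.DataSource = listaAnexos;
    txtCAnex.Text = listaAnexos.Count.ToString();
}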
So the current situation is that I have a program built entirely on MEF. Now I want to make it use Rx, so that it can scale to larger queries and let the user look over results as the various plugins return them. It is currently set up as such:
Workflow: Query => DetermineTypes => QueryPlugins => Results
Currently the code is all stored on GitHub if anyone needs to reference more than what I post below.
ALeRT on GitHub
The VS solution has a UI project (the default StartUp project), a PluginFramework project, various TypePlugin projects (these determine what the input's type is, such as a URL, email, file, or phone number) and also QueryPlugin projects (which perform a query if the plugin supports the type that has been determined). All the results are displayed back in the UI via a DataGrid bound to the DefaultView of a DataTable.
I want to make the Rx portion as invisible to the plugins as possible, because I do not want to make writing plugins complex for the few people who will. So I was thinking about taking the current framework below:
public interface IQueryPlugin
{
string PluginCategory { get; }
string Name { get; }
string Version { get; }
string Author { get; }
System.Collections.Generic.List<string> TypesAccepted { get; }
string Result(string input, string type, bool sensitive);
}
and making the Result method into the following:
System.IObservable<string> Result(string input, string type, bool sensitive);
This would naturally require modifying the method that calls the plugin, which currently is:
using (GenericParserAdapter parser = new GenericParserAdapter())
{
using (TextReader sr = new StringReader(qPlugins.Result(query, qType, sensitive)))
{
Random rNum = new Random();
parser.SetDataSource(sr);
parser.ColumnDelimiter = Convert.ToChar(",");
parser.FirstRowHasHeader = true;
parser.MaxBufferSize = 4096;
parser.MaxRows = 500;
parser.TextQualifier = '\"';
DataTable tempTable = parser.GetDataTable();
tempTable.TableName = qPlugins.Name.ToString();
if (!tempTable.Columns.Contains("Query"))
{
DataColumn tColumn = new DataColumn("Query");
tempTable.Columns.Add(tColumn);
tColumn.SetOrdinal(0);
}
foreach (DataRow dr in tempTable.Rows)
{
dr["Query"] = query;
}
if (!resultDS.Tables.Contains(qPlugins.Name.ToString()))
{
resultDS.Tables.Add(tempTable);
}
else
{
resultDS.Tables[qPlugins.Name.ToString()].Merge(tempTable);
}
pluginsLB.DataContext = resultDS.Tables.Cast<DataTable>().Select(t => t.TableName).ToList();
}
}
So at this point I'm stuck as to how to make this work. There doesn't seem to be good documentation on how to integrate MEF with Rx. My assumption is to make the following change
using (TextReader sr = new StringReader(qPlugins.Result(query, qType, sensitive).Subscribe()))
but this isn't going to work, since Subscribe returns an IDisposable rather than the plugin's text output. So any help on making these changes would be greatly appreciated. If you have other suggestions regarding my code, please let me know. I do this as a hobby, so I know my code surely isn't up to snuff for most people.
Would this work for you:
IObservable<DataTable> q =
from text in qPlugins.Result(query, qType, sensitive)
from tempTable in Observable.Using(
() => new GenericParserAdapter(),
parser => Observable.Using(
() => new StringReader(text),
sr => Observable.Start<DataTable>(
() =>
{
var rNum = new Random();
parser.SetDataSource(sr);
parser.ColumnDelimiter = Convert.ToChar(",");
parser.FirstRowHasHeader = true;
parser.MaxBufferSize = 4096;
parser.MaxRows = 500;
parser.TextQualifier = '\"';
var tempTable = parser.GetDataTable();
tempTable.TableName = qPlugins.Name.ToString();
if (!tempTable.Columns.Contains("Query"))
{
DataColumn tColumn = new DataColumn("Query");
tempTable.Columns.Add(tColumn);
tColumn.SetOrdinal(0);
}
foreach (DataRow dr in tempTable.Rows)
dr["Query"] = query;
return tempTable;
})))
select tempTable;
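You would then subscribe to q and marshal back onto the UI thread before touching resultDS; a sketch (assuming WPF and Rx's DispatcherScheduler from System.Reactive.Windows.Threading - the exact member name varies between Rx versions):
q.ObserveOn(DispatcherScheduler.Current) // push results onto the WPF dispatcher
 .Subscribe(tempTable =>
 {
     if (!resultDS.Tables.Contains(tempTable.TableName))
         resultDS.Tables.Add(tempTable);
     else
         resultDS.Tables[tempTable.TableName].Merge(tempTable);
     pluginsLB.DataContext = resultDS.Tables.Cast<DataTable>().Select(t => t.TableName).ToList();
 },
 ex => MessageBox.Show(ex.Message)); // surface plugin failures instead of swallowing them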
I have a problem - previously I was after an algorithm to solve a part of it (see Combine LINQ queries) - and now I have come up against a huge issue.
At around 540k directories, it's crashing out with out of memory. :(
I am trying to process and store the company's SAN file information. We need to do this because we have people who keep data for 25 years when they don't need to, and it's hard to track. It's a total of up to 70 TB of files. So, as you can imagine, it's a lot of files.
From what I've read, however, memory-mapped files can't be dynamic? Is this true? I can't know beforehand how many files and directories there are.
If not (please say not), could someone give me a short example of how to make a dynamic memory-mapped file (code provided in the Combine LINQ queries question)? In short, I create a directory structure in memory holding directory → directories + files (name, size, access date, modified date, and creation date).
Any clues would be appreciated as this would get around my problem if it's possible.
When you can't fit the whole thing into memory, you can stream your data with an IEnumerable. Below is an example of that. I've been playing around with memory-mapped files as well, since I need the last drop of perf, but so far I've stuck with BinaryReader/BinaryWriter.
For the DB advocates: when you really need the last drop of perf, I do my own binary files as well. Going out of process to a DB really adds overhead, and the whole security, logging, ACID etc. adds up too.
Here's an example that streams your f_results class.
EDIT
Updated the example to show how to write/read a tree of directory info. I keep one file that holds all the directories. This tree is loaded into memory in one go, and it then points to the files where all the f_results are.
You still have to create a separate file per directory that holds the f_results for all the files. How to do that depends on your code, but you should be able to figure it out (there's a small sketch after the code below).
Good luck!
public class f_results {
public String name { get; set; }
public DateTime cdate { get; set; }
public DateTime mdate { get; set; }
public DateTime adate { get; set; }
public Int64 size { get; set; }
// write one to a file
public void WriteTo(BinaryWriter wrtr) {
wrtr.Write(name);
wrtr.Write(cdate.Ticks);
wrtr.Write(mdate.Ticks);
wrtr.Write(adate.Ticks);
wrtr.Write(size);
}
// read one from a file
public f_results(BinaryReader rdr) {
name = rdr.ReadString();
cdate = new DateTime(rdr.ReadInt64());
mdate = new DateTime(rdr.ReadInt64());
adate = new DateTime(rdr.ReadInt64());
size = rdr.ReadInt64();
}
// stream a whole file as an IEnumerable (so very little memory needed)
public static IEnumerable<f_results> FromFile(string dataFilePath) {
// using blocks guarantee the file is closed even if the caller stops enumerating early
using (var file = new FileStream(dataFilePath, FileMode.Open))
using (var rdr = new BinaryReader(file)) {
var eos = rdr.BaseStream.Length;
while (rdr.BaseStream.Position < eos) yield return new f_results(rdr);
}
}
}
class Program {
static void Main(string[] args) {
var d1 = new DirTree(@"C:\",
new DirTree(@"C:\Dir1",
new DirTree(@"C:\Dir1\Dir2"),
new DirTree(@"C:\Dir1\Dir3")
),
new DirTree(@"C:\Dir4",
new DirTree(@"C:\Dir4\Dir5"),
new DirTree(@"C:\Dir4\Dir6")
));
var path = @"D:\Dirs.dir";
// write the directory tree to a file
var file = new FileStream(path, FileMode.Create); // FileMode values can't be OR-ed together; Create means create-or-truncate
var w = new BinaryWriter(file);
d1.WriteTo(w);
w.Close();
file.Close();
// read it from the file
var file2 = new FileStream(path, FileMode.Open);
var rdr = new BinaryReader(file2);
var d2 = new DirTree(rdr);
// now inspect d2 in debugger to see that it was read back into memory
// find files bigger than (roughly) 1GB
var BigFiles = from f in f_results.FromFile(@"C:\SomeFile.dat")
where f.size > 1e9
select f;
}
}
class DirTree {
public string Path { get; private set; }
private string FilesFile { get { return Path.Replace(':', '_').Replace('\\', '_') + ".dat"; } }
public IEnumerable<f_results> Files() {
return f_results.FromFile(this.FilesFile);
}
// you'll want to encapsulate this in real code but I didn't for brevity
public DirTree[] _SubDirectories;
public DirTree(BinaryReader rdr) {
Path = rdr.ReadString();
int count = rdr.ReadInt32();
_SubDirectories = new DirTree[count];
for (int i = 0; i < count; i++) _SubDirectories[i] = new DirTree(rdr);
}
public DirTree( string Path, params DirTree[] subDirs){
this.Path = Path;
_SubDirectories = subDirs;
}
public void WriteTo(BinaryWriter w) {
w.Write(Path);
w.Write(_SubDirectories.Length);
// depth first is the easiest way to do this
foreach (var f in _SubDirectories) f.WriteTo(w);
}
}
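For the per-directory files mentioned in the edit above, a minimal sketch (WriteFilesFile is my name for it, not part of the original; the path would come from the same naming scheme as DirTree.FilesFile):
// Persist all f_results for one directory; FromFile streams them back one at a time.
static void WriteFilesFile(string filesFilePath, IEnumerable<f_results> files) {
    using (var fs = new FileStream(filesFilePath, FileMode.Create))
    using (var w = new BinaryWriter(fs)) {
        foreach (var f in files) f.WriteTo(w);
    }
}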
}