Due to problems running .xlsm files over the network, it was decided to drop VBA and develop standalone apps that edit regular Excel files.
Since I have some C# and Visual Studio knowledge, I decided to use those tools, and since Interop.Excel is really slow, I chose SpreadsheetLight.
Everything went smoothly while reading and analyzing data, but after I add some records and save the file, the file becomes corrupted: when trying to open it with Excel I get the following message:
"We found a problem with some content. Do you want us to recover as much as we can? If you trust the source of this workbook, click Yes." After clicking Yes, I get a message that the workbook cannot be recovered because it is corrupt.
Even if I don't add any records and just save, the file gets corrupted.
The thing is that the file opens without any issues in OpenOffice, and all the records are there.
Any help will be appreciated!
Below is the class that implements reading and writing of the Excel file:
class SPREADSHEET_TOOLS
{
    public string file_name;
    public SLDocument doc;
    public List<string> sheets;
    MemoryStream ms;

    public SPREADSHEET_TOOLS()
    {
    }

    public bool init(string _file_name)
    {
        this.file_name = _file_name;
        ms = new MemoryStream();
        try
        {
            FileStream stream = File.Open(this.file_name, FileMode.Open);
            this.doc = new SLDocument(stream);
            this.sheets = doc.GetSheetNames();
            stream.Close();
        }
        catch (IOException)
        {
            MessageBox.Show("The file is open by another user. It cannot be accessed!");
            return false;
        }
        return true;
    }

    public List<string> getUniqeRowValues(string sheet, int row)
    {
        List<string> values = new List<string>();
        if (this.sheets.Contains(sheet))
        {
            this.doc.SelectWorksheet(sheet);
            // Walk down column 1, collecting distinct values until the first empty cell.
            while (this.doc.GetCellValueAsString(row, 1) != "")
            {
                if (!values.Contains(this.doc.GetCellValueAsString(row, 1)))
                {
                    values.Add(this.doc.GetCellValueAsString(row, 1));
                }
                row++;
            }
        }
        return values;
    }

    public List<string> getChildValues(string sheet, string parent, int row, int column_parent, int column_child)
    {
        List<string> values = new List<string>();
        if (this.sheets.Contains(sheet))
        {
            this.doc.SelectWorksheet(sheet);
            while (this.doc.GetCellValueAsString(row, column_parent) != "")
            {
                if (this.doc.GetCellValueAsString(row, column_parent) == parent)
                {
                    values.Add(this.doc.GetCellValueAsString(row, column_child));
                }
                row++;
            }
        }
        return values;
    }

    public int getLastRow(string sheet)
    {
        int row = 0;
        if (this.sheets.Contains(sheet))
        {
            this.doc.SelectWorksheet(sheet);
            row = 1;
            while (this.doc.GetCellValueAsString(row, 1) != "")
            {
                row++;
            }
        }
        return row;
    }

    public bool writeRow(string[] data, string sheet, int row)
    {
        if (this.sheets.Contains(sheet))
        {
            this.doc.SelectWorksheet(sheet);
            for (int i = 0; i < data.Length; i++)
            {
                InlineString str = new InlineString();
                //bool a = this.doc.SetCellValue(row, i + 1, data[i]);
            }
            //this.doc.SaveAs(this.ms);
            foreach (string s in this.sheets)
            {
                this.doc.SelectWorksheet(s);
            }
            this.doc.DocumentProperties.Creator = "CP";
            this.doc.SaveAs("E:\\C-SHARP\\PONTAJ\\PONTAJ\\BUBU.XLSX");
            MessageBox.Show("Saved!");
            return true;
        }
        return false;
    }
}
I also faced the same problem: the Excel file gets corrupted after downloading.
So I made some fixes and updated the SpreadsheetLight code to .NET 6.
You can download the source code from here: https://github.com/bhavinvachhani403/SpreadSheetLight_Net6.0
I hope this helps you solve your problem.
I had the same error and I solved it by changing the version of DocumentFormat.OpenXml to version 2.5.
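If you manage packages from the NuGet Package Manager Console, the downgrade would look something like this (that 2.5.0 is the matching package version is my assumption):
Install-Package DocumentFormat.OpenXml -Version 2.5.0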
I am trying to write some string to a file, and here is the code:
private static void WriteSelfDefinedFile(string msg)
{
    ArrayList logTypes = SelfDefinedLogRule(msg);
    // Open a StreamWriter for each log file
    StreamWriter[] sws = new StreamWriter[selfDefinedLogFileName.Length];
    for (int i = 0; i < selfDefinedLogFileName.Length; i++)
    {
        sws[i] = new StreamWriter(selfDefinedLogDir + selfDefinedLogFileName[i], true);
        Debug.Log("111");
    }
    // The actual writing happens here; not important for this question, I think.
    foreach (SelfDefinedLogType temp in logTypes)
    {
        int index = (int)temp;
        foreach (SelfDefinedLogType n in Enum.GetValues(typeof(SelfDefinedLogType)))
        {
            if (temp == n && sws[index] != null)
            {
                sws[index].WriteLine(System.DateTime.Now.ToString() + ":\t" + msg);
                break;
            }
            else if (sws[index] == null)
            {
                LogError("File " + selfDefinedLogFileName[index] + " open fail");
            }
        }
    }
    // Close all the StreamWriters here
    for (int i = 0; i < selfDefinedLogFileName.Length; i++)
    {
        sws[i].Close();
        Debug.Log("222");
    }
}
When the client sends a msg to the server, or the server replies with some msg to the client, this function is called to write messages to some files.
And Unity sometimes throws IOException: Sharing violation on path D:\SelfDefinedLogs\SentAndReceiveMsg.txt. I mean, it doesn't happen every time I write.
I close all the opened StreamWriters at the end of the function, but the problem still comes up. I am really confused. I would be grateful if anybody could suggest a solution to this problem.
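For what it's worth, one way to rule out a leaked handle is to scope each writer in a using block, so it is disposed even when an exception interrupts the method. This is only a sketch of the idea (WriteLogEntry and the FileShare choice are my assumptions, not the original code):
private static void WriteLogEntry(string path, string msg)
{
    // FileMode.Append keeps the existing log; FileShare.Read lets other
    // processes read the file while this one holds the write handle.
    using (var fs = new FileStream(path, FileMode.Append, FileAccess.Write, FileShare.Read))
    using (var sw = new StreamWriter(fs))
    {
        sw.WriteLine(System.DateTime.Now.ToString() + ":\t" + msg);
    }
    // Disposing here releases the handle immediately, so a later call
    // (or another thread) won't hit a sharing violation on the same path.
}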
I have a very large file, almost 2GB in size. I am trying to write a process that reads the file in and writes it out without the first row. So far I have only been able to read and write one line at a time, which takes forever. I can open it, remove the first row, and save it faster in TextPad, though that is still very slow.
I use this code to get the number of records in the file:
private long getNumRows(string strFileName)
{
    long lngNumRows = 0;
    string strMsg;
    try
    {
        using (var strReader = File.OpenText(strFileName))
        {
            while (strReader.ReadLine() != null)
            {
                lngNumRows++;
            }
        }
    }
    catch (Exception excExcept)
    {
        strMsg = "The File could not be read: ";
        strMsg += excExcept.Message;
        System.Windows.MessageBox.Show(strMsg);
        //Console.WriteLine("There was an error reading the file: ");
        //Console.WriteLine(excExcept.Message);
        //Console.ReadLine();
    }
    return lngNumRows;
}
This only takes seconds to run. When I add the following code it takes forever to run. Am I doing something wrong? Why does the write add so much time? Any ideas on how I can make this faster?
private void ProcessTextFiles(string strFileName)
{
    string strDataLine;
    string strFullOutputFileName;
    string strSubFileName;
    int intPos;
    long lngTotalRows = 0;
    long lngCurrNumRows = 0;
    long lngModNumber = 0;
    double dblProgress = 0;
    double dblProgressPct = 0;
    string strPrgFileName = "";
    string strOutName = "";
    string strMsg;
    long lngFileNumRows;
    try
    {
        using (StreamReader srStreamRdr = new StreamReader(strFileName))
        {
            while ((strDataLine = srStreamRdr.ReadLine()) != null)
            {
                lngCurrNumRows++;
                if (lngCurrNumRows > 1)
                {
                    WriteDataRow(strDataLine, strFullOutputFileName);
                }
            }
        }
    }
    catch (Exception excExcept)
    {
        strMsg = "The File could not be read: ";
        strMsg += excExcept.Message;
        System.Windows.MessageBox.Show(strMsg);
        //Console.WriteLine("The File could not be read:");
        //Console.WriteLine(excExcept.Message);
    }
}
public void WriteDataRow(string strDataRow, string strFullFileName)
{
    //using (StreamWriter file = new StreamWriter(strFullFileName, true, Encoding.GetEncoding("iso-8859-1")))
    using (StreamWriter file = new StreamWriter(strFullFileName, true, System.Text.Encoding.UTF8))
    {
        file.WriteLine(strDataRow);
    }
}
Not sure how much this will improve the performance, but surely opening and closing the output file for every line you want to write is not a good idea.
Instead, open both files just once and write each line directly:
using (StreamWriter file = new StreamWriter(strFullFileName, true, System.Text.Encoding.UTF8))
using (StreamReader srStreamRdr = new StreamReader(strFileName))
{
    while ((strDataLine = srStreamRdr.ReadLine()) != null)
    {
        lngCurrNumRows++;
        if (lngCurrNumRows > 1)
            file.WriteLine(strDataLine);
    }
}
You could also remove the check on lngCurrNumRows entirely by making one throwaway read before entering the while loop:
strDataLine = srStreamRdr.ReadLine();
if (strDataLine != null)
{
    while ((strDataLine = srStreamRdr.ReadLine()) != null)
    {
        file.WriteLine(strDataLine);
    }
}
Depending on the memory of your machine, you could try the following (my big file was "D:\savegrp.log"; I had a 2GB file knocking about). This used about 6GB of memory when I tried it:
int counter = File.ReadAllLines(@"D:\savegrp.log").Length;
Console.WriteLine(counter);
It does depend on the memory available...
File.WriteAllLines(@"D:\savegrp2.log", File.ReadAllLines(@"D:\savegrp.log").Skip(1));
Console.WriteLine("file saved");
After importing plenty of XML files into the application, I tried to modify them using the XmlDocument class; for this I created a few methods to do the modifications.
The thing is, the first method works fine, but when it comes to the second one it throws a System.IO exception like "File is already being used by another process".
So can anyone help me out with how to solve this issue?
Sample code of what I'm doing:
Method1(fileList);
Method2(fileList);
Method3(fileList);
private void Method1(IList<RenamedImportedFileInfo> fileList)
{
    try
    {
        string isDefaultAttribute = Resource.Resources.ImportIsDefaultAttribute;
        string editorsPath = editorsFolderName + Path.DirectorySeparatorChar + meterType;
        string profilesPath = profileFolderName + Path.DirectorySeparatorChar + meterType;
        string strUriAttribute = Resource.Resources.ImportUriAttribute;
        foreach (RenamedImportedFileInfo renameInfo in fileList)
        {
            if (renameInfo.NewFilePath.ToString().Contains(editorsPath) && (renameInfo.IsProfileRenamed != true))
            {
                var xmldoc = new XmlDocument();
                xmldoc.Load(renameInfo.NewFilePath);
                if (xmldoc.DocumentElement.HasAttribute(isDefaultAttribute))
                {
                    xmldoc.DocumentElement.Attributes[isDefaultAttribute].Value = Resource.Resources.ImportFalse;
                }
                XmlNodeList profileNodes = xmldoc.DocumentElement.GetElementsByTagName(Resource.Resources.ImportMeasurementProfileElement);
                if (profileNodes.Count == 0)
                {
                    profileNodes = xmldoc.DocumentElement.GetElementsByTagName(Resource.Resources.ImportBsMeasurementProfileElement);
                }
                if (profileNodes.Count > 0)
                {
                    foreach (RenamedImportedFileInfo profileName in oRenamedImportedFileList)
                    {
                        if (profileName.NewFilePath.ToString().Contains(profilesPath))
                        {
                            if (string.Compare(Path.GetFileName(profileName.OldFilePath), Convert.ToString(profileNodes[0].Attributes[strUriAttribute].Value, CultureInfo.InvariantCulture), StringComparison.OrdinalIgnoreCase) == 0)
                            {
                                profileNodes[0].Attributes[strUriAttribute].Value = Path.GetFileName(profileName.NewFilePath);
                                renameInfo.IsProfileRenamed = true;
                                break;
                            }
                        }
                    }
                }
                xmldoc.Save(renameInfo.NewFilePath);
                xmldoc = null;
                profileNodes = null;
            }
        }
        oRenamedImportedFileList = null;
    }
    catch (NullReferenceException nullException) { LastErrorMessage = nullException.Message; }
}
Thanks,
Raj
You are probably opening the same file twice in your application. Before you can open it again, you have to close it (or leave it open and work on the same document without opening it again).
For help on how to implement this, please show us more code so we can give you advice.
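In the meantime, purely as an illustration, one pattern that guarantees the file handle is released between methods is to load and save through explicitly-scoped streams (a minimal sketch; path and the modification step are placeholders, not the asker's code):
var xmldoc = new XmlDocument();

// Load inside a using block so the read handle closes as soon as parsing is done.
using (var readStream = File.Open(path, FileMode.Open, FileAccess.Read, FileShare.Read))
{
    xmldoc.Load(readStream);
}

// ... modify xmldoc here ...

// Save through a second, separately scoped stream; no handle overlaps the read.
using (var writeStream = File.Open(path, FileMode.Create, FileAccess.Write, FileShare.None))
{
    xmldoc.Save(writeStream);
}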
Firstly, I'd just like to mention that I only started learning C# a few days ago, so my knowledge of it is limited.
I'm trying to create a program that will parse text files for certain phrases input by the user and then output them into a new text document.
At the moment, the program searches the original input file, gathers the lines matching the text input by the user, copies those lines out into new text files, then merges them together and deletes the intermediate files afterwards.
I'm guessing this is not the most efficient way of doing it, but I built it so that it works in a manner that is logical for me to understand as a novice.
The code is as follows:
private void TextInput1()
{
    using (StreamReader fileOpen = new StreamReader(txtInput.Text))
    using (StreamWriter fileWrite = new StreamWriter(@"*DIRECTORY*\FIRSTFILE.txt"))
    {
        string file;
        while ((file = fileOpen.ReadLine()) != null)
        {
            if (file.Contains(txtFind.Text))
            {
                fileWrite.Write(file + "\r\n");
            }
        }
    }
}

private void TextInput2()
{
    using (StreamReader fileOpen = new StreamReader(txtInput.Text))
    using (StreamWriter fileWrite = new StreamWriter(@"*DIRECTORY*\SECONDFILE.txt"))
    {
        string file;
        while ((file = fileOpen.ReadLine()) != null)
        {
            if (file.Contains(txtFind2.Text))
            {
                fileWrite.Write("\r\n" + file);
            }
        }
    }
}

private static void Combination()
{
    using (StreamWriter writer = File.CreateText(@"*DIRECTORY*\FINALOUTPUT.txt"))
    {
        using (StreamReader reader = File.OpenText(@"*DIRECTORY*\FIRSTFILE.txt"))
        {
            writer.Write(reader.ReadToEnd());
        }
        using (StreamReader reader = File.OpenText(@"*DIRECTORY*\SECONDFILE.txt"))
        {
            writer.Write(reader.ReadToEnd());
        }
    }
}

private static void Delete()
{
    if (File.Exists(@"*DIRECTORY*\FIRSTFILE.txt"))
    {
        File.Delete(@"*DIRECTORY*\FIRSTFILE.txt");
    }
    if (File.Exists(@"*DIRECTORY*\SECONDFILE.txt"))
    {
        File.Delete(@"*DIRECTORY*\SECONDFILE.txt");
    }
}
The output file that is being created simply contains all of the first input followed by all of the second. I am wondering whether it is possible to merge them into one file one line at a time, interleaving them, since the result needs to alternate between input 1 and input 2 rather than contain all of 1 and then all of 2.
Thanks, Neil.
To combine the two files' content into one merged file line by line, you could substitute your Combination() code with this:
string[] file1 = File.ReadAllLines(@"*DIRECTORY*\FIRSTFILE.txt");
string[] file2 = File.ReadAllLines(@"*DIRECTORY*\SECONDFILE.txt");
using (StreamWriter writer = File.CreateText(@"*DIRECTORY*\FINALOUTPUT.txt"))
{
    int lineNum = 0;
    while (lineNum < file1.Length || lineNum < file2.Length)
    {
        if (lineNum < file1.Length)
            writer.WriteLine(file1[lineNum]);
        if (lineNum < file2.Length)
            writer.WriteLine(file2[lineNum]);
        lineNum++;
    }
}
This doesn't assume that the two files contain the same number of lines.
Try this method. It receives three paths: file 1, file 2, and the output file.
public void MergeFiles(string pathFile1, string pathFile2, string pathResult)
{
    File.WriteAllText(pathResult, File.ReadAllText(pathFile1) + File.ReadAllText(pathFile2));
}
If the pathResult file exists, the WriteAllText method will overwrite it. Remember to include the System.IO namespace.
Important: it is not recommended for large files! Use the other options available in this thread.
If your input files are quite large and you run out of memory, you could also try wrapping the two readers like this:
using (StreamWriter writer = File.CreateText(@"*DIRECTORY*\FINALOUTPUT.txt"))
using (StreamReader reader1 = File.OpenText(@"*DIRECTORY*\FIRSTFILE.txt"))
using (StreamReader reader2 = File.OpenText(@"*DIRECTORY*\SECONDFILE.txt"))
{
    string line1 = null;
    string line2 = null;
    while ((line1 = reader1.ReadLine()) != null)
    {
        writer.WriteLine(line1);
        line2 = reader2.ReadLine();
        if (line2 != null)
        {
            writer.WriteLine(line2);
        }
    }
}
Still, you have to have an idea of how many lines your input files have, but I think it gives you the general idea of how to proceed.
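If you can't estimate the lengths in advance, a small variation on the loop above (a sketch under the same using blocks) drains both readers so the tail of the longer file isn't dropped:
// Keep pulling from both readers until both are exhausted;
// ReadLine() returns null once a file has no lines left.
string line1, line2;
do
{
    line1 = reader1.ReadLine();
    line2 = reader2.ReadLine();
    if (line1 != null)
        writer.WriteLine(line1);
    if (line2 != null)
        writer.WriteLine(line2);
} while (line1 != null || line2 != null);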
Using a FileInfo extension you could merge one or more files by doing the following:
public static class FileInfoExtensions
{
    public static void MergeFiles(this FileInfo fi, string strOutputPath, params string[] filesToMerge)
    {
        var fiLines = File.ReadAllLines(fi.FullName).ToList();
        fiLines.AddRange(filesToMerge.SelectMany(file => File.ReadAllLines(file)));
        File.WriteAllLines(strOutputPath, fiLines.ToArray());
    }
}
Usage
FileInfo fi = new FileInfo("input");
fi.MergeFiles("output", "File2", "File3");
I appreciate this question is almost old enough to (up)vote (itself), but for an extensible approach:
const string FileMergeDivider = "\n\n";

public void MergeFiles(string outputPath, params string[] inputPaths)
{
    if (!inputPaths.Any())
        throw new ArgumentException(nameof(inputPaths) + " required");
    if (inputPaths.Any(string.IsNullOrWhiteSpace) || !inputPaths.All(File.Exists))
        throw new ArgumentNullException(nameof(inputPaths), "contains invalid path(s)");

    File.WriteAllText(outputPath, string.Join(FileMergeDivider, inputPaths.Select(File.ReadAllText)));
}
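Called from the same class, usage would look something like this (paths are illustrative):
MergeFiles(@"C:\temp\merged.txt", @"C:\temp\first.txt", @"C:\temp\second.txt");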
I am working with a SQL DB that stores Excel files (along with other file types such as PDF) as binary data. I use the following code to extract these files onto the file system.
The Problem:
PDF files come out just fine. But the Excel files, once created, crash Excel when I try to open them or just give me garbage text.
I am using this code from the previous developer who wrote this app for retrieving files. The code uses OpenMcdf, which I don't fully understand because I couldn't find useful online documentation for it.
//execution starts here
public override void SaveToDisk()
{
    byte[] keys = { (byte)0xd0, (byte)0xcf };
    //Searches through m_RawOleObject for the combination of 'keys'
    int offset = Utils.SearchBytes(m_RawOleObject, keys); //returns '60' in case of Excel and '66' in case of Pdf
    //m_RawOleObject contains the data from the sqlDataReader (the binary data from the column)
    m_RawOleObject = strip(m_RawOleObject, offset);
    MemoryStream ms = new MemoryStream(m_RawOleObject);
    CompoundFile cf = new CompoundFile(ms);
    GetStorageByName(cf.RootStorage, m_StorageName);
    if (Storage != null)
    {
        if (Storage is CFStream)
        {
            m_RawOleObject = (Storage as CFStream).GetData();
        }
        m_filename = System.IO.Path.Combine(STOREPATH, Utils.CombineFilenameWithExtension(Filename, m_extension));
        WriteToFile(m_filename, m_RawOleObject);
    }
}

protected void WriteToFile(string fn, byte[] obj)
{
    fn = GetNextAvailableFilename(fn, 0);
    FileStream fs = new FileStream(fn, FileMode.Create);
    BinaryWriter writer = new BinaryWriter(fs);
    writer.Write(obj);
    writer.Close();
    fs.Close();
    fs.Dispose();
}

protected void GetStorageByName(CFStorage cfs, string name)
{
    VisitedEntryAction va = delegate(CFItem target)
    {
        if (target is CFStorage)
        {
            GetStorageByName((CFStorage)target, name);
        }
        else
        {
            if (target.Name == name)
                Storage = target;
        }
    };
    //Visit NON-recursively (first level only)
    cfs.VisitEntries(va, false);
}
Any ideas what's happening here? Why are the Excel files corrupted? I couldn't find much online despite hours of searching.
Any ideas, suggestions, or solutions will be appreciated.
Thanks
Change your SaveToDisk logic as follows:
public override void SaveToDisk()
{
    byte[] keys = { (byte)0xd0, (byte)0xcf, (byte)0x11, (byte)0xe0, (byte)0xa1, (byte)0xb1, (byte)0x1a, (byte)0xe1 };
    int offset = Utils.SearchBytes(m_RawOleObject, keys);
    using (MemoryStream ms = new MemoryStream(strip(m_RawOleObject, offset)))
    {
        CompoundFile cf = new CompoundFile(ms, UpdateMode.ReadOnly, true, true);
        m_filename = GetNextAvailableFilename(System.IO.Path.Combine(STOREPATH, Utils.CombineFilenameWithExtension(Filename, m_extension)), 0);
        using (var fs = new FileStream(m_filename, FileMode.Create))
        {
            cf.Save(fs);
            cf.Close();
        }
    }

    //Workbook would be saved as hidden in the previous step
    Microsoft.Office.Interop.Excel.Application xlApp = null;
    Microsoft.Office.Interop.Excel.Workbook xlWb = null;
    try
    {
        xlApp = new Microsoft.Office.Interop.Excel.Application();
        xlWb = xlApp.Workbooks.Open(m_filename);
        xlWb.CheckCompatibility = false;
        foreach (Window wn in xlApp.Windows)
        {
            wn.Visible = true;
        }
        xlWb.Save();
        xlWb.Close();
    }
    catch (Exception e)
    {
        //TODO: Log error and continue
    }
    finally
    {
        if (xlWb != null)
            Marshal.ReleaseComObject(xlWb);
        if (xlApp != null)
            Marshal.ReleaseComObject(xlApp);
        xlApp = null;
    }
}