Problems with PDB file through CodeDom - C#

I have been creating a plugin model where code is written on the server, compiled into a DLL, and the output downloaded to the client side.
This works well, but since we also want debugging support for the code generated on the server, I tried the following code to produce the files to download to the client.
public OutputResponse GetObject(string fileName, string sourceCode)
{
    string[] assemblies = this.GetAssemblies();
    string codeLanguage = "cs";

    var dllFilePath = Path.Combine(Path.GetTempPath(), fileName + ".dll");
    var pdbFilePath = Path.Combine(Path.GetTempPath(), fileName + ".pdb");

    // Delete existing files
    if (File.Exists(dllFilePath))
        File.Delete(dllFilePath);
    if (File.Exists(pdbFilePath))
        File.Delete(pdbFilePath);

    Dictionary<string, string> compilerInfo = new Dictionary<string, string>();
    compilerInfo.Add("CompilerVersion", "v4.0");
    CodeDomProvider provider = CodeDomProvider.CreateProvider(codeLanguage, compilerInfo);

    var compilerParameter = new CompilerParameters();
    compilerParameter.WarningLevel = 3;
    compilerParameter.GenerateExecutable = false;
    compilerParameter.GenerateInMemory = false;
    compilerParameter.IncludeDebugInformation = true;
    compilerParameter.CompilerOptions = "/debug:pdbonly";
    compilerParameter.TempFiles = new TempFileCollection(Environment.GetEnvironmentVariable("TEMP"), true);
    compilerParameter.TempFiles.KeepFiles = true;
    compilerParameter.OutputAssembly = dllFilePath;

    foreach (var assembly in assemblies)
        compilerParameter.ReferencedAssemblies.Add(assembly);

    var results = provider.CompileAssemblyFromSource(compilerParameter, sourceCode);

    string sourceFile = string.Empty;
    string pdbFile = Path.Combine(
        Path.GetDirectoryName(results.PathToAssembly),
        string.Concat(Path.GetFileNameWithoutExtension(results.PathToAssembly), ".pdb"));

    foreach (string file in results.TempFiles)
    {
        var extension = Path.GetExtension(file);
        switch (extension)
        {
            case ".cs":
                sourceFile = file;
                break;
        }
    }

    var response = new OutputResponse(results.PathToAssembly, pdbFile, sourceFile);
    return response;
}
The DLL is generated correctly; I then rename the PDB and the source file to match the DLL name and download them to the client folder.
Now, when a method is called in the application that plugs in the DLL, Visual Studio cannot attach the debugger. It says "A matching symbol file was not found in this folder".
Can anyone help me solve this problem?
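A note on what Visual Studio is checking here: the compiler stamps the DLL and its PDB with a matching GUID, so only the PDB produced by that exact compilation will ever match. Renaming a file does not change its GUID, but shipping a PDB from a different compilation (for example, one picked up from the kept temp files) fails with exactly this message. A minimal sketch of one way to sidestep the renaming entirely, assuming the same fileName/sourceCode inputs as above (this is a hedged suggestion, not the original method): compile straight to the final name with full debug info, so the generated .pdb already matches the .dll and nothing needs to be renamed afterwards.
using System.CodeDom.Compiler;
using System.IO;

static CompilerResults CompileWithSymbols(string fileName, string sourceCode, string[] references)
{
    var provider = CodeDomProvider.CreateProvider("cs");
    var parameters = new CompilerParameters(references)
    {
        GenerateExecutable = false,
        GenerateInMemory = false,                   // must be false to get a PDB on disk
        IncludeDebugInformation = true,
        CompilerOptions = "/debug:full /optimize-", // full symbols, no optimizations
        OutputAssembly = Path.Combine(Path.GetTempPath(), fileName + ".dll")
    };
    // The .pdb is emitted next to OutputAssembly with the same base name,
    // so the dll/pdb pair can be deployed as-is.
    return provider.CompileAssemblyFromSource(parameters, sourceCode);
}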

Related

How to access embedded resources inside a merged DLL?

I have managed to merge several DLL files into one DLL as embedded resources, using the AssemblyResolve event, but when I try to use that DLL in another console application, I cannot access the embedded resources the merged DLL contains. For example, the merged DLL contains the DLL of a class library I need to use, and I want to access the public methods/interfaces etc. that this class library exposes, but it seems I cannot reach them through the merged DLL. How can this issue be resolved?
static Dictionary<string, Assembly> dic = null;

static void Main(string[] args)
{
    // path below is a placeholder: the Resources folder inside the console application,
    // which holds all DLLs that are set as embedded resources
    var filesList = Directory.EnumerateFiles(/* path to Resources folder */);
    foreach (var file in filesList)
    {
        var fname = Path.GetFileNameWithoutExtension(file);
        var embRes = "ConsoleAppTest.Resources." + fname + ".dll";
        Console.WriteLine(embRes);
        Load(embRes, fname + ".dll");
    }
    AppDomain.CurrentDomain.AssemblyResolve += new ResolveEventHandler(CurrentDomain_AssemblyResolve);
    Console.ReadKey();
}

static Assembly CurrentDomain_AssemblyResolve(object sender, ResolveEventArgs args)
{
    return GetAssembly(args.Name);
}

static void Load(string embeddedResource, string fileName)
{
    if (dic == null)
        dic = new Dictionary<string, Assembly>();

    byte[] assemblyData = null;
    Assembly asm = null;
    Assembly curAsm = Assembly.GetExecutingAssembly();

    using (Stream stream = curAsm.GetManifestResourceStream(embeddedResource))
    {
        // Either the file does not exist or it is not marked as an embedded resource
        if (stream == null)
            throw new Exception(embeddedResource + " is not found in Embedded Resources.");

        // Read the embedded resource into a byte[]
        assemblyData = new byte[(int)stream.Length];
        stream.Read(assemblyData, 0, (int)stream.Length);
        try
        {
            asm = Assembly.Load(assemblyData);
            // Add the assembly/dll to the dictionary
            dic.Add(asm.FullName, asm);
            Console.WriteLine(asm.FullName);
            return;
        }
        catch
        {
            // Purposely do nothing.
            // An unmanaged dll or assembly cannot be loaded directly from byte[];
            // let the process fall through to the next part.
        }
    }

    bool fileOk = false;
    string tempFile = "";

    // Load unmanaged assemblies by writing them to a temp file first
    using (SHA1CryptoServiceProvider sha1 = new SHA1CryptoServiceProvider())
    {
        string fileHash = BitConverter.ToString(sha1.ComputeHash(assemblyData)).Replace("-", string.Empty);
        tempFile = Path.GetTempPath() + fileName;
        if (File.Exists(tempFile))
        {
            byte[] bb = File.ReadAllBytes(tempFile);
            string fileHash2 = BitConverter.ToString(sha1.ComputeHash(bb)).Replace("-", string.Empty);
            fileOk = (fileHash == fileHash2);
        }
        else
        {
            fileOk = false;
        }
    }

    if (!fileOk)
    {
        System.IO.File.WriteAllBytes(tempFile, assemblyData);
    }

    asm = Assembly.LoadFile(tempFile);
    dic.Add(asm.FullName, asm);
    Console.WriteLine(asm.FullName);
}

static Assembly GetAssembly(string assemblyFullName)
{
    if (dic == null || dic.Count == 0)
        return null;
    if (dic.ContainsKey(assemblyFullName))
        return dic[assemblyFullName];
    return null;
}
This console application targets .NET Framework 4.5.1 and, when I build the solution, it produces an .exe file. I then change the project's output type to class library to get a DLL instead of an exe and build again. After I get this merged DLL, ConsoleAppTest.dll, I add a reference to it in another test project, but I cannot access the DLLs that ConsoleAppTest contains. For example, if it contains ClientLibrary.dll, I want to access the public methods/interfaces of that ClientLibrary.dll:
using ConsoleAppTest;
// code to access public methods/interfaces of a DLL that is inside ConsoleAppTest.dll, referenced by the using statement above
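For what it's worth, a likely explanation: embedding an assembly as a resource only makes it available at runtime (via AssemblyResolve); the compiler never sees it, so its public types cannot be referenced at compile time through the merged DLL. A common pattern, sketched below under that assumption, is to reference ClientLibrary.dll directly in the consuming project at compile time (with Copy Local = false so the file is not deployed) and let the merged DLL's resolve handler supply it at runtime. The handler must also be registered before any method touching those types is JIT-compiled. All names below (Loader.Initialize, ClientService) are hypothetical placeholders, not real APIs from the question.
using System;
using ClientLibrary; // compile-time reference only (Copy Local = false)

class Consumer
{
    static void Main()
    {
        ConsoleAppTest.Loader.Initialize(); // hypothetical: registers AssemblyResolve first
        Run();                              // keep usage out of Main so JIT happens after Initialize
    }

    static void Run()
    {
        var client = new ClientService();   // hypothetical public type from ClientLibrary.dll
        Console.WriteLine(client.Ping());   // hypothetical method, for illustration only
    }
}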

Rally C#: How to upload a collection of attachments and associate with a user story?

I have referred to the other examples on this website, but found a major difference in my method (please be patient).
I am trying to iterate over a directory of files and upload each file as an attachment associated with a user story.
As of now, I am only able to attach one file per user story.
I see that every attachment has to be encoded to a Base64 string, and it must have the size in bytes.
Here is my code so far:
public void createUsWithAttachmentList(string workspace, string project, string userStoryName, string userStoryDescription)
{
    //authentication
    this.EnsureRallyIsAuthenticated();

    //DynamicJsonObject for AttachmentContent
    DynamicJsonObject myAttachmentContent = new DynamicJsonObject();

    //Length calculated from the Base64 string converted back
    int imageNumberBytes = 0;

    //User story setup
    DynamicJsonObject toCreate = new DynamicJsonObject();
    toCreate["Workspace"] = workspace;
    toCreate["Project"] = project;
    toCreate["Name"] = userStoryName;
    toCreate["Description"] = userStoryDescription;

    //Get all file paths within a given directory; this directory contains .png files that need to be associated to a user story.
    string[] attachmentPath = Directory.GetFiles("C:\\Users\\user\\Desktop\\RallyAttachments");
This foreach loop is where I am confused. I am trying to iterate over each file in the directory to convert it into a Base64 string, while also acquiring the number of bytes of each file as an int.
    foreach (string fileName in attachmentPath)
    {
        Image myImage = Image.FromFile(fileName);
        string imageBase64String = imageToBase64(myImage, System.Drawing.Imaging.ImageFormat.Png);
        imageNumberBytes = Convert.FromBase64String(imageBase64String).Length;
        //I am stuck here to be exact, because there are multiple Base64 strings due to the collection of files in the directory, AND the line below is wrong because I have a list of Base64 strings generated from iterating through string[] attachmentPath.
        myAttachmentContent[RallyField.content] = imageBase64String;
    }
    try
    {
        //create user story
        CreateResult createUserStory = _api.Create(RallyField.attachmentContent, myAttachmentContent);
        //create attachment
        CreateResult myAttachmentContentCreateResult = _api.Create(RallyField.attachmentContent, myAttachmentContent);
        String myAttachmentContentRef = myAttachmentContentCreateResult.Reference;

        //DynamicJsonObject for the attachment container
        //I assume I would need a separate container for each file in my directory of attachments.
        DynamicJsonObject myAttachment = new DynamicJsonObject();
        myAttachment["Artifact"] = createUserStory.Reference;
        myAttachment["Content"] = myAttachmentContentRef;
        myAttachment["Name"] = "AttachmentFromREST.png";
        myAttachment["Description"] = "Email Attachment";
        myAttachment["ContentType"] = "image/png";
        myAttachment["Size"] = imageNumberBytes;

        //create & associate the attachment
        CreateResult myAttachmentCreateResult = _api.Create(RallyField.attachment, myAttachment);
        Console.WriteLine("Created User Story: " + createUserStory.Reference);
    }
    catch (WebException e)
    {
        Console.WriteLine(e.Message);
    }
}
Note: I am planning to extend this method to support multiple file types, and I think I would need to get the file type of each file in the directory and proceed accordingly.
Any ideas on how to go about writing this?
You've got all the parts implemented; we just need to move things around a little. Create the story once at the beginning, and then each time through the loop create a new AttachmentContent and a new Attachment for each file.
public void createUsWithAttachmentList(string workspace, string project, string userStoryName, string userStoryDescription)
{
    //authentication
    this.EnsureRallyIsAuthenticated();

    //User story setup
    DynamicJsonObject toCreate = new DynamicJsonObject();
    toCreate["Workspace"] = workspace;
    toCreate["Project"] = project;
    toCreate["Name"] = userStoryName;
    toCreate["Description"] = userStoryDescription;

    //Create the story first
    try
    {
        //create user story
        CreateResult createUserStory = _api.Create(RallyField.userStory, toCreate);

        //now loop over each file
        string[] attachmentPath = Directory.GetFiles("C:\\Users\\user\\Desktop\\RallyAttachments");
        foreach (string fileName in attachmentPath)
        {
            //DynamicJsonObject for AttachmentContent
            DynamicJsonObject myAttachmentContent = new DynamicJsonObject();
            Image myImage = Image.FromFile(fileName);
            string imageBase64String = imageToBase64(myImage, System.Drawing.Imaging.ImageFormat.Png);
            int imageNumberBytes = Convert.FromBase64String(imageBase64String).Length;
            myAttachmentContent[RallyField.content] = imageBase64String;

            //create the AttachmentContent
            CreateResult myAttachmentContentCreateResult = _api.Create(RallyField.attachmentContent, myAttachmentContent);
            String myAttachmentContentRef = myAttachmentContentCreateResult.Reference;

            //create an Attachment to associate to the story
            DynamicJsonObject myAttachment = new DynamicJsonObject();
            myAttachment["Artifact"] = createUserStory.Reference;
            myAttachment["Content"] = myAttachmentContentRef;
            myAttachment["Name"] = "AttachmentFromREST.png";
            myAttachment["Description"] = "Email Attachment";
            myAttachment["ContentType"] = "image/png";
            myAttachment["Size"] = imageNumberBytes;

            //create & associate the attachment
            CreateResult myAttachmentCreateResult = _api.Create(RallyField.attachment, myAttachment);
        }
    }
    catch (WebException e)
    {
        Console.WriteLine(e.Message);
    }
}
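To extend this to multiple file types, as the question's note suggests, one option is to derive the ContentType and attachment name from each file's extension instead of hard-coding "image/png". A hedged sketch; the extension-to-MIME map and the helper name are my own, not part of the Rally SDK:
using System;
using System.Collections.Generic;
using System.IO;

// Hypothetical helper: map a file extension to a MIME content type.
// Extend the dictionary for whatever file types you expect to upload.
static string GetContentType(string fileName)
{
    var map = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
    {
        { ".png", "image/png" },
        { ".jpg", "image/jpeg" },
        { ".jpeg", "image/jpeg" },
        { ".gif", "image/gif" },
        { ".txt", "text/plain" },
        { ".pdf", "application/pdf" }
    };
    string ext = Path.GetExtension(fileName);
    string contentType;
    return map.TryGetValue(ext, out contentType) ? contentType : "application/octet-stream";
}

// Inside the loop above one could then write:
//   myAttachment["Name"] = Path.GetFileName(fileName);
//   myAttachment["ContentType"] = GetContentType(fileName);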

SSIS 2015 Script task convert text file to UTF8 in C# or VB

I want to convert my resulting .txt file into a UTF-8 formatted file so I can load it into my Azure SQL DW via PolyBase. The source file is required to be UTF-8.
MSDN has an "IO Streaming example" HERE that works perfectly for a single job. I am trying to architect an SSIS solution for around 30 tables, though, and I believe using this method would cause a race condition where the PS script is locked by one SSIS package while another SSIS package needs it.
I am a SQL dev, not a .NET dev, so please forgive me. How would one convert the above to an SSIS C# Script Task, assuming I know how to pass parameters into the Script Task?
PowerShell Code from MSDN
#Static variables
$ascii = [System.Text.Encoding]::ASCII
$utf16le = [System.Text.Encoding]::Unicode
$utf8 = [System.Text.Encoding]::UTF8
$ansi = [System.Text.Encoding]::Default
$append = $False
#Set source file path and file name
$src = [System.IO.Path]::Combine("<MySrcFolder>","<MyUtf8stage>.txt")
#Set source file encoding (using list above)
$src_enc = $ascii
#Set target file path and file name
$tgt = [System.IO.Path]::Combine("<MyDestFolder>","<MyFinalstage>.txt")
#Set target file encoding (using list above)
$tgt_enc = $utf8
$read = New-Object System.IO.StreamReader($src,$src_enc)
$write = New-Object System.IO.StreamWriter($tgt,$append,$tgt_enc)
while ($read.Peek() -ne -1)
{
$line = $read.ReadLine();
$write.WriteLine($line);
}
$read.Close()
$read.Dispose()
$write.Close()
$write.Dispose()
Update
I found a similar post which I was able to tweak to my needs (I swear I searched high and low before posting). Anyway, here is what is working for me. If you see any way to improve it, please share:
public void Main()
{
    //$Package::SourceSQLObject = tablename
    //$Package::StageFile_DestinationFolderPath = rootpath eg "C:\temp\"
    string path = (string)Dts.Variables["$Package::StageFile_DestinationFolderPath"].Value;
    string name = (string)Dts.Variables["$Package::SourceSQLObject"].Value;
    string from = Path.Combine(path, name) + ".csv";
    string to = Path.ChangeExtension(from, "txt");
    Dts.Log("Starting " + to.ToUpper(), 0, null);

    // note: 10 is the stream buffer size in bytes; it works but is very small
    using (StreamReader reader = new StreamReader(from, Encoding.ASCII, false, 10))
    using (StreamWriter writer = new StreamWriter(to, false, Encoding.UTF8, 10))
    {
        while (reader.Peek() >= 0)
        {
            writer.WriteLine(reader.ReadLine());
        }
    }
    Dts.TaskResult = (int)ScriptResults.Success;
}
Your code indicates that you are trying to convert an ASCII file to UTF-8; however, that article also states the following:
As UTF-8 uses the same character encoding as ASCII, PolyBase will also
support loading data that is ASCII encoded.
So my advice is to try the file with PolyBase first and check for any conversion issues before you spend any time converting the files. If you still need the conversion, here is a fairly direct C# translation of the PowerShell script:
var mySrcFolder = "";   // something from user variables?
var myDestFolder = "";  // something from user variables?
var myUtf8stage = "";   // something from user variables?
var myFinalstage = "";  // something from user variables?

// Static variables
var ascii = System.Text.Encoding.ASCII;
var utf16le = System.Text.Encoding.Unicode;
var utf8 = System.Text.Encoding.UTF8;
var ansi = System.Text.Encoding.Default;
var append = false;

// Set source file path and file name
var src = System.IO.Path.Combine(
    mySrcFolder,
    String.Format("{0}.txt", myUtf8stage));

// Set source file encoding (using list above)
var src_enc = ascii;

// Set target file path and file name
var tgt = System.IO.Path.Combine(
    myDestFolder,
    String.Format("{0}.txt", myFinalstage));

// Set target file encoding (using list above)
var tgt_enc = utf8;

using (var read = new System.IO.StreamReader(src, src_enc))
using (var write = new System.IO.StreamWriter(tgt, append, tgt_enc))
{
    while (read.Peek() != -1)
    {
        var line = read.ReadLine();
        write.WriteLine(line);
    }
}

How to copy file from one library to another library using CSOM?

I need to copy a particular file from one library to another library.
First, I need to check whether the file already exists in the target library.
If it exists, the file content should be overwritten and a new SharePoint version should be created for that document.
I need to do this using C# CSOM; the SharePoint version is 2013.
Thanks in advance :)
public static void CopyDocuments(string srcUrl, string destUrl, string srcLibrary, string destLibrary, Login _login)
{
    // set up the source context
    SP.ClientContext srcContext = new SP.ClientContext(srcUrl);
    srcContext.AuthenticationMode = SP.ClientAuthenticationMode.FormsAuthentication;
    srcContext.FormsAuthenticationLoginInfo = new SP.FormsAuthenticationLoginInfo(_login.UserName, _login.Password);

    // set up the destination context (in your case there is no need to create a new context, because it would be the same library!!!!)
    SP.ClientContext destContext = new SP.ClientContext(destUrl);
    destContext.AuthenticationMode = SP.ClientAuthenticationMode.FormsAuthentication;
    destContext.FormsAuthenticationLoginInfo = new SP.FormsAuthenticationLoginInfo(_login.UserName, _login.Password);

    // get the list and items
    SP.Web srcWeb = srcContext.Web;
    SP.List srcList = srcWeb.Lists.GetByTitle(srcLibrary);
    SP.ListItemCollection col = srcList.GetItems(new SP.CamlQuery());
    srcContext.Load(col);
    srcContext.ExecuteQuery();

    // get the destination web
    SP.Web destWeb = destContext.Web;
    destContext.Load(destWeb);
    destContext.ExecuteQuery();

    foreach (var doc in col)
    {
        try
        {
            if (doc.FileSystemObjectType == SP.FileSystemObjectType.File)
            {
                // get the file
                SP.File f = doc.File;
                srcContext.Load(f);
                srcContext.ExecuteQuery();

                // build the new location url
                string nLocation = destWeb.ServerRelativeUrl.TrimEnd('/') + "/" + destLibrary.Replace(" ", "") + "/" + f.Name;

                // read the file and copy the content to a new file at the new location
                SP.FileInformation fileInfo = SP.File.OpenBinaryDirect(srcContext, f.ServerRelativeUrl);
                SP.File.SaveBinaryDirect(destContext, nLocation, fileInfo.Stream, true);
            }
            if (doc.FileSystemObjectType == SP.FileSystemObjectType.Folder)
            {
                // load the folder
                srcContext.Load(doc);
                srcContext.ExecuteQuery();

                // get the folder data and the file collection in the folder
                SP.Folder folder = srcWeb.GetFolderByServerRelativeUrl(doc.FieldValues["FileRef"].ToString());
                SP.FileCollection fileCol = folder.Files;

                // load everything so we can access it
                srcContext.Load(folder);
                srcContext.Load(fileCol);
                srcContext.ExecuteQuery();

                foreach (SP.File f in fileCol)
                {
                    // load the file
                    srcContext.Load(f);
                    srcContext.ExecuteQuery();

                    string[] parts = null;
                    string id = null;
                    if (srcLibrary == "My Files")
                    {
                        // these are doc sets
                        parts = f.ServerRelativeUrl.Split('/');
                        id = parts[parts.Length - 2];
                    }
                    else
                    {
                        id = folder.Name;
                    }

                    // build the new location url
                    string nLocation = destWeb.ServerRelativeUrl.TrimEnd('/') + "/" + destLibrary.Replace(" ", "") + "/" + id + "/" + f.Name;

                    // read the file and copy the content to a new file at the new location
                    SP.FileInformation fileInfo = SP.File.OpenBinaryDirect(srcContext, f.ServerRelativeUrl);
                    SP.File.SaveBinaryDirect(destContext, nLocation, fileInfo.Stream, true);
                }
            }
        }
        catch (Exception ex)
        {
            Log("File Error = " + ex.ToString());
        }
    }
}
Source: https://sharepoint.stackexchange.com/questions/114033/how-do-i-move-files-from-one-document-library-to-another-using-jsom
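The question also asks to check whether the file already exists in the target library before overwriting, which neither snippet does explicitly. A hedged CSOM sketch, reusing destContext and nLocation from the code above; note that on SharePoint 2013 on-premises, loading a non-existent file can throw a ServerException on ExecuteQuery rather than populating Exists:
// Hedged sketch: test for an existing file at the target URL before copying.
SP.File existingFile = destContext.Web.GetFileByServerRelativeUrl(nLocation);
destContext.Load(existingFile, f => f.Exists);
try
{
    destContext.ExecuteQuery();
    // existingFile.Exists is now populated
}
catch (SP.ServerException)
{
    // on 2013 a missing file (or parent folder) may surface here instead
}
// SaveBinaryDirect with overwrite = true then replaces the content; if versioning
// is enabled on the library, SharePoint creates a new version automatically.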
I strongly advise against using the approach suggested by Nikerym. You don't want to download the bytes only to upload them unmodified. It's slow and error-prone. Instead, use the built-in method provided by the CSOM API.
https://learn.microsoft.com/en-us/previous-versions/office/sharepoint-server/mt162553(v=office.15)?redirectedfrom=MSDN
var srcPath = "https://YOUR.sharepoint.com/sites/xxx/SitePages/Page.aspx";
var destPath = "https://YOUR.sharepoint.com/sites/xxx/SitePages/CopiedPage.aspx";
MoveCopyUtil.CopyFileByPath(ctx, ResourcePath.FromDecodedUrl(srcPath), ResourcePath.FromDecodedUrl(destPath), false, new MoveCopyOptions());
ctx.ExecuteQuery();
You can configure the overwrite behavior by adjusting the 4th and 5th arguments of the method signature:
[...]
bool overwrite,
MoveCopyOptions options
https://learn.microsoft.com/en-us/previous-versions/office/sharepoint-server/mt844930(v=office.15)
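For example, to replace an existing file at the destination, one would pass true for overwrite. A hedged sketch based on the linked documentation; the KeepBoth flag is assumed from the MoveCopyOptions docs, and MoveCopyUtil may not be available on SharePoint 2013 on-premises:
// Hedged example: overwrite the destination file if it already exists.
MoveCopyUtil.CopyFileByPath(
    ctx,
    ResourcePath.FromDecodedUrl(srcPath),
    ResourcePath.FromDecodedUrl(destPath),
    true,                                   // overwrite
    new MoveCopyOptions { KeepBoth = false });
ctx.ExecuteQuery();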

How to automatically load files into SharePoint

We currently have someone manually load weekly generated Excel spreadsheets into SharePoint. I'm sure there is a way to automate this. I don't know a lot about SharePoint; maybe it's really as simple as knowing the folder SharePoint moves the files to and copying them directly there, or maybe it requires some programming to automatically load new files from a given directory into SharePoint.
Either way, I would just like someone to point me in the right direction here.
You will need to upload the file using the Copy web service in SharePoint. I am not sure what version of SharePoint you are running, but I am assuming 2007. Here is a sample:
public void UploadFile(string destinationFolderPath,
                       byte[] fileBytes,
                       string fileName,
                       bool overwrite,
                       string sourceFileUrl,
                       string lastVersionUrl)
{
    List<Sharepoint.FieldInformation> fields = new List<Sharepoint.FieldInformation>();
    Sharepoint.FieldInformation fieldInfo;
    fieldInfo = new Sharepoint.FieldInformation();
    fieldInfo.Id = Microsoft.SharePoint.SPBuiltInFieldId.Title;
    fieldInfo.Value = "New title";
    fieldInfo.DisplayName = "Title";
    fieldInfo.Type = YetAnotherMigrationTool.Library.SP2007.Sharepoint.FieldType.Text;
    fieldInfo.InternalName = "Title";
    fields.Add(fieldInfo);

    string[] url;
    if (string.IsNullOrEmpty(destinationFolderPath))
        url = new string[] { string.Format("{0}/{1}/{2}", _siteUrl, _name, fileName) };
    else
        url = new string[] { string.Format("{0}/{1}/{2}{3}", _siteUrl, _name, destinationFolderPath, fileName) };

    Sharepoint.CopyResult[] result;
    Sharepoint.Copy service = new Sharepoint.Copy();
    service.Url = _siteUrl + "/_vti_bin/Copy.asmx";
    service.Credentials = new NetworkCredential(Settings.Instance.User, Settings.Instance.Password);
    service.Timeout = 600000;
    uint documentId = service.CopyIntoItems(sourceFileUrl, url, fields.ToArray(), fileBytes, out result);
}

public void SetContentType(List<string> ids, string contentType)
{
    ListsService.Lists service = new YetAnotherMigrationTool.Library.SP2007.ListsService.Lists();
    service.Url = _siteUrl + "/_vti_bin/Lists.asmx";
    service.Credentials = new NetworkCredential(Settings.Instance.User, Settings.Instance.Password);

    string strBatch = "";
    for (int i = 1; i <= ids.Count; i++)
    {
        strBatch += @"<Method ID='" + i.ToString() + @"' Cmd='Update'><Field Name='ID'>" + ids[i - 1] + "</Field><Field Name='ContentType'>" + contentType + "</Field></Method>";
    }

    XmlDocument xmlDoc = new XmlDocument();
    XmlElement elBatch = xmlDoc.CreateElement("Batch");
    elBatch.SetAttribute("OnError", "Continue");
    elBatch.SetAttribute("ListVersion", "10");
    elBatch.SetAttribute("ViewName", "");
    elBatch.InnerXml = strBatch;
    XmlNode result = service.UpdateListItems(_name, elBatch);
}
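A hedged usage sketch for the weekly spreadsheet scenario, calling the UploadFile method above from a small console app run by Windows Task Scheduler; the folder path is a placeholder:
// Hypothetical driver: pick up this week's spreadsheets and push each one
// to the document library via the UploadFile method defined above.
string watchFolder = @"C:\Reports\Weekly"; // placeholder path for the weekly exports
foreach (string file in Directory.GetFiles(watchFolder, "*.xlsx"))
{
    byte[] bytes = File.ReadAllBytes(file);
    // empty destinationFolderPath = library root; the source URL is informational for CopyIntoItems
    UploadFile(string.Empty, bytes, Path.GetFileName(file), true, file, null);
}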
You could write a PowerShell script that copies the document into the document library via WebDav:
Assuming you have your document library at http://server/SomeWeb/DocumentLibrary/Folder:
copy-item somesheet.xlsx \\server\SomeWeb\DocumentLibrary\Folder
