I'm inspecting a DLL file and I need to know which classes from other DLL files are used in this DLL. With the first loop below it's possible to iterate through all classes of the DLL, but now I want to get a list of all classes that are used in some way in each class (those classes follow a specific naming convention, "Some.Test.Class*").
Could the solution be to parse every instruction of every method of the class and then search the instruction for the name of the class?
Does anybody have a better idea?
foreach (TypeDefinition type in this.currentAssembly.MainModule.Types)
{
foreach (MethodDefinition method in type.Methods)
{
if (method.HasBody)
{
for (int cnt = 0; cnt < method.Body.Instructions.Count; cnt++)
{
Instruction instruction = method.Body.Instructions[cnt];
/*
????????
*/
}
}
}
}
P.S.: I must not load some of the referenced DLL files.
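One minimal sketch of that instruction-scanning idea (untested; it only inspects method bodies, so types used purely in signatures, fields, or base classes would need separate checks):
var usedTypes = new HashSet<string>();
foreach (TypeDefinition type in this.currentAssembly.MainModule.Types)
{
    foreach (MethodDefinition method in type.Methods)
    {
        if (!method.HasBody)
            continue;
        foreach (Instruction instruction in method.Body.Instructions)
        {
            // Call/field/type operands all carry a TypeReference we can inspect
            // without resolving (and thus without loading) the referenced assembly.
            TypeReference typeRef = instruction.Operand as TypeReference
                ?? (instruction.Operand as MemberReference)?.DeclaringType;
            if (typeRef != null && typeRef.FullName.StartsWith("Some.Test.Class"))
                usedTypes.Add(typeRef.FullName);
        }
    }
}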
I am using Mono.Cecil:
private static List<MethodDefinition> GetAllInnerMethods(MethodDefinition method)
{
var queueMethodDefinition = new Queue<MethodDefinition>();
var hsMethodDefinition = new HashSet<MethodDefinition>();
queueMethodDefinition.Enqueue(method);
while (queueMethodDefinition.Count > 0)
{
MethodDefinition methodCurr = queueMethodDefinition.Dequeue();
if (!hsMethodDefinition.Contains(methodCurr))
{
hsMethodDefinition.Add(methodCurr);
IEnumerable<MethodDefinition> innerMethodsByCaller = GetInnerMethodsByCaller(methodCurr);
foreach (var currInnerMethod in innerMethodsByCaller)
{
if (!hsMethodDefinition.Contains(currInnerMethod))
{
queueMethodDefinition.Enqueue(currInnerMethod);
}
}
}
}
return hsMethodDefinition.ToList();
}
private static IEnumerable<MethodDefinition> GetInnerMethodsByCaller(MethodDefinition caller)
{
if (!caller.HasBody) // abstract/extern call targets have no body
    return Enumerable.Empty<MethodDefinition>();
return caller.Body.Instructions
.Where(x => (x.OpCode == OpCodes.Call || x.OpCode == OpCodes.Calli || x.OpCode == OpCodes.Callvirt) && x.Operand is MethodDefinition)
.Select(x => (MethodDefinition)x.Operand);
}
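Since the question is ultimately about classes rather than methods, a small follow-up (a sketch; GetUsedTestClasses is a hypothetical helper and the "Some.Test.Class" prefix is taken from the question) could map the collected methods to their declaring types and filter by the naming convention:
// Sketch: reduce the collected methods to the distinct declaring-type names
// that match the naming convention from the question.
private static List<string> GetUsedTestClasses(MethodDefinition entryPoint)
{
    return GetAllInnerMethods(entryPoint)
        .Select(m => m.DeclaringType.FullName)
        .Where(name => name.StartsWith("Some.Test.Class"))
        .Distinct()
        .ToList();
}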
I have a method called ClearFiles which can be called recursively if there are subdirectories. I have created a counter that counts the number of files that have been deleted. The counter holds the correct number whether the function is called once or recursively, but the toast at the end of the function only shows the correct number when it is not called recursively. Is there any way I can display the toast and then reset filesCleared? Because it just shows 0 when the method is called recursively.
From playing around with it for a bit, it seems like the toast is shown after the filesCleared variable has been set to 0, which is not what I want.
filesCleared variable:
int filesCleared = 0;
clearFiles:
public async Task ClearFiles()
{
var pathName = FileFilter.PathName;
FileInfo[] files = SortFiles(pathName);
try
{
if(FileFilter.Filter == "all")
{
foreach(var file in files)
{
if(file.Extension == FileFilter.Extension || FileFilter.Extension == "all")
{
File.Delete(file.ToString());
filesCleared++;
}
}
}
if(FileFilter.Filter == "date")
{
foreach (var file in files) //regular files
{
if(file.CreationTime < FileFilter.DeleteDate) //based on time
{
if(file.Extension == FileFilter.Extension || FileFilter.Extension == "all") //based on extension
{
File.Delete(file.ToString());
filesCleared++;
}
}
}
}
if(FileFilter.Filter == "number")
{
var i = 0;
for(var j = files.Length-1; j >= 0 ; j--)
{
if(files[j].Extension == FileFilter.Extension || FileFilter.Extension == "all")
{
if(i++ >= FileFilter.FilesToKeep)
{
File.Delete(files[j].ToString());
filesCleared++;
}
}
}
}
if (FileFilter.SubFolders == true) //subfiles (will be called recursively w/ each filter)
{
foreach(var subDir in new DirectoryInfo(pathName).GetDirectories())
{
//subDir.Delete(true);
FileFilter.PathName = subDir.ToString();
ClearFiles();
//await ClearFiles(subDir.ToString());
}
FileFilter.PathName = pathName; //resets the pathName so it will go back to what it was before the recursion
}
}
catch (IOException ioExp)
{
Console.WriteLine(ioExp.Message);
Toast = Toast.Bad();
logger.LogError(ioExp, "Error Deleting");
}
Toast = Toast.Good(filesCleared + " Files Deleted");
filesCleared = 0;
}
If you want to do something once but also want to call a method recursively, you have to split it in two. After trying to simplify your code I get a ClearFiles method like this:
public void ClearFiles()
{
var filesCleared = 0;
try
{
filesCleared = DeleteFilesRecursively(FileFilter.PathName, FileFilter);
}
catch (IOException ioExp)
{
Console.WriteLine(ioExp.Message);
Toast = Toast.Bad();
logger.LogError(ioExp, "Error Deleting");
}
Toast = Toast.Good(filesCleared + " Files Deleted");
}
Now Toast.Good is only called once after all subfolders have been traversed.
Note that filesCleared is a local variable, since I don't see any point in making it global. That way you also don't need to reset it.
The implementation of DeleteFilesRecursively could be something like this and could be simplified more if you wanted:
private const string All = "all";
private const string FilterByDate = "date";
private const string FilterByNumber = "number";
int DeleteFilesRecursively(string dirPath, SomeFileFilterType fileFilter)
{
FileInfo[] files = SortFiles(dirPath);
var deleted = 0;
var toBeDeleted = files.Where(f => MatchesByExtension(f, fileFilter.Extension));
if (fileFilter.Filter == FilterByDate)
{
toBeDeleted = toBeDeleted.Where(f => MatchesByDate(f, fileFilter.DeleteDate));
}
else if (fileFilter.Filter == FilterByNumber)
{
// If your SortFiles method sorted in the other
// direction this call to Reverse would not be needed.
toBeDeleted = toBeDeleted.Reverse().Skip(fileFilter.FilesToKeep);
}
foreach (var file in toBeDeleted)
{
File.Delete(file.ToString());
deleted++;
}
if (fileFilter.SubFolders)
{
foreach(var subDir in new DirectoryInfo(dirPath).GetDirectories())
{
deleted += DeleteFilesRecursively(subDir.FullName, fileFilter);
}
}
return deleted;
}
bool MatchesByExtension(FileInfo file, string extension)
=> file.Extension == extension || extension == All;
bool MatchesByDate(FileInfo file, DateTime deleteDate)
=> file.CreationTime < deleteDate;
Note that I also removed your magic strings, which could be even better by replacing them with an enum type.
I haven't tested this but I believe it should give you the same behavior as your current code (at least the parts about filtering and deleting).
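As a sketch of the enum suggestion above (the names are assumed, not from the original code):
// Hypothetical enum replacing the "all"/"date"/"number" magic strings.
public enum FilterMode
{
    All,
    ByDate,
    ByNumber
}
// FileFilter.Filter would then be a FilterMode, and the branches become
// comparisons like fileFilter.Filter == FilterMode.ByDate, with no string literals.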
I have a bunch of text files in a folder, and all of them should have identical headers. In other words the first 100 lines of all files should be identical. So I wrote a function to check this condition:
private static bool CheckHeaders(string folderPath, int headersCount)
{
var enumerators = Directory.EnumerateFiles(folderPath)
.Select(f => File.ReadLines(f).GetEnumerator())
.ToArray();
//using (enumerators)
//{
for (int i = 0; i < headersCount; i++)
{
foreach (var e in enumerators)
{
if (!e.MoveNext()) return false;
}
var values = enumerators.Select(e => e.Current);
if (values.Distinct().Count() > 1) return false;
}
return true;
//}
}
The reason I am using enumerators is memory efficiency. Instead of loading all file contents in memory I enumerate the files concurrently line-by-line until a mismatch is found, or all headers have been examined.
My problem is evident from the commented lines of code. I would like to utilize a using block to safely dispose all the enumerators, but unfortunately using (enumerators) doesn't compile. Apparently using can handle only a single disposable object. I know that I can dispose the enumerators manually, by wrapping the whole thing in a try-finally block and running the disposal logic in a loop inside finally, but it seems awkward. Is there any mechanism I could employ to make the using statement a viable option in this case?
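For reference, the manual try/finally version described above would look roughly like this (a sketch; it still has the construction issue discussed in the update below):
private static bool CheckHeaders(string folderPath, int headersCount)
{
    var enumerators = Directory.EnumerateFiles(folderPath)
        .Select(f => File.ReadLines(f).GetEnumerator())
        .ToArray();
    try
    {
        for (int i = 0; i < headersCount; i++)
        {
            foreach (var e in enumerators)
            {
                if (!e.MoveNext()) return false;
            }
            if (enumerators.Select(e => e.Current).Distinct().Count() > 1) return false;
        }
        return true;
    }
    finally
    {
        // Dispose every enumerator, even when we return early.
        foreach (var e in enumerators)
        {
            e.Dispose();
        }
    }
}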
Update
I just realized that my function has a serious flaw: the construction of the enumerators is not robust. A locked file can cause an exception after some enumerators have already been created, and those enumerators will never be disposed. This is something I want to fix. I am thinking about something like this:
var enumerators = Directory.EnumerateFiles(folderPath)
.ToDisposables(f => File.ReadLines(f).GetEnumerator());
The extension method ToDisposables should ensure that in case of an exception no disposables are left undisposed.
You can create a disposable-wrapper over your enumerators:
class DisposableEnumerable : IDisposable
{
private IEnumerable<IDisposable> items;
public event UnhandledExceptionEventHandler DisposalFailed;
public DisposableEnumerable(IEnumerable<IDisposable> items) => this.items = items;
public void Dispose()
{
foreach (var item in items)
{
try
{
item.Dispose();
}
catch (Exception e)
{
var tmp = DisposalFailed;
tmp?.Invoke(this, new UnhandledExceptionEventArgs(e, false));
}
}
}
}
and use it with the lowest impact to your code:
private static bool CheckHeaders(string folderPath, int headersCount)
{
var enumerators = Directory.EnumerateFiles(folderPath)
.Select(f => File.ReadLines(f).GetEnumerator())
.ToArray();
using (var disposable = new DisposableEnumerable(enumerators))
{
for (int i = 0; i < headersCount; i++)
{
foreach (var e in enumerators)
{
if (!e.MoveNext()) return false;
}
var values = enumerators.Select(e => e.Current);
if (values.Distinct().Count() > 1) return false;
}
return true;
}
}
The thing is, you have to dispose those objects separately, one by one, anyway; it's just up to you where to encapsulate that logic. And the code I've suggested has no manual try-finally. ;)
As for the second part of the question: if I understand you correctly, this should be sufficient:
static class DisposableHelper
{
public static IEnumerable<TResult> ToDisposable<TSource, TResult>(this IEnumerable<TSource> source,
Func<TSource, TResult> selector) where TResult : IDisposable
{
var exceptions = new List<Exception>();
var result = new List<TResult>();
foreach (var i in source)
{
try { result.Add(selector(i)); }
catch (Exception e) { exceptions.Add(e); }
}
if (exceptions.Count == 0)
return result;
foreach (var i in result)
{
try { i.Dispose(); }
catch (Exception e) { exceptions.Add(e); }
}
throw new AggregateException(exceptions);
}
}
Usage:
private static bool CheckHeaders(string folderPath, int headersCount)
{
var enumerators = Directory.EnumerateFiles(folderPath)
.ToDisposable(f => File.ReadLines(f).GetEnumerator())
.ToArray();
using (new DisposableEnumerable(enumerators))
{
for (int i = 0; i < headersCount; i++)
{
foreach (var e in enumerators)
{
if (!e.MoveNext()) return false;
}
var values = enumerators.Select(e => e.Current);
if (values.Distinct().Count() > 1) return false;
}
return true;
}
}
and
try
{
CheckHeaders(folderPath, headersCount);
}
catch(AggregateException e)
{
// Prompt to fix errors and try again
}
I'm going to suggest an approach that uses recursive calls to Zip to allow parallel enumeration of a normal IEnumerable<string> without the need to resort to using IEnumerator<string>.
bool Zipper(IEnumerable<IEnumerable<string>> sources, int take)
{
IEnumerable<string> ZipperImpl(IEnumerable<IEnumerable<string>> ss)
=> (!ss.Skip(1).Any())
? ss.First().Take(take)
: ss.First().Take(take).Zip(
ZipperImpl(ss.Skip(1)),
(x, y) => (x == null || y == null || x != y) ? null : x);
var matching_lines = ZipperImpl(sources).TakeWhile(x => x != null).ToArray();
return matching_lines.Length == take;
}
Now build up your enumerables:
IEnumerable<string>[] enumerables =
Directory
.EnumerateFiles(folderPath)
.Select(f => File.ReadLines(f))
.ToArray();
Now it's simple to call:
bool headers_match = Zipper(enumerables, 100);
Here's a trace of running this code against three files with more than 4 lines:
Ben Petering at 5:28 PM ACST
Ben Petering at 5:28 PM ACST
Ben Petering at 5:28 PM ACST
From a call 2019-05-23, James mentioned he’d like the ability to edit the current shipping price rules (eg in shipping_rules.xml) via the admin.
From a call 2019-05-23, James mentioned he’d like the ability to edit the current shipping price rules (eg in shipping_rules.xml) via the admin.
From a call 2019-05-23, James mentioned he’d like the ability to edit the current shipping price rules (eg in shipping_rules.xml) via the admin.
He also mentioned he’d like to be able to set different shipping price rules for a given time window, e.g. Jan 1 to Jan 30.
He also mentioned he’d like to be able to set different shipping price rules for a given time window, e.g. Jan 1 to Jan 30.
He also mentioned he’d like to be able to set different shipping price rules for a given time window, e.g. Jan 1 to Jan 30.
These storyishes should be considered when choosing the appropriate module to use.
These storyishes should be considered when choosing the appropriate module to use.X
These storyishes should be considered when choosing the appropriate module to use.
Note that the enumerations stop as soon as a mismatched header is encountered, here in the 4th line of the second file. All enumerations then stopped.
Creating an IDisposable wrapper as #Alex suggested is correct. It just needs logic to dispose the already-opened files if one of them is locked, and probably some logic for error states. Maybe something like this (the error-state logic is very simple):
public class HeaderChecker : IDisposable
{
private readonly string _folderPath;
private readonly int _headersCount;
private string _lockedFile;
private readonly List<IEnumerator<string>> _files = new List<IEnumerator<string>>();
public HeaderChecker(string folderPath, int headersCount)
{
_folderPath = folderPath;
_headersCount = headersCount;
}
public string LockedFile => _lockedFile;
public bool CheckFiles()
{
_lockedFile = null;
if (!TryOpenFiles())
{
return false;
}
if (_files.Count == 0)
{
return true; // Not sure what to return here.
}
for (int i = 0; i < _headersCount; i++)
{
if (!_files[0].MoveNext()) return false;
string currentLine = _files[0].Current;
for (int fileIndex = 1; fileIndex < _files.Count; fileIndex++)
{
if (!_files[fileIndex].MoveNext()) return false;
if (_files[fileIndex].Current != currentLine) return false;
}
}
return true;
}
private bool TryOpenFiles()
{
bool result = true;
foreach (string file in Directory.EnumerateFiles(_folderPath))
{
try
{
_files.Add(File.ReadLines(file).GetEnumerator());
}
catch
{
_lockedFile = file;
result = false;
break;
}
}
if (!result)
{
DisposeCore(); // Close already opened files.
}
return result;
}
private void DisposeCore()
{
foreach (var item in _files)
{
try
{
item.Dispose();
}
catch
{
}
}
_files.Clear();
}
public void Dispose()
{
DisposeCore();
}
}
// Usage
using (var checker = new HeaderChecker(folderPath, headersCount))
{
if (!checker.CheckFiles())
{
if (checker.LockedFile is null)
{
// Error while opening files.
}
else
{
// Headers do not match.
}
}
}
I also removed .Select() and .Distinct() when checking the lines. The .Select() just iterates over the enumerators array again, the same as the foreach above it, so you were enumerating that array twice; it then creates a new sequence of lines, which .Distinct() enumerates over as well.
In my program I use a variable of type List<MapPdf> that I split up into other variables while filling the PDF. I would like to be able to use one of those values again later, but I can't get hold of it again outside this method:
public static void Create(List<MapPdf> pps, Saison s, Agence agence)
{
foreach (var pelerins in grouped)
{
if (string.IsNullOrEmpty(pelerins.Key) || pelerins.Count() <= 0)
break;
if (writer.PageEvent == null)
{
writer.PageEvent = new Header_List()
{
travel = ctx.Travels.Include("Transport").First(v => v.UniqueId == pelerins.Key),
travelretour = ctx.Travels.Find(pelerins.First().UniqueIdRetour),
Agence = agence,
CountAllPelerin = pelerins.Count().ToString(),
CountFeminin = pelerins.Count(x => x.Sexe == PelerinSexe.Feminin).ToString(),
CountMasculin = pelerins.Count(x => x.Sexe == PelerinSexe.Masculin).ToString(),
NomGroupe = pelerins.First().Groupe,
NumeroDoc = writer.PageNumber
};
}
}
}
I want to use pelerins as a List in another function, even though it is declared inside this one.
I tried List<MapPdf> pls = pelerins.ToList(); but it does not work:
CreateFr(pls, false, cb, s);
If you are referring to var pelerins within the foreach loop, and if I understand the problem correctly, you are unable to use it in another method because pelerins is a local variable scoped to the foreach loop; it does not exist outside it.
You could do the following:
//public static field to retain the pelerins you are interested in (static because Create is static)
public static List<MapPdf> pls = new List<MapPdf>();
...
...
public static void Create(List<MapPdf> pps, Saison s, Agence agence)
{
foreach (var pelerins in grouped)
{
if (string.IsNullOrEmpty(pelerins.Key) || pelerins.Count() <= 0)
break;
if (writer.PageEvent == null)
{
//do logic
...
//store the one you are interested in, so you can use it later on
pls = pelerins.ToList();
}
}
}
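Once Create has run, the stored list can then be passed to the other method just as in the question:
CreateFr(pls, false, cb, s);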
I know I can get a string from resources using
Resources.GetIdentifier(token, "string", ctx.ApplicationContext.PackageName)
(sorry, this is in C#, it's part of a Xamarin.Android project).
I know that if my elements are called foo_1, foo_2, foo_3, then I can iterate and grab the strings using something like
var myList = new List<string>();
for(var i = 0; i < 4; ++i)
{
var id = AppContent.GetIdentifier(token + i.ToString(), "string", "package_name");
if (id != 0)
myList.Add(AppContext.GetString(id));
}
My issue is that my token names all begin with "posn." (the posn can denote the position of anything, so you can have "posn.left_arm" and "posn.brokenose"). I want to be able to add to the list of posn elements, so I can't really store a list of the parts after the period. I can't use a string-array for this either (specific reason means I can't do this).
Is there a way I can use something akin to "posn.*" in the GetIdentifier call to return the ids?
You can use some reflection foo to get what you want. It is not pretty at all but it works. The reflection stuff is based on https://gist.github.com/atsushieno/4e66da6e492dfb6c1dd0
private List<string> _stringNames;
private IEnumerable<int> GetIdentifiers(string contains)
{
if (_stringNames == null)
{
var eass = Assembly.GetExecutingAssembly();
Func<Assembly, Type> f = ass =>
ass.GetCustomAttributes(typeof(ResourceDesignerAttribute), true)
.OfType<ResourceDesignerAttribute>()
.Where(ca => ca.IsApplication)
.Select(ca => ass.GetType(ca.FullName))
.FirstOrDefault(ty => ty != null);
var t = f(eass) ??
AppDomain.CurrentDomain.GetAssemblies().Select(ass => f(ass)).FirstOrDefault(ty => ty != null);
if (t != null)
{
var strings = t.GetNestedTypes().FirstOrDefault(n => n.Name == "String");
if (strings != null)
{
var fields = strings.GetFields();
_stringNames = new List<string>();
foreach (var field in fields)
{
_stringNames.Add(field.Name);
}
}
}
}
if (_stringNames != null)
{
var names = _stringNames.Where(s => s.Contains(contains));
foreach (var name in names)
{
yield return Resources.GetIdentifier(name, "string", ComponentName.PackageName);
}
}
}
Then somewhere in your Activity you could do:
var ids = GetIdentifiers("action").ToList();
That will give you the ids of all string resources whose names contain the string "action".
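For the question's naming convention, the same helper could be called with the "posn" prefix and the ids mapped back to strings, something like this (an untested sketch; posnStrings is just an illustrative name):
// Collect the actual string values for all resources whose names contain "posn".
var posnStrings = GetIdentifiers("posn")
    .Where(id => id != 0)
    .Select(id => Resources.GetString(id))
    .ToList();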
I'm looking for some mechanism to automatically apply values for File Properties (as seen in the VS Properties window) when adding new files of particular types to VS2012 projects.
For example, if someone adds a *.cshtml file, I'd like the Custom Tool to be set by default to 'RazorGenerator' because it can be a hassle ensuring this is specified manually across all razor files.
Similar again for *.spark where the Build Action must always be set to Content (or the CI build breaks).
There doesn't seem to be any VS2012 built in settings for configuring default file properties, so what are people doing to solve this that could work well for a team of developers?
I'm looking for options.
The ANTLR 4 C# target requires similar actions to be taken when *.g4 files are added to the project:
Set the Build Action to Antlr4
Set several additional properties to default values
Expose several custom properties via customization of the Properties window in Visual Studio
Based on your question, I'll focus on only the first item.
Providing a default build action for a file type
This is actually a feature provided as part of the ANTLR Language Support extension. The extension performs these operations when a file with one of the relevant extensions is added to the project. See the comment at the beginning of OnAfterAddFilesEx for details of the behavior. This code is from a class which extends Package and implements IVsTrackProjectDocumentsEvents2; the implementations of the remaining methods of that interface simply return VSConstants.E_NOTIMPL. Note that this code may use custom extension methods that are defined elsewhere within my solution. The code is not intended to be used directly, but merely to serve as a guide for someone looking to perform a similar operation via their own extension.
private static readonly Guid CSharpProjectTypeGuid = Guid.Parse(PrjKind.prjKindCSharpProject);
private uint _trackDocumentsEventsCookie;
protected override void Initialize()
{
base.Initialize();
var serviceProvider = this.AsVsServiceProvider(); // same helper as used in Dispose below
IVsTrackProjectDocuments2 trackDocuments2 = serviceProvider.GetTrackProjectDocuments2();
trackDocuments2.AdviseTrackProjectDocumentsEvents(this, out _trackDocumentsEventsCookie);
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
if (_trackDocumentsEventsCookie != 0)
{
var serviceProvider = this.AsVsServiceProvider();
IVsTrackProjectDocuments2 trackDocuments = serviceProvider.GetTrackProjectDocuments2();
trackDocuments.UnadviseTrackProjectDocumentsEvents(_trackDocumentsEventsCookie);
_trackDocumentsEventsCookie = 0;
}
}
base.Dispose(disposing);
}
public int OnAfterAddFilesEx(int cProjects, int cFiles, IVsProject[] rgpProjects, int[] rgFirstIndices, string[] rgpszMkDocuments, VSADDFILEFLAGS[] rgFlags)
{
/* need to make the following alterations:
* 1. set the Build Action of *.g and *.g3 to Antlr3
* 2. set the Build Action of *.g4 to Antlr4
* 3. set the Custom Tool of *.g, *.g3, and *.g4 to MSBuild:Compile
* 4. set the Custom Tool Namespace of *.g4 to $(RootNamespace) + relative folder path
*/
for (int i = 0; i < cProjects; i++)
{
IVsProject project = rgpProjects[i];
int projectFiles = (i == cProjects - 1) ? cFiles : rgFirstIndices[i + 1];
projectFiles -= rgFirstIndices[i];
if (!IsCSharpProject(project))
continue;
for (int j = 0; j < projectFiles; j++)
{
string currentFile = rgpszMkDocuments[rgFirstIndices[i] + j];
if (string.IsNullOrEmpty(currentFile))
continue;
bool grammarFile =
currentFile.EndsWith(".tokens", StringComparison.OrdinalIgnoreCase)
|| currentFile.EndsWith(".g", StringComparison.OrdinalIgnoreCase)
|| currentFile.EndsWith(".g3", StringComparison.OrdinalIgnoreCase)
|| currentFile.EndsWith(".g4", StringComparison.OrdinalIgnoreCase);
if (grammarFile)
{
OnAfterAddedGrammarFile(project, currentFile);
}
}
}
return VSConstants.S_OK;
}
private static bool IsCSharpProject(IVsProject project)
{
IVsAggregatableProject aggregatableProject = project as IVsAggregatableProject;
if (aggregatableProject == null)
return false;
string guidsString = null;
if (ErrorHandler.Failed(ErrorHandler.CallWithCOMConvention(() => aggregatableProject.GetAggregateProjectTypeGuids(out guidsString))))
return false;
if (string.IsNullOrWhiteSpace(guidsString))
return false;
string[] guids = guidsString.Split(';');
foreach (var guidString in guids)
{
Guid guid;
if (Guid.TryParse(guidString, out guid) && guid == CSharpProjectTypeGuid)
return true;
}
return false;
}
private void OnAfterAddedGrammarFile(IVsProject project, string currentFile)
{
int found;
VSDOCUMENTPRIORITY[] priority = new VSDOCUMENTPRIORITY[1];
uint itemId;
if (ErrorHandler.Failed(project.IsDocumentInProject(currentFile, out found, priority, out itemId)))
return;
if (found == 0 || priority[0] != VSDOCUMENTPRIORITY.DP_Standard)
return;
string desiredItemType = "Antlr3";
if (string.Equals(Path.GetExtension(currentFile), ".tokens", StringComparison.OrdinalIgnoreCase))
desiredItemType = "AntlrTokens";
else if (string.Equals(Path.GetExtension(currentFile), ".g4", StringComparison.OrdinalIgnoreCase))
desiredItemType = "Antlr4";
IVsHierarchy hierarchy = project as IVsHierarchy;
if (hierarchy != null)
{
object browseObject = null;
PropertyDescriptorCollection propertyDescriptors = null;
int hr = ErrorHandler.CallWithCOMConvention(() => hierarchy.GetProperty(itemId, (int)__VSHPROPID.VSHPROPID_BrowseObject, out browseObject));
if (ErrorHandler.Succeeded(hr))
propertyDescriptors = TypeDescriptor.GetProperties(browseObject);
object obj;
hr = hierarchy.GetProperty(itemId, (int)__VSHPROPID4.VSHPROPID_BuildAction, out obj);
if (ErrorHandler.Succeeded(hr))
{
string buildAction = obj != null ? obj.ToString() : null;
if (string.IsNullOrWhiteSpace(buildAction) || string.Equals(buildAction, "None", StringComparison.OrdinalIgnoreCase))
{
hr = ErrorHandler.CallWithCOMConvention(() => hierarchy.SetProperty(itemId, (int)__VSHPROPID4.VSHPROPID_BuildAction, desiredItemType));
}
}
if (ErrorHandler.Failed(hr) && propertyDescriptors != null)
{
PropertyDescriptor itemTypeDescriptor = propertyDescriptors["ItemType"] ?? propertyDescriptors["BuildAction"];
if (itemTypeDescriptor != null)
{
obj = itemTypeDescriptor.GetValue(browseObject);
string buildAction = itemTypeDescriptor.Converter.ConvertToInvariantString(obj);
if (string.IsNullOrWhiteSpace(buildAction) || string.Equals(buildAction, "None", StringComparison.OrdinalIgnoreCase))
{
try
{
obj = itemTypeDescriptor.Converter.ConvertFromInvariantString(desiredItemType);
itemTypeDescriptor.SetValue(browseObject, obj);
}
catch (NotSupportedException)
{
}
}
}
}
if (propertyDescriptors != null)
{
PropertyDescriptor customToolDescriptor = propertyDescriptors["CustomTool"];
if (customToolDescriptor != null)
{
obj = customToolDescriptor.GetValue(browseObject);
string customTool = customToolDescriptor.Converter.ConvertToInvariantString(obj);
if (string.IsNullOrWhiteSpace(customTool))
{
try
{
obj = customToolDescriptor.Converter.ConvertToInvariantString("MSBuild:Compile");
customToolDescriptor.SetValue(browseObject, obj);
}
catch (NotSupportedException)
{
}
}
}
PropertyDescriptor customToolNamespaceDescriptor = propertyDescriptors["CustomToolNamespace"];
if (customToolNamespaceDescriptor != null)
{
object defaultNamespace;
hr = hierarchy.GetProperty(itemId, (int)__VSHPROPID.VSHPROPID_DefaultNamespace, out defaultNamespace);
if (ErrorHandler.Succeeded(hr) && !string.IsNullOrEmpty(defaultNamespace as string))
{
obj = customToolNamespaceDescriptor.GetValue(browseObject);
string customToolNamespace = customToolNamespaceDescriptor.Converter.ConvertToInvariantString(obj);
if (string.IsNullOrWhiteSpace(customToolNamespace))
{
try
{
obj = customToolNamespaceDescriptor.Converter.ConvertToInvariantString(defaultNamespace);
customToolNamespaceDescriptor.SetValue(browseObject, obj);
}
catch (NotSupportedException)
{
}
}
}
}
}
}
}