I am trying to get some data for a specified user using eBay's GetFeedback API and ended up with this code.
using System;
using System.Configuration;
using eBay.Service.Call;
using eBay.Service.Core.Sdk;
using eBay.Service.Core.Soap;

namespace one
{
    class Program
    {
        private static ApiContext apiContext = null;

        static void Main(string[] args)
        {
            ApiContext apiContext = GetApiContext();
            GeteBayOfficialTimeCall apiCall = new GeteBayOfficialTimeCall(apiContext);
            GetFeedbackCall call = new GetFeedbackCall(apiContext);
            call.UserID = "abc";
            Console.WriteLine(call.GetFeedback().ToString());
            Console.ReadKey();
        }

        static ApiContext GetApiContext()
        {
            if (apiContext != null)
            {
                return apiContext;
            }
            else
            {
                apiContext = new ApiContext();
                apiContext.SoapApiServerUrl = ConfigurationManager.AppSettings["Environment.ApiServerUrl"];

                ApiCredential apiCredential = new ApiCredential();
                apiCredential.eBayToken = ConfigurationManager.AppSettings["UserAccount.ApiToken"];
                apiContext.ApiCredential = apiCredential;
                apiContext.Site = SiteCodeType.US;

                return apiContext;
            }
        }
    }
}
It prints the following line in the console:
eBay.Service.Core.Soap.FeedbackDetailTypeCollection
How can I get the original data?
call.GetFeedback() returns a collection of FeedbackDetailType objects, so you can use foreach to retrieve the information (such as the feedback score and other details) for each individual feedback entry.
See the complete list of FeedbackDetailType members in the eBay SDK documentation.
For example:
foreach (FeedbackDetailType feedback in call.GetFeedback())
{
    Console.WriteLine(feedback.CommentText);
    //and other stuff
}
Or you can use something like this:
call.GetFeedback();
Console.WriteLine(call.FeedbackScore);
I'm getting the following error in my C# Web API: "Exception thrown: 'System.Threading.ThreadAbortException' in System.Data.dll.
Thread was being aborted." I have a long-running process on one thread that uses my data access logic class to get and update the records being processed. Meanwhile a user submits another group to process, which needs the same data access logic class, and that results in the error. Here is a rough sketch of what I'm doing.
WebAPI Class:
public IHttpActionResult OkToProcess(string groupNameToProcess)
{
    var logic = GetLogic();
    //Gets all unprocessed records and adds them to the blocking queue
    Task.Factory.StartNew(() => logic.LoadAndProcess(groupNameToProcess));
    return Ok();
}

public IHttpActionResult AddToProcess(int recordIdToProcess)
{
    StaticProcessingFactory.AddToQueue(recordIdToProcess);
    return Ok();
}
StaticProcessingFactory
internal static ConcurrentDictionary<ApplicationEnvironment, Logic> correctors = new ConcurrentDictionary<ApplicationEnvironment, Logic>();
internal static BlockingCollection<Message> MessageQueue = new BlockingCollection<Message>(2000);

public void StartService()
{
    Task.Factory.StartNew(() => LoadService());
}

public void LoadService()
{
    var logic = GetLogic();
    if (isFirstGroupOkToProcessAsPerTextFileLog())
        logic.LoadAndProcess("FirstGroup");
    if (isSecondGroupOkToProcessAsPerTextFileLog())
        logic.LoadAndProcess("SecondGroup");
}

public static Logic GetLogic()
{
    var sqlConnectionFactory = Tools.GetSqlConnectionFactory();
    string environment = ConfigurationManager.AppSettings["DefaultApplicationEnvironment"];
    ApplicationEnvironment applicationEnvironment =
        ApplicationEnvironmentExtensions.ToApplicationEnvironment(environment);
    return correctors.GetOrAdd(applicationEnvironment, new Logic(sqlConnectionFactory));
}

public static void AddToQueue(Message message, bool completeAdding = true)
{
    if (MessageQueue.IsAddingCompleted)
        MessageQueue = new BlockingCollection<Message>();
    if (completeAdding && message.ProcessImmediately)
        StartQueue(message);
    else
        MessageQueue.Add(message);
}

public static void StartQueue(Message message = null)
{
    if (message != null)
    {
        if (!string.IsNullOrEmpty(message.ID))
            MessageQueue.Add(message);
        Logic logic = GetLogic(message.Environment); // overload that takes an environment (not shown above)
        try
        {
            var messages = MessageQueue.TakeWhile(x => logic.IsPartOfGroup(x.GroupName, message.GroupName));
            if (messages.Count() > 0)
                MessageQueue.CompleteAdding();
            int i = 0;
            foreach (var msg in messages)
            {
                i++;
                Process(msg);
            }
        }
        catch (InvalidOperationException) { MessageQueue.CompleteAdding(); }
    }
}

public static void Process(Message message)
{
    var logic = GetLogic(message.Environment); // same overload as above
    var record = logic.GetRecord(message.ID);
    record.Status = Status.Processed;
    logic.Save(record);
}
Logic Class
private readonly DataAccess DataAccess;

public Logic(SqlConnectionFactory factory)
{
    DataAccess = new DataAccess(factory);
}

public void LoadAndProcess(string groupName)
{
    var groups = DataAccess.GetGroups();
    var records = DataAccess.GetRecordsReadyToProcess(groups);
    for (int i = 0; i < records.Count; i++)
    {
        Message message = new Message();
        message.Environment = environment.ToString();
        message.ID = records[i].ID;
        message.User = user;
        message.Group = groupName;
        message.ProcessImmediately = true;
        StaticProcessingFactory.AddToQueue(message, i + 1 == records.Count);
    }
}
Any ideas how I might ensure that all traffic from all threads has access to the data access logic without threads being systematically aborted?
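One pattern that avoids ThreadAbortException for work started from a Web API controller is to let the ASP.NET host track the background work instead of firing a bare Task.Factory.StartNew. A minimal sketch, assuming the app runs on System.Web / .NET Framework 4.5.2 or later (HostingEnvironment.QueueBackgroundWorkItem); ProcessingController is a made-up name, while GetLogic and LoadAndProcess come from the sketch above:

using System.Threading;
using System.Web.Hosting;
using System.Web.Http;

public class ProcessingController : ApiController   // hypothetical controller name
{
    public IHttpActionResult OkToProcess(string groupNameToProcess)
    {
        var logic = StaticProcessingFactory.GetLogic();

        // QueueBackgroundWorkItem registers the work with the host, which delays
        // app-domain shutdown (up to a grace period) instead of aborting the thread.
        HostingEnvironment.QueueBackgroundWorkItem((CancellationToken token) =>
        {
            if (!token.IsCancellationRequested)
                logic.LoadAndProcess(groupNameToProcess);
        });

        return Ok();
    }
}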
I'm trying to map a CSV file into a class object with C#. My problem is that I have 3 different files, but I want to follow DRY principles. Can someone tell me how to change the 'ParseLine' method to make that possible?
It's a C# console app.
This is what my FileReader looks like:
public class FileReader<T> : IFileReader<T> where T : Entity
{
    private readonly ITransactionReader<T> _transactionReader;

    public FileReader(ITransactionReader<T> transactionReader)
    {
        _transactionReader = transactionReader;
    }

    public List<T> GetInfoFromFile(string filePath)
    {
        var lines = File.ReadAllLines(filePath);
        var genericLines = new List<T>();
        foreach (var line in lines)
        {
            genericLines.Add(_transactionReader.ParseLine(line));
        }
        return genericLines;
    }
}

public interface IFileReader<T> where T : Entity
{
    List<T> GetInfoFromFile(string filePath);
}
This is what the object should look like.
public class TransactionReader : ITransactionReader<Transaction>
{
    public Transaction ParseLine(string line)
    {
        var fields = line.Split(";");
        var transaction = new Transaction()
        {
            Id = fields[0],
            Month = int.Parse(fields[1]),
            Day = int.Parse(fields[2]),
            Year = int.Parse(fields[3]),
            IncomeSpecification = fields[4],
            TransactionAmount = int.Parse(fields[5])
        };
        return transaction;
    }
}

public interface ITransactionReader<T>
{
    T ParseLine(string line);
}
This is how I run it for test purposes.
class Program
{
    private static readonly string filePath = "C:/Users/<my_name>/Desktop/C# Practice/ERP/ERP/CsvFiles/Transaction.csv";

    static void Main(string[] args)
    {
        ITransactionReader<Transaction> transactionReader = new TransactionReader();
        IFileReader<Transaction> fileReader = new FileReader<Transaction>(transactionReader);

        List<Transaction> Test()
        {
            var obj = fileReader.GetInfoFromFile(filePath);
            return obj;
        }

        var list = Test();
    }
}
I'm looking to modify that line:
genericLines.Add(_transactionReader.ParseLine(line));
and the method arguments, to make it open for any CSV file.
I don't mind changing that composition into something more effective.
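For what it's worth, the FileReader<T> above is already generic enough to stay DRY: each file type only needs its own ITransactionReader<T> implementation, and GetInfoFromFile never changes. A minimal sketch under that assumption; Employee and EmployeeReader are hypothetical names (only Transaction exists in the code above), and Employee is assumed to derive from the same Entity base class so it satisfies the "where T : Entity" constraint:

// Hypothetical second entity, reused with the same generic FileReader<T>.
public class Employee : Entity
{
    public string Name { get; set; }
    public decimal Salary { get; set; }
}

// A second parser implementation; FileReader<T> itself needs no changes.
public class EmployeeReader : ITransactionReader<Employee>
{
    public Employee ParseLine(string line)
    {
        var fields = line.Split(";");
        return new Employee
        {
            Name = fields[0],
            Salary = decimal.Parse(fields[1])
        };
    }
}

// Usage: the composition stays exactly the same, only the type parameter changes.
// IFileReader<Employee> employeeReader = new FileReader<Employee>(new EmployeeReader());
// List<Employee> employees = employeeReader.GetInfoFromFile("Employees.csv");

If the name ITransactionReader<T> feels too transaction-specific for other entities, renaming it to something like ILineParser<T> is purely cosmetic; the shape of the composition stays the same.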
I want to get information from only 1 user out of 20,000 users. The response time of the method I used below is 40 seconds. What is the solution to this problem?
public AuthenticatedUserProperties Info(string Username)
{
    try
    {
        var context = new PrincipalContext(ContextType.Domain, Settings.LDAPDomain, Settings.LDAPContainer, Settings.LDAPUsername, Settings.LDAPPassword);
        UserPrincipal user = new UserPrincipal(context);
        user.SamAccountName = Username;
        var searcher = new PrincipalSearcher(user);
        var searchResults = searcher.FindOne();
        DirectoryEntry de = searchResults.GetUnderlyingObject() as DirectoryEntry;
        ActiveDirectoryUserProperties prop = ConvertLdapUserPropertyToArray(de);
        return new AuthenticatedUserProperties
        {
            Status = true,
            Properties = prop
        };
    }
    catch (Exception e)
    {
        return new AuthenticatedUserProperties
        {
            Status = false,
            Properties = null
        };
    }
}
I use the System.DirectoryServices.Protocols library instead. It is always blazing fast. I can never get System.DirectoryServices.AccountManagement to have reliable performance, and it is often agonizingly slow (10+ seconds) to get just one user. To be honest, I think our network setup is likely to blame, causing the bind to misbehave, but the Protocols library yields good results without much effort regardless of our network problems.
You have to do slightly more work - but nothing particularly difficult. I'm not an expert with this library - but this sample code works reliably for me.
using System.Collections.Generic;
using System.DirectoryServices.Protocols;
using System.Security.Cryptography.X509Certificates;
using System.Web.Hosting;

public class UserInfo
{
    public string SAMAccountName;
    public string DomainHostName;
    public string ADSDirectory;
    public Dictionary<string, string> UserAttributes;

    // Some attributes are not really strings and require extra handling - simplified for the example.
    // This is really just for illustrative purposes.
    public UserInfo(string a_SAMAccountName, string a_DomainHostName = "ldap.mydomain:3268", string a_ADSDirectory = "ours.net")
    {
        UserAttributes = new Dictionary<string, string>();
        SAMAccountName = a_SAMAccountName;
        DomainHostName = a_DomainHostName;
        ADSDirectory = a_ADSDirectory;
    }
}

public static class GetUserAttributes
{
    public static List<string> WantedAttributes;

    static GetUserAttributes()
    {
        WantedAttributes = new List<string>();
        WantedAttributes.Add("mail");
        //... add the other attributes you want
    }

    // A method cannot share its enclosing class's name, hence GetAttributes rather than GetUserAttributes.
    public static void GetAttributes(UserInfo a_user)
    {
        using (HostingEnvironment.Impersonate())
        {
            LdapDirectoryIdentifier z_entry = new LdapDirectoryIdentifier(a_user.DomainHostName, true, false);
            using (LdapConnection z_remote = new LdapConnection(z_entry))
            {
                z_remote.SessionOptions.VerifyServerCertificate = delegate (LdapConnection l, X509Certificate c) { return true; };
                z_remote.SessionOptions.ReferralChasing = ReferralChasingOptions.None;
                z_remote.SessionOptions.ProtocolVersion = 3;
                z_remote.Bind();

                SearchRequest z_search = new SearchRequest();
                z_search.Scope = System.DirectoryServices.Protocols.SearchScope.Subtree;
                z_search.Filter = "(SAMAccountName=" + a_user.SAMAccountName + ")";
                z_search.DistinguishedName = a_user.ADSDirectory;
                foreach (string z_item in WantedAttributes)
                {
                    z_search.Attributes.Add(z_item);
                }

                SearchResponse z_response = (SearchResponse)z_remote.SendRequest(z_search);
                if (z_response != null)
                {
                    foreach (SearchResultEntry z_result in z_response.Entries)
                    {
                        foreach (string z_property in z_result.Attributes.AttributeNames)
                        {
                            if (WantedAttributes.Contains(z_property))
                            {
                                DirectoryAttribute z_details = z_result.Attributes[z_property];
                                if (z_details.Count == 1)
                                {
                                    // Special handling required for attributes that aren't strings (objectSid, objectGUID, etc.)
                                    string z_value = z_details[0].ToString().Trim();
                                    if (!string.IsNullOrWhiteSpace(z_value))
                                    {
                                        a_user.UserAttributes.Add(z_property, z_value);
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
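For illustration, a usage sketch under the same assumptions (the account name is a placeholder, and the host and directory fall back to the constructor defaults shown above):

var user = new UserInfo("jdoe");          // placeholder sAMAccountName
GetUserAttributes.GetAttributes(user);    // one LDAP search; populates user.UserAttributes

foreach (var pair in user.UserAttributes)
    Console.WriteLine(pair.Key + " = " + pair.Value);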
I am trying to use the Unity block of the Enterprise Library in a program I am writing,
but I think I am using dependency injection wrong. I was wondering if someone could point me in the right direction.
static void Main(string[] args)
{
    using (IUnityContainer container = new UnityContainer())
    {
        InitialiseContainer(container);
        DataCopierFactory dcFactory = new DataCopierFactory();
        ERunOptions dataCopierType = ExtractParams(args);
        IDataCopier dataCopier = dcFactory.CreateDataCopier((int)dataCopierType, container);
        dataCopier.DetectChanges();
        dataCopier.ParseData();
        dataCopier.CopyData();
    }
}
//use the ioc container to register the EF context type to the repository interfaces..
private static void InitialiseContainer(IUnityContainer container)
{
    //add extensions:
    container.AddNewExtension<Interception>();

    //Licence Schedule
    container.RegisterType<IEFContext, LTE_DownFromWeb_EFContext>("DataCopier.ScheduleDataCopier.Source");
    container.RegisterType<IEFContext, LTE_Licensing_EFContext>("DataCopier.ScheduleDataCopier.Destination");
    container.RegisterType<IRepositorySession>("Schedule_Source", new InjectionConstructor(container.Resolve<IEFContext>("DataCopier.ScheduleDataCopier.Source")));
    container.RegisterType<IRepositorySession>("Schedule_Destination", new InjectionConstructor(container.Resolve<IEFContext>("DataCopier.ScheduleDataCopier.Destination")));
}
So basically the DataCopierFactory creates an instance of a DataCopier like so:
DataCopierFactory:
//return a data copier that will transfer data from any DB to any other DB
public IDataCopier CreateDataCopier(int i, IUnityContainer container)
{
    switch (i)
    {
        case 1:
            return new ScheduleDataCopier(container);
        default:
            throw new InvalidOperationException("Parameter " + i + " does not exist");
    }
}
A data copier looks like this:
class ScheduleDataCopier : IDataCopier
{
    private List<Site> _sites;
    private List<SitesAndApparatuses> _scheduleList;
    private IUnityContainer _container;

    public ScheduleDataCopier(IUnityContainer container)
    {
        _container = container;
        _scheduleList = new List<SitesAndApparatuses>();
    }

    //check if new sites registration has arrived in tblSites on down from web db.
    public bool DetectChanges()
    {
        using (var db = _container.Resolve<IRepositorySession>("Schedule_Source"))
        {
            SiteAudit lastSite = new SitesAuditRepository().GetLatest();
            var sitesRepo = new SitesRepository();
            var sites = sitesRepo.Where(x => x.SID > lastSite.SALatestSID);
            if (sites.Count() < 1)
            {
                return false;
            }
            _sites = sites.ToList();
            db.Dispose();
        }
        return true;
    }

    //parse the data into a list of SitesAndApparatuses objects
    public bool ParseData()
    {
        try
        {
            foreach (Site s in _sites)
            {
                var schedule = (SitesAndApparatuses)XmlObjectBuilder.Deserialize(typeof(SitesAndApparatuses), s.XMLFile);
                schedule.acCode = s.Registration.RAcCode;
                _scheduleList.Add(schedule);
            }
        }
        catch (Exception ex)
        {
            throw new NotImplementedException("HANDLE THIS SHIT!", ex);
        }
        return true;
    }

    public bool CopyData()
    {
        try
        {
            using (var db = _container.Resolve<IRepositorySession>("Schedule_Destination"))
            {
                var licensingScheduleRepo = new LicensingScheduleRepository();
                //some logic
                db.Commit();
            }
        }
        catch (Exception ex)
        {
        }
        return true;
    }
}
Second question: I resolve my unit of work object, called RepositorySession, in the DataCopier classes using the Unity container I passed in. Is this the wrong approach, and if so, why? I'm struggling to find any info on it online.
This is probably too much code for someone to read, but I'm hoping for an answer!
Thanks in advance
Neil
I'd do something like:
container.RegisterType<IEFContext, LTE_DownFromWeb_EFContext>("Source");
container.RegisterType<IEFContext, LTE_Licensing_EFContext>("Destination");
container.RegisterType<IRepositorySession>("Source", new InjectionConstructor(new ResolvedParameter<IEFContext>("Source")));
container.RegisterType<IRepositorySession>("Destination", new InjectionConstructor(new ResolvedParameter<IEFContext>("Destination")));
container.RegisterType<IDataCopier, ScheduleDataCopier>("0", new InjectionConstructor(
    new ResolvedParameter<IRepositorySession>("Source"),
    new ResolvedParameter<IRepositorySession>("Destination")));

//Now resolve
ERunOptions dataCopierType = ExtractParams(args);
IDataCopier dataCopier = container.Resolve<IDataCopier>(dataCopierType.ToString());
dataCopier.DetectChanges();
dataCopier.ParseData();
dataCopier.CopyData();
DataCopier Class
class ScheduleDataCopier : IDataCopier
{
    private List<Site> _sites;
    private List<SitesAndApparatuses> _scheduleList;
    private IRepositorySession _source;
    private IRepositorySession _destination;

    public ScheduleDataCopier(IRepositorySession source, IRepositorySession destination)
    {
        _source = source;
        _destination = destination;
        _scheduleList = new List<SitesAndApparatuses>();
    }

    //check if new sites registration has arrived in tblSites on down from web db.
    public bool DetectChanges()
    {
        SiteAudit lastSite = new SitesAuditRepository().GetLatest();
        var sitesRepo = new SitesRepository();
        var sites = sitesRepo.Where(x => x.SID > lastSite.SALatestSID);
        if (sites.Count() < 1)
        {
            return false;
        }
        _sites = sites.ToList();
        _source.DoSomething();
        _source.CommitAndReleaseResources(); //clean up but leave the object reusable
        return true;
    }

    //parse the data into a list of SitesAndApparatuses objects
    public bool ParseData()
    {
        try
        {
            foreach (Site s in _sites)
            {
                var schedule = (SitesAndApparatuses)XmlObjectBuilder.Deserialize(typeof(SitesAndApparatuses), s.XMLFile);
                schedule.acCode = s.Registration.RAcCode;
                _scheduleList.Add(schedule);
            }
        }
        catch (Exception ex)
        {
            throw new NotImplementedException("HANDLE THIS SHIT!", ex);
        }
        return true;
    }

    public bool CopyData()
    {
        try
        {
            var licensingScheduleRepo = new LicensingScheduleRepository();
            //some logic
            _destination.Commit();
        }
        catch (Exception ex)
        {
            //handle exception
        }
        return true;
    }
}
There are two main differences between what you're doing and the above: I'm using injection parameters (the ResolvedParameter class) to resolve object instances only when they're needed, and I let Unity drive the entire DI process for me, including resolving my DataCopier.
If I add another DataCopier, I just need to register the new DataCopier type with Unity under a name that matches the appropriate ERunOptions value, and I can then resolve the new DataCopier with no change to my code:
container.RegisterType<IDataCopier, RandomDataCopier>("1", new InjectionConstructor(
    new ResolvedParameter<IRepositorySession>("RandomSource"),
    new ResolvedParameter<IRepositorySession>("RandomDestination")));
and:
ERunOptions dataCopierType = ExtractParams(args);
IDataCopier dataCopier = container.Resolve<IDataCopier>(dataCopierType.ToString());
dataCopier.DetectChanges();
dataCopier.ParseData();
dataCopier.CopyData();
stays the same but can now resolve either the ScheduleDataCopier or the RandomDataCopier.
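One wrinkle: the registration name passed to RegisterType has to match exactly what dataCopierType.ToString() returns, or the Resolve call will fail. ERunOptions and ExtractParams aren't shown in the question, so the following is only a hypothetical sketch of that mapping:

using System;

// Hypothetical enum: ToString() yields "ScheduleDataCopier" / "RandomDataCopier",
// so the copiers would be registered under those names rather than "0" or "1".
public enum ERunOptions
{
    ScheduleDataCopier,
    RandomDataCopier
}

static ERunOptions ExtractParams(string[] args)
{
    // Assumes the first command-line argument names the copier to run.
    return (ERunOptions)Enum.Parse(typeof(ERunOptions), args[0], ignoreCase: true);
}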
I am using the Microsoft Enterprise Library Caching block (version 5) with a FileDependency.
On the class that I want cached, I have a static property that will either return the item from the cache or create a new instance and add it to the cache.
This initially works well: the class is created once, and from then on the cached copy is returned. However, once the dependency file changes, the cached item is never returned again.
I have put together a sample program below to illustrate the issue.
The output from this is
999 cached , 1 uncached
999 cached , 1001 uncached
I would expect the results to be
999 cached , 1 uncached
1998 cached , 2 uncached
It looks like the object is added back to the cache but is then immediately treated as expired.
Any ideas why?
using System;
using Microsoft.Practices.EnterpriseLibrary.Common.Configuration;
using Microsoft.Practices.EnterpriseLibrary.Caching;
using Microsoft.Practices.EnterpriseLibrary.Caching.Expirations;

namespace TestCache
{
    static class Program
    {
        [STAThread]
        static void Main()
        {
            Cache.Create();
            for (int i = 0; i < 1000; i++)
                TestClass.Current.DummyMethod();
            Console.WriteLine(String.Format("{0} cached , {1} uncached", TestClass.CachedItems, TestClass.UncachedItems));

            System.IO.File.AppendAllText(Cache.dependencyFileName, "Test");

            for (int i = 0; i < 1000; i++)
                TestClass.Current.DummyMethod();
            Console.WriteLine(String.Format("{0} cached , {1} uncached", TestClass.CachedItems, TestClass.UncachedItems));
            Console.ReadLine();
        }
    }

    public class Cache
    {
        public static CacheManager cacheManager = null;
        public static string dependencyFileName;
        public static FileDependency objFileDependency;

        public static void Create()
        {
            var builder = new ConfigurationSourceBuilder();
            builder.ConfigureCaching()
                .ForCacheManagerNamed("TestCache")
                .UseAsDefaultCache()
                .StoreInMemory();
            var configSource = new DictionaryConfigurationSource();
            builder.UpdateConfigurationWithReplace(configSource);
            EnterpriseLibraryContainer.Current = EnterpriseLibraryContainer.CreateDefaultContainer(configSource);
            cacheManager = (CacheManager)EnterpriseLibraryContainer.Current.GetInstance<ICacheManager>("TestCache");

            dependencyFileName = "testCache.xml";
            if (!System.IO.File.Exists(dependencyFileName))
                using (System.IO.File.Create(dependencyFileName)) { }

            objFileDependency = new FileDependency(dependencyFileName);
        }
    }

    public class TestClass
    {
        public static int CachedItems = 0;
        public static int UncachedItems = 0;

        public void DummyMethod()
        {
        }

        public static TestClass Current
        {
            get
            {
                TestClass current = (Cache.cacheManager.GetData("Test") as TestClass);
                if (current != null)
                    CachedItems++;
                else
                {
                    UncachedItems++;
                    current = new TestClass();
                    Cache.cacheManager.Add("Test", current, CacheItemPriority.Normal, null, new ICacheItemExpiration[] { Cache.objFileDependency });
                }
                return current;
            }
        }
    }
}
Your issue is that you are using a static FileDependency. This causes the LastUpdateTime of the FileDependency never to be updated, which in turn causes all items added to the cache to show as expired (HasExpired() == true). Even though you keep adding items to the cache, since they are already expired you can never retrieve them.
The solution is to use a new FileDependency object for every addition to the cache. The easiest change is to replace the objFileDependency field with a property. Using your existing names and approach, the code would look like this:
public class Cache
{
    public static CacheManager cacheManager = null;
    public static readonly string dependencyFileName = "testCache.xml";

    public static FileDependency objFileDependency
    {
        get
        {
            return new FileDependency(dependencyFileName);
        }
    }

    public static void Create()
    {
        var builder = new ConfigurationSourceBuilder();
        builder.ConfigureCaching()
            .ForCacheManagerNamed("TestCache")
            .UseAsDefaultCache()
            .StoreInMemory();
        var configSource = new DictionaryConfigurationSource();
        builder.UpdateConfigurationWithReplace(configSource);
        EnterpriseLibraryContainer.Current = EnterpriseLibraryContainer.CreateDefaultContainer(configSource);
        cacheManager = (CacheManager)EnterpriseLibraryContainer.Current.GetInstance<ICacheManager>("TestCache");

        if (!System.IO.File.Exists(dependencyFileName))
            using (System.IO.File.Create(dependencyFileName)) { }
    }
}
}