C# Save Data To Database and Exit Program

Hey guys, I have a script written in C# that generates some encryption keys that I want to save into my database. My code looks like this:
using System;
using System.Collections.Generic;
using System.Configuration;
using System.IO;
using System.Linq;
using System.Threading;
using Microsoft.WindowsAzure.MediaServices.Client;
using Microsoft.WindowsAzure.MediaServices.Client.ContentKeyAuthorization;
using Microsoft.WindowsAzure.MediaServices.Client.DynamicEncryption;
using Microsoft.WindowsAzure.MediaServices.Client.Widevine;
using Newtonsoft.Json;
namespace DeliverDRMLicenses
{
class Program
{
// Read values from the App.config file.
private static readonly string _mediaServicesAccountName =
ConfigurationManager.AppSettings["MediaServicesAccountName"];
private static readonly string _mediaServicesAccountKey =
ConfigurationManager.AppSettings["MediaServicesAccountKey"];
private static readonly Uri _sampleIssuer =
new Uri(ConfigurationManager.AppSettings["Issuer"]);
private static readonly Uri _sampleAudience =
new Uri(ConfigurationManager.AppSettings["Audience"]);
// Field for service context.
private static CloudMediaContext _context = null;
private static MediaServicesCredentials _cachedCredentials = null;
static void Main(string[] args)
{
// Create and cache the Media Services credentials in a static class variable.
_cachedCredentials = new MediaServicesCredentials(
_mediaServicesAccountName,
_mediaServicesAccountKey);
// Use the cached credentials to create the CloudMediaContext.
_context = new CloudMediaContext(_cachedCredentials);
bool tokenRestriction = true;
string tokenTemplateString = null;
string drm_key_id = null;
IContentKey key = CreateCommonTypeContentKey();
// Print out the key ID and Key in base64 string format
drm_key_id = key.Id;
Console.WriteLine(" key:{0}",
key.Id, System.Convert.ToBase64String(key.GetClearKeyValue()));
Console.WriteLine(" key value:{1} ",
key.Id, System.Convert.ToBase64String(key.GetClearKeyValue()));
sbasedrmdataDataSetTableAdapters.sbase_drm_keysTableAdapter sbasedrmTableAdapter =
new sbasedrmdataDataSetTableAdapters.sbase_drm_keysTableAdapter();
sbasedrmTableAdapter.Insert(drm_key_id);
Console.WriteLine("Protection key: {0} ",
key.ProtectionKeyId, System.Convert.ToBase64String(key.GetClearKeyValue()));
Console.WriteLine("PlayReady URL: {0}",
key.GetKeyDeliveryUrl(ContentKeyDeliveryType.PlayReadyLicense));
Console.WriteLine("Widevin URL: {0}",
key.GetKeyDeliveryUrl(ContentKeyDeliveryType.Widevine));
if (tokenRestriction)
tokenTemplateString = AddTokenRestrictedAuthorizationPolicy(key);
else
AddOpenAuthorizationPolicy(key);
Console.WriteLine("Auth policy: {0}",
key.AuthorizationPolicyId);
Console.WriteLine();
Console.ReadLine();
Environment.Exit(0);
}
static public void AddOpenAuthorizationPolicy(IContentKey contentKey)
{
// Create ContentKeyAuthorizationPolicy with Open restrictions
// and create authorization policy
List<ContentKeyAuthorizationPolicyRestriction> restrictions =
new List<ContentKeyAuthorizationPolicyRestriction>
{
new ContentKeyAuthorizationPolicyRestriction
{
Name = "Open",
KeyRestrictionType = (int)ContentKeyRestrictionType.Open,
Requirements = null
}
};
// Configure PlayReady and Widevine license templates.
string PlayReadyLicenseTemplate = ConfigurePlayReadyLicenseTemplate();
string WidevineLicenseTemplate = ConfigureWidevineLicenseTemplate();
IContentKeyAuthorizationPolicyOption PlayReadyPolicy =
_context.ContentKeyAuthorizationPolicyOptions.Create("",
ContentKeyDeliveryType.PlayReadyLicense,
restrictions, PlayReadyLicenseTemplate);
IContentKeyAuthorizationPolicyOption WidevinePolicy =
_context.ContentKeyAuthorizationPolicyOptions.Create("",
ContentKeyDeliveryType.Widevine,
restrictions, WidevineLicenseTemplate);
IContentKeyAuthorizationPolicy contentKeyAuthorizationPolicy = _context.
ContentKeyAuthorizationPolicies.
CreateAsync("Deliver Common Content Key with no restrictions").
Result;
contentKeyAuthorizationPolicy.Options.Add(PlayReadyPolicy);
contentKeyAuthorizationPolicy.Options.Add(WidevinePolicy);
// Associate the content key authorization policy with the content key.
contentKey.AuthorizationPolicyId = contentKeyAuthorizationPolicy.Id;
contentKey = contentKey.UpdateAsync().Result;
}
public static string AddTokenRestrictedAuthorizationPolicy(IContentKey contentKey)
{
string tokenTemplateString = GenerateTokenRequirements();
List<ContentKeyAuthorizationPolicyRestriction> restrictions =
new List<ContentKeyAuthorizationPolicyRestriction>
{
new ContentKeyAuthorizationPolicyRestriction
{
Name = "Token Authorization Policy",
KeyRestrictionType = (int)ContentKeyRestrictionType.TokenRestricted,
Requirements = tokenTemplateString,
}
};
// Configure PlayReady and Widevine license templates.
string PlayReadyLicenseTemplate = ConfigurePlayReadyLicenseTemplate();
string WidevineLicenseTemplate = ConfigureWidevineLicenseTemplate();
IContentKeyAuthorizationPolicyOption PlayReadyPolicy =
_context.ContentKeyAuthorizationPolicyOptions.Create("Token option",
ContentKeyDeliveryType.PlayReadyLicense,
restrictions, PlayReadyLicenseTemplate);
IContentKeyAuthorizationPolicyOption WidevinePolicy =
_context.ContentKeyAuthorizationPolicyOptions.Create("Token option",
ContentKeyDeliveryType.Widevine,
restrictions, WidevineLicenseTemplate);
IContentKeyAuthorizationPolicy contentKeyAuthorizationPolicy = _context.
ContentKeyAuthorizationPolicies.
CreateAsync("Deliver Common Content Key with token restrictions").
Result;
contentKeyAuthorizationPolicy.Options.Add(PlayReadyPolicy);
contentKeyAuthorizationPolicy.Options.Add(WidevinePolicy);
// Associate the content key authorization policy with the content key
contentKey.AuthorizationPolicyId = contentKeyAuthorizationPolicy.Id;
contentKey = contentKey.UpdateAsync().Result;
return tokenTemplateString;
}
static private string GenerateTokenRequirements()
{
TokenRestrictionTemplate template = new TokenRestrictionTemplate(TokenType.SWT);
template.PrimaryVerificationKey = new SymmetricVerificationKey();
template.AlternateVerificationKeys.Add(new SymmetricVerificationKey());
template.Audience = _sampleAudience.ToString();
template.Issuer = _sampleIssuer.ToString();
template.RequiredClaims.Add(TokenClaim.ContentKeyIdentifierClaim);
return TokenRestrictionTemplateSerializer.Serialize(template);
}
static private string ConfigurePlayReadyLicenseTemplate()
{
// The following code configures PlayReady License Template using .NET classes
// and returns the XML string.
//The PlayReadyLicenseResponseTemplate class represents the template
//for the response sent back to the end user.
//It contains a field for a custom data string between the license server
//and the application (may be useful for custom app logic)
//as well as a list of one or more license templates.
PlayReadyLicenseResponseTemplate responseTemplate =
new PlayReadyLicenseResponseTemplate();
// The PlayReadyLicenseTemplate class represents a license template
// for creating PlayReady licenses
// to be returned to the end users.
// It contains the data on the content key in the license
// and any rights or restrictions to be
// enforced by the PlayReady DRM runtime when using the content key.
PlayReadyLicenseTemplate licenseTemplate = new PlayReadyLicenseTemplate();
// Configure whether the license is persistent
// (saved in persistent storage on the client)
// or non-persistent (only held in memory while the player is using the license).
licenseTemplate.LicenseType = PlayReadyLicenseType.Nonpersistent;
// AllowTestDevices controls whether test devices can use the license or not.
// If true, the MinimumSecurityLevel property of the license
// is set to 150. If false (the default),
// the MinimumSecurityLevel property of the license is set to 2000.
licenseTemplate.AllowTestDevices = true;
// You can also configure the Play Right in the PlayReady license by using the PlayReadyPlayRight class.
// It grants the user the ability to playback the content subject to the zero or more restrictions
// configured in the license and on the PlayRight itself (for playback specific policy).
// Much of the policy on the PlayRight has to do with output restrictions
// which control the types of outputs that the content can be played over and
// any restrictions that must be put in place when using a given output.
// For example, if the DigitalVideoOnlyContentRestriction is enabled,
//then the DRM runtime will only allow the video to be displayed over digital outputs
//(analog video outputs won’t be allowed to pass the content).
// IMPORTANT: These types of restrictions can be very powerful
// but can also affect the consumer experience.
// If the output protections are configured too restrictive,
// the content might be unplayable on some clients.
// For more information, see the PlayReady Compliance Rules document.
// For example:
//licenseTemplate.PlayRight.AgcAndColorStripeRestriction = new AgcAndColorStripeRestriction(1);
responseTemplate.LicenseTemplates.Add(licenseTemplate);
return MediaServicesLicenseTemplateSerializer.Serialize(responseTemplate);
}
private static string ConfigureWidevineLicenseTemplate()
{
var template = new WidevineMessage
{
allowed_track_types = AllowedTrackTypes.SD_HD,
content_key_specs = new[]
{
new ContentKeySpecs
{
required_output_protection =
new RequiredOutputProtection { hdcp = Hdcp.HDCP_NONE},
security_level = 1,
track_type = "SD"
}
},
policy_overrides = new
{
can_play = true,
can_persist = true,
can_renew = false
}
};
string configuration = JsonConvert.SerializeObject(template);
return configuration;
}
static public IContentKey CreateCommonTypeContentKey()
{
// Create envelope encryption content key
Guid keyId = Guid.NewGuid();
byte[] contentKey = GetRandomBuffer(16);
IContentKey key = _context.ContentKeys.Create(
keyId,
contentKey,
"ContentKey",
ContentKeyType.CommonEncryption);
return key;
}
static private byte[] GetRandomBuffer(int length)
{
var returnValue = new byte[length];
using (var rng =
new System.Security.Cryptography.RNGCryptoServiceProvider())
{
rng.GetBytes(returnValue);
}
return returnValue;
}
}
}
So the issue I'm having is that when I try to run the program, I get an error at this line:
sbasedrmTableAdapter.Insert(drm_key_id);
and the error I receive is:
Error CS7036 There is no argument given that corresponds to the
required formal parameter 'drm_key' of
'sbase_drm_keysTableAdapter.Insert(string, string, string, string)'
How can I solve this error?

It looks like you are trying to call a method that requires four string parameters while passing only one argument. Supply all four values that the generated Insert method expects.
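For example, the call could look like the sketch below. Only 'drm_key' is named in the error message, so the column order and the values for the other columns are assumptions; check the designer-generated Insert signature of sbase_drm_keysTableAdapter for the real ones.
// Sketch only: column order and the last two values are placeholders.
sbasedrmTableAdapter.Insert(
    drm_key_id,                                            // key id column
    System.Convert.ToBase64String(key.GetClearKeyValue()), // 'drm_key' - the required parameter from the error
    key.ProtectionKeyId,                                   // placeholder - replace with your real column value
    key.AuthorizationPolicyId);                            // placeholder - replace with your real column value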
Your problem looks quite similar to: OOP inheritance and default constructor

Related

Retrieving Connection String when Creating ServiceBus with Pulumi (AzureNative)

I create a ServiceBus namespace using AzureNative on Pulumi:
public void CreateNamespace(string namespaceName, SkuName skuname, SkuTier tier)
{
var @namespace = new Namespace(namespaceName, new NamespaceArgs
{
Location = _resourceGroup.Location,
NamespaceName = namespaceName,
ResourceGroupName = _resourceGroup.Name,
Sku = new Pulumi.AzureNative.ServiceBus.Inputs.SBSkuArgs
{
Name = skuname,
Tier = tier
}
});
}
The ServiceBus namespace is created correctly. After creating it, I need to retrieve the connection string for this resource, either for the automatically created RootManageSharedAccessKey or alternatively by creating a specific additional policy for that task.
Within the Azure Portal I can retrieve the Key by navigating through
Settings/Shared access policies/Policy/ and copying the Primary access key from there.
I did not find any property or function within the AzureNative.ServiceBus namespace that seems to lead to that key. Is there any way to retrieve that property?
I solved it by creating a new NamespaceAuthorizationRule and returning the ListNamespaceKeys properties:
var namespaceRule = new NamespaceAuthorizationRule(rulename, new NamespaceAuthorizationRuleArgs
{
AuthorizationRuleName = rulename,
NamespaceName = @namespace.Name,
ResourceGroupName = _resourceGroup.Name,
Rights = new[]
{
AccessRights.Listen,
AccessRights.Send
}
});
var nameSpaceKeys = Output
.Tuple(@namespace.Name, namespaceRule.Name)
.Apply(t => ListNamespaceKeys.InvokeAsync(new ListNamespaceKeysArgs
{
NamespaceName = t.Item1,
AuthorizationRuleName = t.Item2,
ResourceGroupName = _resourceGroup.GetResourceName()
}));
Now nameSpaceKeys contains all the required properties, like PrimaryConnectionString.
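If you then need the connection string itself as an Output&lt;string&gt; (for example to pass into another resource), a minimal follow-up could look like this; the PrimaryConnectionString property name is taken from the AzureNative ServiceBus ListNamespaceKeysResult type, so verify it against the package version you use:
// Project the primary connection string out of the keys result.
Output<string> primaryConnectionString =
    nameSpaceKeys.Apply(keys => keys.PrimaryConnectionString);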

The domain account is locked out when certificate key is accessed with local account

When I access X509Certificate2.PublicKey or X509Certificate2.PrivateKey initialized from an object that was generated with BouncyCastle, I'm getting my domain account locked out (if I do it multiple times). It happens if I run the program on behalf of a local account with the same name but different password. Here is the code:
using Org.BouncyCastle.OpenSsl;
using Org.BouncyCastle.Security;
using System.IO;
using System.Security.Cryptography.X509Certificates;
namespace TestCertificateConversion
{
class Program
{
static void Main(string[] args)
{
var certString = GetCertificateString();
var textReader = new StringReader(certString);
var pemReader = new PemReader(textReader);
var bcCert = pemReader.ReadObject() as Org.BouncyCastle.X509.X509Certificate;
var netCert = DotNetUtilities.ToX509Certificate(bcCert);
var netCert2 = new X509Certificate2(netCert);
var publicKey = netCert2.PublicKey; // this locks out domain account
}
private static string GetCertificateString()
{
return #"-----BEGIN CERTIFICATE-----
MIICvDCCAaSgAwIBAgIQANDHl0sFjYUG3j76dYTadzANBgkqhkiG9w0BAQsFADAQ
MQ4wDAYDVQQDDAVwYWNlbTAgFw0xNjAxMDExMjQ4MzdaGA8yMjAwMDEwMTIyNTg0
N1owEDEOMAwGA1UEAwwFcGFjZW0wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
AoIBAQC5AKAkYnerRUmeAX0Z+aZsX39LXTVZiUd8U6waw7Hzd9E0YA50tHWizfEI
w7IBZwXS0aiXwHqJvrslc3NNs0grwu/iYQl+FGdudKmgXVE7Riu0uFAHo6eFr0dO
o0IP3LS+dPSWarXEBLbymaXRiPJDyHLefvslcSM9UQ2BHOe7dnHh9K1h+XMKTw3Z
/3szAyABBX9hsJU/mc9XjrMNXHJXALSxTfLIPzDrfh+aJtlopWpnb6vQcXwKksyk
4hyVUfw1klhglJgN0LgBGU7Ps3oxCbOqns7fB+tzkBV1E5Q97otgvMR14qLZgc8k
NQrdMl57GaWQJl6mAP1NR1gZt2f1AgMBAAGjEDAOMAwGA1UdEwQFMAMBAf8wDQYJ
KoZIhvcNAQELBQADggEBAAEz3vJOfqao9QXPWSz8YCjbWG1FeVG0NdYpd422dC2V
Vrzlo5zrkRv5XPhBOY3o81OhUe7iByiiM9suYfXLNxxd29TBGB5amO8Yv1ZX0hS/
zvVF6QS0+zZvOiujVhfHGiJxKypqgaigI6NM80ZDKPzsRPwFLIJiZYwQ7eQUlrpt
WGgFkZC23/mSOkY6VMmO5zugeMoiXRyFq33uWLlaAr+zJtRh1IPRmkA1lJv0bkC1
SslO0oSDoT2lcvZkQ5odFKX5i1z7T/wioQqG62i8nsDSz+iZOqUyDx7bL8fIEHog
qgwizgr2aAPLO/VQKU9pRTyRNFl/GL5bi7w8NN+rLxE=
-----END CERTIFICATE-----";
}
}
}
I'm not sure what I'm doing wrong, are there any security settings I might need to change to prevent it from locking out domain accounts?
I checked the .NET source code and found what causes the authentication problem in X509Certificate2.PublicKey. It is the creation of a new Oid object:
public PublicKey PublicKey {
[SecuritySafeCritical]
get {
if (m_safeCertContext.IsInvalid)
throw new CryptographicException(SR.GetString(SR.Cryptography_InvalidHandle), "m_safeCertContext");
if (m_publicKey == null) {
string friendlyName = this.GetKeyAlgorithm();
byte[] parameters = this.GetKeyAlgorithmParameters();
byte[] keyValue = this.GetPublicKey();
Oid oid = new Oid(friendlyName, OidGroup.PublicKeyAlgorithm, true); // this line
m_publicKey = new PublicKey(oid, new AsnEncodedData(oid, parameters), new AsnEncodedData(oid, keyValue));
}
return m_publicKey;
}
}
The Oid constructor is called with lookupFriendlyName set to true, which leads to the FindOidInfoWithFallback function:
// Try to find OID info within a specific group, and if that doesn't work fall back to all
// groups for compatibility with previous frameworks
internal static string FindOidInfoWithFallback(uint key, string value, OidGroup group)
{
string info = FindOidInfo(key, value, group);
// If we couldn't find it in the requested group, then try again in all groups
if (info == null && group != OidGroup.All)
{
info = FindOidInfo(key, value, OidGroup.All);
}
return info;
}
The first FindOidInfo call returns null, and then it is called a second time with OidGroup.All. Eventually this results in a CryptoAPI call:
CAPIMethods.CryptFindOIDInfo(dwKeyType, pvKey, dwGroupId);
From documentation:
The CryptFindOIDInfo function performs a lookup in the active
directory to retrieve the friendly names of OIDs under the following
conditions:
The key type in the dwKeyType parameter is set to CRYPT_OID_INFO_OID_KEY or CRYPT_OID_INFO_NAME_KEY.
No group identifier is specified in the dwGroupId parameter or the GroupID refers to EKU OIDs, policy OIDs or template OIDs.
It then attempts to authenticate with the local user account, and as a result my domain account gets locked. From the comments in the code I see that the second FindOidInfo call was added for compatibility with older frameworks, so potentially it could be skipped. Unfortunately there is no easy way to change the code since it is in the framework itself. I may try to inherit from X509Certificate2 and rewrite PublicKey and PrivateKey, but I don't really like that idea.
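If you only need the raw key material rather than a PublicKey object, one possible workaround (a sketch only, not verified against the lockout scenario) is to avoid the PublicKey property and read the same underlying data through the members the getter itself uses, which do not construct an Oid with the friendly-name lookup:
// Sketch: bypass X509Certificate2.PublicKey and its Oid friendly-name lookup.
// GetPublicKey() returns the raw encoded public key bytes;
// GetKeyAlgorithm() returns the algorithm OID string (e.g. "1.2.840.113549.1.1.1").
byte[] rawPublicKey = netCert2.GetPublicKey();
string keyAlgorithmOid = netCert2.GetKeyAlgorithm();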

Azure Text Analytics using C# giving errors

I am trying to use code that I got from GitHub: https://github.com/liamca/azure-search-machine-learning-text-analytics and the creation of an index works perfectly, but the key phrase portion is giving me a 403 Forbidden (Access is denied) error. This happens in the TextExtractionHelper class on the following block of code:
if (!response.IsSuccessStatusCode)
{
throw new Exception("Call to get key phrases failed with HTTP status code: " +
response.StatusCode + " and contents: " + content);
}
Based on the information in the comments, I created an account at this link: https://datamarket.azure.com/account/keys and used the key that it provided, but I am getting the above error.
Here is the code in case you do not want to download from github:
class Program
{
static string searchServiceName = "<removed>"; // Learn more here: https://azure.microsoft.com/en-us/documentation/articles/search-what-is-azure-search/
static string searchServiceAPIKey = "<removed>";
static string azureMLTextAnalyticsKey = "<removed>"; // Learn more here: https://azure.microsoft.com/en-us/documentation/articles/machine-learning-apps-text-analytics/
static string indexName = "textanalytics";
static SearchServiceClient serviceClient = new SearchServiceClient(searchServiceName, new SearchCredentials(searchServiceAPIKey));
static SearchIndexClient indexClient = serviceClient.Indexes.GetClient(indexName);
static void Main(string[] args)
{
string filetext = "Build great search experiences for your web and mobile apps. " +
"Many applications use search as the primary interaction pattern for their users. When it comes to search, user expectations are high. They expect great relevance, suggestions, near-instantaneous responses, multiple languages, faceting, and more. Azure Search makes it easy to add powerful and sophisticated search capabilities to your website or application. The integrated Microsoft natural language stack, also used in Bing and Office, has been improved over 16 years of development. Quickly and easily tune search results, and construct rich, fine-tuned ranking models to tie search results to business goals. Reliable throughput and storage provide fast search indexing and querying to support time-sensitive search scenarios. " +
"Reduce complexity with a fully managed service. " +
"Azure Search removes the complexity of setting up and managing your own search index. This fully managed service helps you avoid the hassle of dealing with index corruption, service availability, scaling, and service updates. Create multiple indexes with no incremental cost per index. Easily scale up or down as the traffic and data volume of your application changes.";
// Note, this will create a new Azure Search Index for the text and the key phrases
Console.WriteLine("Creating Azure Search index...");
AzureSearch.CreateIndex(serviceClient, indexName);
// Apply the Machine Learning Text Extraction to retrieve only the key phrases
Console.WriteLine("Extracting key phrases from processed text... \r\n");
KeyPhraseResult keyPhraseResult = TextExtraction.ProcessText(azureMLTextAnalyticsKey, filetext);
Console.WriteLine("Found the following phrases... \r\n");
foreach (var phrase in keyPhraseResult.KeyPhrases)
Console.WriteLine(phrase);
// Take the resulting key phrases to a new Azure Search Index
// It is highly recommended that you upload documents in batches rather
// than individually, as is done here
Console.WriteLine("Uploading extracted text to Azure Search...\r\n");
AzureSearch.UploadDocuments(indexClient, "1", keyPhraseResult);
Console.WriteLine("Wait 5 seconds for content to become searchable...\r\n");
Thread.Sleep(5000);
// Execute a test search
Console.WriteLine("Execute Search...");
AzureSearch.SearchDocuments(indexClient, "Azure Search");
Console.WriteLine("All done. Press any key to continue.");
Console.ReadLine();
}
}
The below is in the TextExtractionHelper class:
/// <summary>
/// This is a sample program that shows how to use the Azure ML Text Analytics app (https://datamarket.azure.com/dataset/amla/text-analytics)
/// </summary>
public class TextExtraction
{
private const string ServiceBaseUri = "https://api.datamarket.azure.com/";
public static KeyPhraseResult ProcessText(string accountKey, string inputText)
{
KeyPhraseResult keyPhraseResult = new KeyPhraseResult();
using (var httpClient = new HttpClient())
{
string inputTextEncoded = HttpUtility.UrlEncode(inputText);
httpClient.BaseAddress = new Uri(ServiceBaseUri);
string creds = "AccountKey:" + accountKey;
string authorizationHeader = "Basic " + Convert.ToBase64String(Encoding.ASCII.GetBytes(creds));
httpClient.DefaultRequestHeaders.Add("Authorization", authorizationHeader);
httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
// get key phrases
string keyPhrasesRequest = "data.ashx/amla/text-analytics/v1/GetKeyPhrases?Text=" + inputTextEncoded;
Task<HttpResponseMessage> responseTask = httpClient.GetAsync(keyPhrasesRequest);
responseTask.Wait();
HttpResponseMessage response = responseTask.Result;
Task<string> contentTask = response.Content.ReadAsStringAsync();
contentTask.Wait();
string content = contentTask.Result;
if (!response.IsSuccessStatusCode)
{
throw new Exception("Call to get key phrases failed with HTTP status code: " +
response.StatusCode + " and contents: " + content);
}
keyPhraseResult = JsonConvert.DeserializeObject<KeyPhraseResult>(content);
}
return keyPhraseResult;
}
}
/// <summary>
/// Class to hold result of Key Phrases call
/// </summary>
public class KeyPhraseResult
{
public List<string> KeyPhrases { get; set; }
}
/// <summary>
/// Class to hold result of Sentiment call
/// </summary>
public class SentimentResult
{
public double Score { get; set; }
}
/// <summary>
/// Class to hold result of Language detection call
/// </summary>
public class LanguageResult
{
public bool UnknownLanguage { get; set; }
public IList<DetectedLanguage> DetectedLanguages { get; set; }
}
/// <summary>
/// Class to hold information about a single detected language
/// </summary>
public class DetectedLanguage
{
public string Name { get; set; }
/// <summary>
/// This is the short ISO 639-1 standard form of representing
/// all languages. The short form is a 2 letter representation of the language.
/// en = English, fr = French for example
/// </summary>
public string Iso6391Name { get; set; }
public double Score { get; set; }
}
UPDATE
After many hours of taking different sample code and trying to put them together, I finally got something "kind of" working. Here is all my code:
class Program
{
static string searchServiceName = "<removed>"; // Learn more here: https://azure.microsoft.com/en-us/documentation/articles/search-what-is-azure-search/
static string searchServiceAPIKey = "<removed>";
//static string azureMLTextAnalyticsKey = "<removed>"; // Learn more here: https://azure.microsoft.com/en-us/documentation/articles/machine-learning-apps-text-analytics/
static string indexName = "textanalytics";
static SearchServiceClient serviceClient = new SearchServiceClient(searchServiceName, new SearchCredentials(searchServiceAPIKey));
static SearchIndexClient indexClient = serviceClient.Indexes.GetClient(indexName);
static void Main()
{
MakeRequests();
Console.WriteLine("Hit ENTER to exit...");
Console.ReadLine();
}
static async void MakeRequests()
{
// Note, this will create a new Azure Search Index for the text and the key phrases
Console.WriteLine("Creating Azure Search index...");
AzureSearch.CreateIndex(serviceClient, indexName);
// Apply the Machine Learning Text Extraction to retrieve only the key phrases
Console.WriteLine("Extracting key phrases from processed text... \r\n");
KeyPhraseResult keyPhraseResult = await TextExtraction.ProcessText();
Console.WriteLine("Found the following phrases... \r\n");
foreach (var phrase in keyPhraseResult.KeyPhrases)
Console.WriteLine(phrase);
// Take the resulting key phrases to a new Azure Search Index
// It is highly recommended that you upload documents in batches rather
// than individually, as is done here
Console.WriteLine("Uploading extracted text to Azure Search...\r\n");
AzureSearch.UploadDocuments(indexClient, "1", keyPhraseResult);
Console.WriteLine("Wait 5 seconds for content to become searchable...\r\n");
Thread.Sleep(5000);
// Execute a test search
Console.WriteLine("Execute Search...");
AzureSearch.SearchDocuments(indexClient, "Azure Search");
Console.WriteLine("All done. Press any key to continue.");
Console.ReadLine();
}
}
Here is my TextExtractionHelper class:
public class TextExtraction
{
static string azureMLTextAnalyticsKey = "<Removed>"; // Learn more here: https://azure.microsoft.com/en-us/documentation/articles/machine-learning-apps-text-analytics/
private const string ServiceBaseUri = "https://westus.api.cognitive.microsoft.com/";
public static async Task<KeyPhraseResult> ProcessText()
{
string filetext = "Build great search experiences for your web and mobile apps. " +
"Many applications use search as the primary interaction pattern for their users. When it comes to search, user expectations are high. They expect great relevance, suggestions, near-instantaneous responses, multiple languages, faceting, and more. Azure Search makes it easy to add powerful and sophisticated search capabilities to your website or application. The integrated Microsoft natural language stack, also used in Bing and Office, has been improved over 16 years of development. Quickly and easily tune search results, and construct rich, fine-tuned ranking models to tie search results to business goals. Reliable throughput and storage provide fast search indexing and querying to support time-sensitive search scenarios. " +
"Reduce complexity with a fully managed service. " +
"Azure Search removes the complexity of setting up and managing your own search index. This fully managed service helps you avoid the hassle of dealing with index corruption, service availability, scaling, and service updates. Create multiple indexes with no incremental cost per index. Easily scale up or down as the traffic and data volume of your application changes.";
KeyPhraseResult keyPhraseResult = new KeyPhraseResult();
using (var httpClient = new HttpClient())
{
httpClient.BaseAddress = new Uri(ServiceBaseUri);
// Request headers.
httpClient.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", azureMLTextAnalyticsKey);
httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
byte[] byteData = Encoding.UTF8.GetBytes("{\"documents\":[" +
"{\"id\":\"1\",\"text\":\"" + filetext + "\"},]}");
//byte[] byteData = Encoding.UTF8.GetBytes("{\"documents\":[" +
// "{\"id\":\"1\",\"text\":\"Build great search experiences for your web and mobile apps." +
// "Many applications use search as the primary interaction pattern for their users. When it comes to search, user expectations are high. They expect great relevance, suggestions, near-instantaneous responses, multiple languages, faceting, and more. Azure Search makes it easy to add powerful and sophisticated search capabilities to your website or application. The integrated Microsoft natural language stack, also used in Bing and Office, has been improved over 16 years of development. Quickly and easily tune search results, and construct rich, fine-tuned ranking models to tie search results to business goals. Reliable throughput and storage provide fast search indexing and querying to support time-sensitive search scenarios." +
// "Reduce complexity with a fully managed service. " +
// "Azure Search removes the complexity of setting up and managing your own search index. This fully managed service helps you avoid the hassle of dealing with index corruption, service availability, scaling, and service updates. Create multiple indexes with no incremental cost per index. Easily scale up or down as the traffic and data volume of your application changes.\"}," +
// "]}");
// Detect key phrases:
var keyPhrasesRequest = "text/analytics/v2.0/keyPhrases";
//var response = await CallEndpoint(httpClient, uri, byteData);
// get key phrases
using (var getcontent = new ByteArrayContent(byteData))
{
getcontent.Headers.ContentType = new MediaTypeHeaderValue("application/json");
var response = await httpClient.PostAsync(keyPhrasesRequest, getcontent);
Task<string> contentTask = response.Content.ReadAsStringAsync();
string content = contentTask.Result;
if (!response.IsSuccessStatusCode)
{
throw new Exception("Call to get key phrases failed with HTTP status code: " +
response.StatusCode + " and contents: " + content);
}
keyPhraseResult = JsonConvert.DeserializeObject<KeyPhraseResult>(content);
//return await response.Content.ReadAsStringAsync();
}
}
return keyPhraseResult;
}
}
/// <summary>
/// Class to hold result of Key Phrases call
/// </summary>
public class KeyPhraseResult
{
public List<string> KeyPhrases { get; set; }
}
/// <summary>
/// Class to hold result of Sentiment call
/// </summary>
public class SentimentResult
{
public double Score { get; set; }
}
/// <summary>
/// Class to hold result of Language detection call
/// </summary>
public class LanguageResult
{
public bool UnknownLanguage { get; set; }
public IList<DetectedLanguage> DetectedLanguages { get; set; }
}
/// <summary>
/// Class to hold information about a single detected language
/// </summary>
public class DetectedLanguage
{
public string Name { get; set; }
/// <summary>
/// This is the short ISO 639-1 standard form of representing
/// all languages. The short form is a 2 letter representation of the language.
/// en = English, fr = French for example
/// </summary>
public string Iso6391Name { get; set; }
public double Score { get; set; }
}
So I am now able to pull the key phrases from the text! But now I have a problem where it doesn't seem like the JSON string is being deserialized, and my keyPhraseResult ends up with a null value.
What am I missing?
If anyone is able to help, I would greatly appreciate it.
Thanks!
So I got it working, with the help of this link: Deserializing JSON using C# to return items, which I posted to narrow down where my issue was occurring.
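The underlying issue was the response shape: the keyPhrases endpoint wraps its results in a documents array (roughly {"documents":[{"keyPhrases":[...],"id":"1"}],"errors":[]}; the exact field set may vary), so deserializing straight into KeyPhraseResult leaves KeyPhrases null. The fix, which the full code below also uses, is to deserialize into a wrapper class and copy the phrases out:
// Deserialize into a wrapper matching the documents/errors envelope,
// then take the key phrases of the first (and only) document.
var result = JsonConvert.DeserializeObject<RootObject>(content);
keyPhraseResult.KeyPhrases = result.documents[0].keyPhrases;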
So what this code is doing is the following:
1. Creating an index in Azure Search called textanalytics.
2. Creating a JSON string of the text provided.
3. Retrieving the key phrases and adding them to the index created in step 1.
Below is my entire code, in case it helps someone else:
(Please ensure that you add references to the relevant NuGet packages: Microsoft.Azure.Search and Newtonsoft.Json.)
Program.cs(This is a console application):
using Microsoft.Azure.Search;
using System;
using System.Configuration;
using System.IO;
using System.Threading;
namespace AzureSearchTextAnalytics
{
class Program
{
static string searchServiceName = "<removed>"; // This is the Azure Search service name that you create in Azure
static string searchServiceAPIKey = "<removed>"; // This is the Primary key that is provided after creating a Azure Search Service
static string indexName = "textanalytics";
static SearchServiceClient serviceClient = new SearchServiceClient(searchServiceName, new SearchCredentials(searchServiceAPIKey));
static SearchIndexClient indexClient = serviceClient.Indexes.GetClient(indexName);
static void Main()
{
MakeRequests();
Console.WriteLine("Hit ENTER to exit...");
Console.ReadLine();
}
static async void MakeRequests()
{
// Note, this will create a new Azure Search Index for the text and the key phrases
Console.WriteLine("Creating Azure Search index...");
AzureSearch.CreateIndex(serviceClient, indexName);
// Apply the Machine Learning Text Extraction to retrieve only the key phrases
Console.WriteLine("Extracting key phrases from processed text... \r\n");
KeyPhraseResult keyPhraseResult = await TextExtraction.ProcessText();
Console.WriteLine("Found the following phrases... \r\n");
foreach (var phrase in keyPhraseResult.KeyPhrases)
Console.WriteLine(phrase);
// Take the resulting key phrases to a new Azure Search Index
// It is highly recommended that you upload documents in batches rather
// than individually, as is done here
Console.WriteLine("Uploading extracted text to Azure Search...\r\n");
AzureSearch.UploadDocuments(indexClient, "1", keyPhraseResult);
Console.WriteLine("Wait 5 seconds for content to become searchable...\r\n");
Thread.Sleep(5000);
// Execute a test search
Console.WriteLine("Execute Search...");
AzureSearch.SearchDocuments(indexClient, "Azure Search");
Console.WriteLine("All done. Press any key to continue.");
Console.ReadLine();
}
}
}
My TextExtractionHelper.cs:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading.Tasks;
using System.Web;
using Newtonsoft.Json;
using System.Configuration; // get it from http://www.newtonsoft.com/json
using Newtonsoft.Json.Linq;
namespace AzureSearchTextAnalytics
{
/// <summary>
/// Helper class that calls the Cognitive Services Text Analytics key phrase endpoint.
/// </summary>
public class TextExtraction
{
static string azureMLTextAnalyticsKey = "<removed>"; // This key you will get when you have added TextAnalytics in Azure.
private const string ServiceBaseUri = "https://westus.api.cognitive.microsoft.com/"; //This you will get when you have added TextAnalytics in Azure
public static async Task<KeyPhraseResult> ProcessText()
{
string filetext = "Build great search experiences for your web and mobile apps. " +
"Many applications use search as the primary interaction pattern for their users. When it comes to search, user expectations are high. They expect great relevance, suggestions, near-instantaneous responses, multiple languages, faceting, and more. Azure Search makes it easy to add powerful and sophisticated search capabilities to your website or application. The integrated Microsoft natural language stack, also used in Bing and Office, has been improved over 16 years of development. Quickly and easily tune search results, and construct rich, fine-tuned ranking models to tie search results to business goals. Reliable throughput and storage provide fast search indexing and querying to support time-sensitive search scenarios. " +
"Reduce complexity with a fully managed service. " +
"Azure Search removes the complexity of setting up and managing your own search index. This fully managed service helps you avoid the hassle of dealing with index corruption, service availability, scaling, and service updates. Create multiple indexes with no incremental cost per index. Easily scale up or down as the traffic and data volume of your application changes.";
KeyPhraseResult keyPhraseResult = new KeyPhraseResult();
using (var httpClient = new HttpClient())
{
httpClient.BaseAddress = new Uri(ServiceBaseUri);
// Request headers.
httpClient.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", azureMLTextAnalyticsKey);
httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
byte[] byteData = Encoding.UTF8.GetBytes("{\"documents\":[" +
"{\"id\":\"1\",\"text\":\"" + filetext + "\"},]}");
// Detect key phrases:
var keyPhrasesRequest = "text/analytics/v2.0/keyPhrases";
// get key phrases
using (var getcontent = new ByteArrayContent(byteData))
{
getcontent.Headers.ContentType = new MediaTypeHeaderValue("application/json");
var response = await httpClient.PostAsync(keyPhrasesRequest, getcontent);
Task<string> contentTask = response.Content.ReadAsStringAsync();
string content = contentTask.Result;
if (!response.IsSuccessStatusCode)
{
throw new Exception("Call to get key phrases failed with HTTP status code: " +
response.StatusCode + " and contents: " + content);
}
var result = JsonConvert.DeserializeObject<RootObject>(content);
keyPhraseResult.KeyPhrases = result.documents[0].keyPhrases;
}
}
return keyPhraseResult;
}
}
public class Documents
{
public List<string> keyPhrases { get; set; }
public string id { get; set; }
}
public class RootObject
{
public List<Documents> documents { get; set; }
public List<object> errors { get; set; }
}
/// <summary>
/// Class to hold result of Key Phrases call
/// </summary>
public class KeyPhraseResult
{
public List<string> KeyPhrases { get; set; }
}
}
AzureSearch.cs:
using Microsoft.Azure.Search;
using Microsoft.Azure.Search.Models;
using System;
using System.Collections.Generic;
using System.Configuration;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AzureSearchTextAnalytics
{
public class AzureSearch
{
public static void CreateIndex(SearchServiceClient serviceClient, string indexName)
{
if (serviceClient.Indexes.Exists(indexName))
{
serviceClient.Indexes.Delete(indexName);
}
var definition = new Index()
{
Name = indexName,
Fields = new[]
{
new Field("fileId", DataType.String) { IsKey = true },
new Field("fileText", DataType.String) { IsSearchable = true, IsFilterable = false, IsSortable = false, IsFacetable = false },
new Field("keyPhrases", DataType.Collection(DataType.String)) { IsSearchable = true, IsFilterable = true, IsFacetable = true }
}
};
serviceClient.Indexes.Create(definition);
}
public static void UploadDocuments(SearchIndexClient indexClient, string fileId, KeyPhraseResult keyPhraseResult)
{
List<IndexAction> indexOperations = new List<IndexAction>();
var doc = new Document();
doc.Add("fileId", fileId);
doc.Add("keyPhrases", keyPhraseResult.KeyPhrases.ToList());
indexOperations.Add(IndexAction.Upload(doc));
try
{
indexClient.Documents.Index(new IndexBatch(indexOperations));
}
catch (IndexBatchException e)
{
// Sometimes when your Search service is under load, indexing will fail for some of the documents in
// the batch. Depending on your application, you can take compensating actions like delaying and
// retrying. For this simple demo, we just log the failed document keys and continue.
Console.WriteLine(
"Failed to index some of the documents: {0}",
String.Join(", ", e.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key)));
}
}
public static void SearchDocuments(SearchIndexClient indexClient, string searchText)
{
// Search using the supplied searchText and output documents that match
try
{
var sp = new SearchParameters();
DocumentSearchResult<OCRTextIndex> response = indexClient.Documents.Search<OCRTextIndex>(searchText, sp);
foreach (SearchResult<OCRTextIndex> result in response.Results)
{
Console.WriteLine("File ID: {0}", result.Document.fileId);
Console.WriteLine("Key Phrases: {0}", string.Join(",", result.Document.keyPhrases));
}
}
catch (Exception e)
{
Console.WriteLine("Failed search: {0}", e.Message.ToString());
}
}
}
}
DataModel.cs
using Microsoft.Azure.Search.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AzureSearchTextAnalytics
{
[SerializePropertyNamesAsCamelCase]
public class OCRTextIndex
{
public string fileId { get; set; }
public string[] keyPhrases { get; set; }
}
}

RavenDB throws a JSON deserialisation error when retrieving document

I've just completed a round of refactoring of my application, which has resulted in my removing a project that was no longer required and moving its classes into a different project. A side effect of this is that my User class, which is stored in RavenDB, has a collection property of a type moved to the new assembly. As soon as I attempt to query the session for the User class I get a Json deserialisation error. The issue is touched upon here but the answers don't address my issue. Here's the offending property:
{
"OAuthAccounts": {
"$type": "System.Collections.ObjectModel.Collection`1[
[Friendorsement.Contracts.Membership.IOAuthAccount,
Friendorsement.Contracts]], mscorlib",
"$values": []
},
}
OAuthAccounts is a collection property of User that used to map here:
System.Collections.ObjectModel.Collection`1[[Friendorsement.Contracts.Membership.IOAuthAccount, Friendorsement.Contracts]]
It now maps here:
System.Collections.ObjectModel.Collection`1[[Friendorsement.Domain.Membership.IOAuthAccount, Friendorsement.Domain]]
Friendorsement.Contracts no longer exists. All of its types are now in Friendorsement.Domain
I've tried using store.DatabaseCommands.StartsWith("User", "", 0, 128) but that didn't return anything.
I've tried looking at UpdateByIndex but not got very far with it:
store.DatabaseCommands.UpdateByIndex("Raven/DocumentsByEntityName",
new IndexQuery {Query = "Tag:Users"},
new[]
{
new PatchRequest { // unsure what to set here }
});
I'm using Raven 2.0
Below is a simple sample application that shows how to patch the metadata. While your example is a little different, this should be a good starting point.
namespace SO19941925
{
internal class Program
{
private static void Main(string[] args)
{
IDocumentStore store = new DocumentStore
{
Url = "http://localhost:8080",
DefaultDatabase = "SO19941925"
}.Initialize();
using (IDocumentSession session = store.OpenSession())
{
for (int i = 0; i < 10; i++)
{
session.Store(new User {Name = "User" + i});
}
session.SaveChanges();
}
using (IDocumentSession session = store.OpenSession())
{
List<User> users = session.Query<User>().Customize(x => x.WaitForNonStaleResultsAsOfNow()).ToList();
Console.WriteLine("{0} SO19941925.Users", users.Count);
}
Operation s = store.DatabaseCommands.UpdateByIndex("Raven/DocumentsByEntityName",
new IndexQuery {Query = "Tag:Users"},
new ScriptedPatchRequest
{
Script = #"this['#metadata']['Raven-Clr-Type'] = 'SO19941925.Models.User, SO19941925';"
}, true
);
s.WaitForCompletion();
using (IDocumentSession session = store.OpenSession())
{
List<Models.User> users =
session.Query<Models.User>().Customize(x => x.WaitForNonStaleResultsAsOfNow()).ToList();
Console.WriteLine("{0} SO19941925.Models.Users", users.Count);
}
Console.ReadLine();
}
}
internal class User
{
public string Name { get; set; }
}
}
namespace SO19941925.Models
{
internal class User
{
public string Name { get; set; }
}
}
UPDATE: Based on the initial answer above, here is the code that actually solves the OP's question:
store.DatabaseCommands.UpdateByIndex("Raven/DocumentsByEntityName",
new IndexQuery {Query = "Tag:Users"},
new ScriptedPatchRequest
{
Script = #"this['OAuthAccounts']['$type'] =
'System.Collections.ObjectModel.Collection`1[
[Friendorsement.Domain.Membership.IFlexOAuthAccount,
Friendorsement.Domain]], mscorlib';",
}, true
);
Here are two possible solutions:
Option 1: Depending on what state your project is in, for example if you are still in development, you could easily just delete that collection out of RavenDB from the Raven Studio and recreate all those User documents. All the new User documents should then have the correct class name and assembly and should then deserialize correctly. Obviously, if you are already in production, this probably won't be a good option.
Option 2: Depending on how many User documents you have, you should be able to manually edit each one to specify the correct C# class name and assembly, so that they will be deserialized correctly. Again, if you have too many objects to modify manually, this may not be a good option; however, if there are just a few, it shouldn't be too bad to open each one up, go to the metadata tab, and paste the correct values for "Raven-Entity-Name" and "Raven-Clr-Type".
I ended up doing this:
Advanced.DatabaseCommands.UpdateByIndex(
"Raven/DocumentsByEntityName",
new IndexQuery {Query = "Tag:Album"},
new []{ new PatchRequest() {
Type = PatchCommandType.Modify,
Name = "#metadata",
Nested= new []{
new PatchRequest{
Name= "Raven-Clr-Type",
Type = PatchCommandType.Set,
Value = "Core.Model.Album, Core" }}}},
false);

Cannot find com.apiconnector.API in the dotMailer API for C#

I am trying to make sense of the dotMailer API for C#.
I have a class library, which references version 1.5 of the API, where I intend to store the functionality that will consume the dotMailer API. I also have a Service Reference set up from this WSDL.
I was looking through the C# examples, but already I'm stumped! The following was pulled directly from here
Example of use in C#
/// <summary>
/// Adds a contact to an address book
/// </summary>
public void AddContactToAddressBook()
{
const string username = "apiuser-XXXXXXXXXXXX@apiconnector.com";
const string password = "password";
const int addressBookId = 1; // ID of the target address book
Console.WriteLine("AddContactToAddressBook");
Console.WriteLine("-----------------------");
// Get an instance to the web reference
com.apiconnector.API api = new com.apiconnector.API();
try
{
// we need a new contact
com.apiconnector.APIContact contact = new com.apiconnector.APIContact();
// populate the contact
contact.AudienceType = com.apiconnector.ContactAudienceTypes.B2B;
// populate the data fields
contact.DataFields = new com.apiconnector.ContactDataFields();
contact.DataFields.Keys = new string[3];
contact.DataFields.Values = new object[3];
contact.DataFields.Keys[0] = "FIRSTNAME";
contact.DataFields.Values[0] = "John";
contact.DataFields.Keys[1] = "LASTNAME";
contact.DataFields.Values[1] = "Smith";
contact.DataFields.Keys[2] = "POSTCODE";
contact.DataFields.Values[2] = "IP4 1XU";
// email address
contact.Email = "joe.smith@example.com";
contact.EmailType = com.apiconnector.ContactEmailTypes.PlainText;
contact.Notes = "This is a test only email";
contact.OptInType = com.apiconnector.ContactOptInTypes.Single;
// This method will create the contact required if it doesn't already exist within the dotMailer system,
// so we don't have to call CreateContact as a prerequisite.
//
// This method will also overwrite an existing contact, with the information provided here.
//
// This method will fail if you try to add a contact to the "Test" or "All Contacts" address books.
//
com.apiconnector.APIContact newContact = api.AddContactToAddressBook(username, password, contact, addressBookId);
// Did we get something back from the API ?
if (newContact != null)
{
Console.WriteLine("Contact added to address book {0} -> {1}", newContact.ID, addressBookId);
}
}
catch (SoapException ex) // catch any soap issues/errors from the web service
{
Console.WriteLine("Error -> {0}", ex.Message);
}
Console.WriteLine();
}
My problem is that the following line does not resolve.
com.apiconnector.API api = new com.apiconnector.API();
I have looked in namespace dotMailer.Sdk.com.apiconnector for API but it does not exist, so where is it?
Am I missing something?
Add the WSDL as a service reference. In the example below I've called it "ServiceReference1" (because that's the default and I was lazy). You then use the reference to the APISoapClient (I've called it Client) instead of the "api" variable you're having trouble declaring.
It all compiles fine. I'm not going to execute it because I have no idea what shenanigans my random code snippet would cause on the server, but it should point you in the right direction.
using System;
using System.Windows.Forms;
using WindowsFormsApplication1.ServiceReference1;
namespace WindowsFormsApplication1
{
public partial class Form1 : Form
{
const string username = "apiuser-XXXXXXXXXXXX@apiconnector.com";
const string password = "password";
const int addressBookId = 1; // ID of the target address book
public Form1()
{
InitializeComponent();
}
private void button1_Click(object sender, EventArgs e)
{
AddContactToAddressBook();
}
private void AddContactToAddressBook()
{
using (ServiceReference1.APISoapClient Client = new ServiceReference1.APISoapClient())
{
APIContact Contact = new APIContact();
Contact.AudienceType = ContactAudienceTypes.B2B;
APIContact NewContact = Client.AddContactToAddressBook(username, password, Contact, addressBookId); // etc. etc.
}
}
}
}
