Bind multi-level JSON to configuration in a C# Web API?

I have multi-level JSON coming from an AWS secret, and I want to bind this JSON to the C# configuration system so that I can use it throughout the project.
public class AmazonSecretsManagerConfigurationProvider : ConfigurationProvider
{
private readonly string _region;
private readonly string _secretName;
/// <summary>
/// Initializes a new instance of the <see cref="AmazonSecretsManagerConfigurationProvider"/> class.
/// </summary>
/// <param name="region"></param>
/// <param name="secretName"></param>
public AmazonSecretsManagerConfigurationProvider(string region, string secretName)
{
_region = region;
_secretName = secretName;
}
public override void Load()
{
var secret = GetSecret();
Dictionary<string, object> data = JsonConvert.DeserializeObject<Dictionary<string, object>>(secret);
Dictionary<string, string> Data = data.ToDictionary(k => k.Key, k => k.Value.ToString());
}
private string GetSecret()
{
var request = new GetSecretValueRequest
{
SecretId = "awsseceret",
VersionStage = "AWSCURRENT" // VersionStage defaults to AWSCURRENT if unspecified.
};
string accesskey = "################";
string secretkey = "#######################";
using (var client =
new AmazonSecretsManagerClient(accesskey, secretkey, RegionEndpoint.GetBySystemName(this._region)))
{
var response = client.GetSecretValueAsync(request).Result;
string secretString;
if (response.SecretString != null)
{
secretString = response.SecretString;
}
else
{
var memoryStream = response.SecretBinary;
var reader = new StreamReader(memoryStream);
secretString =
System.Text.Encoding.UTF8
.GetString(Convert.FromBase64String(reader.ReadToEnd()));
}
return secretString;
}
}
}
I get the AWS SecretString in the secret variable, but how can I bind this to the configuration?
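A minimal sketch of one way to wire this up (my own sketch, not an official AWS or Microsoft API; the names FlattenJson and AmazonSecretsManagerConfigurationSource are illustrative assumptions). Note that in the Load() above, the Dictionary<string, string> Data is only a local variable, so nothing ever reaches the provider, and ToString() on nested objects loses the hierarchy. Instead, walk the JSON and assign ':'-separated keys to the inherited Data property, then expose the provider through an IConfigurationSource (requires using Microsoft.Extensions.Configuration; and using Newtonsoft.Json.Linq;):
public override void Load()
{
    var secret = GetSecret();
    var flattened = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);

    // Walk the JSON tree and build "Parent:Child" keys, which is how
    // Microsoft.Extensions.Configuration addresses nested sections.
    FlattenJson(JObject.Parse(secret), null, flattened);

    // ConfigurationProvider.Data is what IConfiguration actually reads.
    Data = flattened;
}

private static void FlattenJson(JToken token, string parentPath, IDictionary<string, string> output)
{
    switch (token)
    {
        case JObject obj:
            foreach (var property in obj.Properties())
            {
                var path = parentPath == null
                    ? property.Name
                    : parentPath + ConfigurationPath.KeyDelimiter + property.Name; // ":"
                FlattenJson(property.Value, path, output);
            }
            break;
        case JArray array:
            for (var i = 0; i < array.Count; i++)
            {
                var path = parentPath == null
                    ? i.ToString()
                    : parentPath + ConfigurationPath.KeyDelimiter + i;
                FlattenJson(array[i], path, output);
            }
            break;
        default:
            // Leaf value: store it under the accumulated configuration key.
            output[parentPath] = token.ToString();
            break;
    }
}

// A corresponding source so the provider can be added to the configuration builder.
public class AmazonSecretsManagerConfigurationSource : IConfigurationSource
{
    private readonly string _region;
    private readonly string _secretName;

    public AmazonSecretsManagerConfigurationSource(string region, string secretName)
    {
        _region = region;
        _secretName = secretName;
    }

    public IConfigurationProvider Build(IConfigurationBuilder builder)
        => new AmazonSecretsManagerConfigurationProvider(_region, _secretName);
}
After adding the source in Program.cs, for example config.Add(new AmazonSecretsManagerConfigurationSource("eu-west-1", "awsseceret")) inside ConfigureAppConfiguration (region name here is just an example), the nested values can be read as Configuration["Parent:Child"] or bound with Configuration.GetSection("Parent").Get<MyOptions>().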

Generating a pre-signed URL in .NET for AWS S3

Based on the documentation, with the provided sample data, it should be possible to generate a signature with the value:
aeeed9bbccd4d02ee5c0109b86d86835f995330da4c265957d157751f604d404
Here is my code in .Net:
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
namespace PlayingWithAmazonS3
{
public class ReadTextFilePerRest
{
private string _regionSample = "us-east-1";
private string _dateSample = "20130524";
private string _secretAccessKeySample = "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY";
private string _canonicalRequestPath = "..\\Files\\SampleFiles\\CanonicalRequest.txt";
private string _stringToSignPath = "..\\Files\\SampleFiles\\StringToSign.txt";
private string _canonicalRequest;
private string _stringToSign;
public void ReadPayloadFiles()
{
_stringToSign = File.ReadAllText(_stringToSignPath);
_canonicalRequest = File.ReadAllText(_canonicalRequestPath);
}
// it needs to return: aeeed9bbccd4d02ee5c0109b86d86835f995330da4c265957d157751f604d404
public string SigningKey()
{
var keyBytes = Encoding.ASCII.GetBytes("AWS4" + _secretAccessKeySample);
var dateBytes = Encoding.ASCII.GetBytes(_dateSample);
var regionBytes = Encoding.ASCII.GetBytes(_regionSample);
var serviceBytes = Encoding.ASCII.GetBytes("s3");
var requestBytes = Encoding.ASCII.GetBytes("aws4_request");
var stringToSignBytes = Encoding.ASCII.GetBytes(_stringToSign);
using (HMACSHA256 hmac = new HMACSHA256(dateBytes))
{
var dateKey = hmac.ComputeHash(keyBytes);
using (HMACSHA256 hmac2 = new HMACSHA256(regionBytes))
{
var dateRegionKey = hmac2.ComputeHash(dateKey);
using (HMACSHA256 hmac3 = new HMACSHA256(serviceBytes))
{
var dateRegionServiceKey = hmac3.ComputeHash(dateRegionKey);
using (HMACSHA256 hmac4 = new HMACSHA256(requestBytes))
{
var signingKey = hmac4.ComputeHash(dateRegionServiceKey);
using (HMACSHA256 hmac5 = new HMACSHA256(stringToSignBytes))
{
var signature = hmac5.ComputeHash(signingKey);
return ByteToString(signature);
}
}
}
}
}
}
private string ByteToString(IEnumerable<byte> buffer)
{
var sBinary = buffer.Aggregate("", (current, buff) => current + buff.ToString("X2"));
return sBinary;
}
}
}
However, my generated signature is different. Can anybody tell me where my mistake is?
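For reference before the working version below: a likely culprit in SigningKey() above is that the HMAC key and message are swapped at every step. AWS Signature V4 derives each intermediate key as HMAC(key, data), starting with HMAC("AWS4" + secretKey, date), whereas the code above constructs each HMACSHA256 with the date (and later the string to sign) as the key and hashes the previous key as the message. A sketch of the corrected first step, using the same fields as the question:
// The secret-derived key is the HMAC key; the date string is the message being hashed.
using (var hmacDate = new HMACSHA256(Encoding.ASCII.GetBytes("AWS4" + _secretAccessKeySample)))
{
    // kDate = HMAC-SHA256(key: "AWS4" + secret, data: "20130524")
    var dateKey = hmacDate.ComputeHash(Encoding.ASCII.GetBytes(_dateSample));
    // ...continue the same way for region, service, "aws4_request", and finally the string to sign.
}
The listing below derives the key in exactly that order via GetSignatureKey and HmacSha256(data, key).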
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
namespace PlayingWithAmazonS3
{
/// <summary>
/// this class is only responsible for calculating the final signature for creating a pre-signed Url to
/// communicate through REST with iam service
/// </summary>
public class PreSignedUrlRestSignatureCal
{
private const string RegionSample = "us-east-1";
private const string DateSample = "20150830";
private const string ServiceSample = "iam";
// it is provided as an example by AWS
private const string SecretAccessKeySample = "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY";
/// <summary>
/// this method will be called in main
/// </summary>
public void ExecuteMethod()
{
var finalSignature = SigningKey();
Console.WriteLine("Final Signature: " + finalSignature);
}
private string SigningKey()
{
// generating derived signing key
var derivedSigningKey =
GetSignatureKey(SecretAccessKeySample, DateSample, RegionSample, ServiceSample);
// example signingKey provided by aws for test
var stringToSign = "AWS4-HMAC-SHA256" + "\n" +
"20150830T123600Z" + "\n" +
"20150830/us-east-1/iam/aws4_request" + "\n" +
"f536975d06c0309214f805bb90ccff089219ecd68b2577efef23edd43b7e1a59";
// generating the final signature
var signature = HmacSha256(stringToSign, derivedSigningKey);
// returning the hex value of the final signature
return ByteToString(signature);
}
/// <summary>
/// calculating hmac-sha256 in .Net
/// </summary>
/// <param name="data"></param>
/// <param name="key"></param>
/// <returns></returns>
private byte[] HmacSha256(string data, byte[] key)
{
const string algorithm = "HmacSHA256";
var kha = KeyedHashAlgorithm.Create(algorithm);
kha.Key = key;
return kha.ComputeHash(Encoding.UTF8.GetBytes(data));
}
/// <summary>
/// get derived signing key (not the final signature) from provided info
/// </summary>
/// <param name="key"></param>
/// <param name="dateStamp"></param>
/// <param name="regionName"></param>
/// <param name="serviceName"></param>
/// <returns></returns>
private byte[] GetSignatureKey(string key, string dateStamp, string regionName, string serviceName)
{
var kSecret = Encoding.UTF8.GetBytes(("AWS4" + key).ToCharArray());
var kDate = HmacSha256(dateStamp, kSecret);
var kRegion = HmacSha256(regionName, kDate);
var kService = HmacSha256(serviceName, kRegion);
var kSigning = HmacSha256("aws4_request", kService);
return kSigning;
}
/// <summary>
/// it returns the hex value of byte[]
/// </summary>
/// <param name="buffer"></param>
/// <returns></returns>
private string ByteToString(IEnumerable<byte> buffer)
{
var sBinary = buffer.Aggregate("", (current, buff) => current + buff.ToString("X2"));
return sBinary.ToLower();
}
}
}

Each-loop in Mandrill using Handlebars and the Mandrill-dotnet library

I'm using Mandrill to send emails and Handlebars to render content in the email.
If I add variables like this everything works fine:
Backend:
message.AddRecipientVariable("test@gmail.com", "MYVALUE", "some value");
Html-Template:
<p>{{MYVALUE}}</p>
But if I try to use {{#each}}, the each section in the sent email is empty. What am I doing wrong here? Am I passing objStringArray the wrong way, or is something else missing to render the loop?
Backend:
var objList = new List<MyObj> {new MyObj() {Qty = "125"}, new MyObj() { Qty = "16"}};
var jsonSerialiser = new JavaScriptSerializer();
var objStringArray = jsonSerialiser.Serialize(objList.ToArray());
message.AddRecipientVariable("test@gmail.com", "VALUES", objStringArray);
message.merge_language = "handlebars";
_mandrillApi.SendMessage(message, templateName, new List<TemplateContent>());
Html-Template:
<ul>
{{#each VALUES}}
<li>{{Qty}}</li>
{{/each}}
</ul>
Handlebars in mandrill
Mandrill-dotnet
According to the library I'm using (Mandrill-DotNet), the AddRecipientVariable() method only accepts a string for the content parameter, while the Mandrill API wants a list when doing the {{#each}} loop. So by changing the library source code a bit, I can pass a regular List<dynamic> to AddRecipientVariable's content parameter instead, and it works perfectly.
By changing source-code:
public struct merge_var
{
#region Fields
/// <summary>
/// The content.
/// </summary>
public string content;
/// <summary>
/// The name.
/// </summary>
public string name;
#endregion
}
public void AddRecipientVariable(string recipient, string name, string content)
{
if (this.merge_vars == null)
{
this.merge_vars = new List<rcpt_merge_var>();
}
rcpt_merge_var entry = this.merge_vars.Where(e => e.rcpt == recipient).FirstOrDefault();
if (entry == null)
{
entry = new rcpt_merge_var { rcpt = recipient };
this.merge_vars.Add(entry);
}
var mv = new merge_var { name = name, content = content };
entry.vars.Add(mv);
}
To this (string content to dynamic content) :
public struct merge_var
{
#region Fields
/// <summary>
/// The content.
/// </summary>
public dynamic content;
/// <summary>
/// The name.
/// </summary>
public string name;
#endregion
}
public void AddRecipientVariable(string recipient, string name, dynamic content)
{
if (this.merge_vars == null)
{
this.merge_vars = new List<rcpt_merge_var>();
}
rcpt_merge_var entry = this.merge_vars.Where(e => e.rcpt == recipient).FirstOrDefault();
if (entry == null)
{
entry = new rcpt_merge_var { rcpt = recipient };
this.merge_vars.Add(entry);
}
var mv = new merge_var { name = name, content = content };
entry.vars.Add(mv);
}
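With content widened to dynamic, the backend from the question can pass the list itself instead of a pre-serialized JSON string; a rough sketch of the call site under that assumption:
// Pass the list directly; Mandrill then serializes it as a JSON array,
// which is what {{#each VALUES}} iterates over in the template.
var objList = new List<MyObj> { new MyObj { Qty = "125" }, new MyObj { Qty = "16" } };
message.AddRecipientVariable("test@gmail.com", "VALUES", objList);
message.merge_language = "handlebars";
_mandrillApi.SendMessage(message, templateName, new List<TemplateContent>());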
I am working with Djrill in Python, so I am not familiar with the library you are using.
But you should make sure your JSON is not nested the wrong way, as mentioned in "{{#each ... }} on Mandrill is resulting in an empty string".

Google Analytics with web application error

private static readonly IAuthorizationCodeFlow flow =
new GoogleAuthorizationCodeFlow(new GoogleAuthorizationCodeFlow.Initializer
{
ClientSecrets = new ClientSecrets
{
ClientId = "XXXXXXXXX",
ClientSecret = "XXXXXXXXXXXX"
},
Scopes = new[] { AnalyticsService.Scope.AnalyticsReadonly, AnalyticsService.Scope.AnalyticsEdit },
DataStore = new FileDataStore("Analytics.Auth.Store")//new FileDataStore("Drive.Api.Auth.Store")
});
I am using the above code for a Google console web application (Google Analytics), but it gives the error System.UnauthorizedAccessException: Access to the path 'Analytics.Auth.Store' is denied.
FileDataStore stores the data in %AppData% on the PC. You need to make sure that you have access to that.
If you are planning on running this from a web server, you should not be using FileDataStore. You should create your own implementation of IDataStore; this will enable you to store the refresh tokens in a database.
Example:
/// <summary>
/// Saved data store that implements <see cref="IDataStore"/>.
/// This saved data store stores a StoredResponse object.
/// </summary>
class SavedDataStore : IDataStore
{
public StoredResponse _storedResponse { get; set; }
/// <summary>
/// Constructs and loads a previously saved StoredResponse.
/// </summary>
/// <param name="pResponse">Stored response</param>
public SavedDataStore(StoredResponse pResponse)
{
this._storedResponse = pResponse;
}
public SavedDataStore()
{
this._storedResponse = new StoredResponse();
}
/// <summary>
/// Stores the given value into the stored response.
/// </summary>
/// <typeparam name="T">The type to store in the data store</typeparam>
/// <param name="key">The key</param>
/// <param name="value">The value to store in the data store</param>
public Task StoreAsync<T>(string key, T value)
{
var serialized = NewtonsoftJsonSerializer.Instance.Serialize(value);
JObject jObject = JObject.Parse(serialized);
// storing access token
var test = jObject.SelectToken("access_token");
if (test != null)
{
this._storedResponse.access_token = (string)test;
}
// storing token type
test = jObject.SelectToken("token_type");
if (test != null)
{
this._storedResponse.token_type = (string)test;
}
test = jObject.SelectToken("expires_in");
if (test != null)
{
this._storedResponse.expires_in = (long?)test;
}
test = jObject.SelectToken("refresh_token");
if (test != null)
{
this._storedResponse.refresh_token = (string)test;
}
test = jObject.SelectToken("Issued");
if (test != null)
{
this._storedResponse.Issued = (string)test;
}
return TaskEx.Delay(0);
}
/// <summary>
/// Deletes the StoredResponse.
/// </summary>
/// <typeparam name="T">The type to delete from the data store</typeparam>
/// <param name="key">The key to delete from the data store</param>
public Task DeleteAsync<T>(string key)
{
this._storedResponse = new StoredResponse();
return TaskEx.Delay(0);
}
/// <summary>
/// Returns the stored value from _storedResponse.
/// </summary>
/// <typeparam name="T">The type to retrieve</typeparam>
/// <param name="key">The key to retrieve from the data store</param>
/// <returns>The stored object</returns>
public Task<T> GetAsync<T>(string key)
{
TaskCompletionSource<T> tcs = new TaskCompletionSource<T>();
try
{
string JsonData = Newtonsoft.Json.JsonConvert.SerializeObject(this._storedResponse);
tcs.SetResult(Google.Apis.Json.NewtonsoftJsonSerializer.Instance.Deserialize<T>(JsonData));
}
catch (Exception ex)
{
tcs.SetException(ex);
}
return tcs.Task;
}
/// <summary>
/// Clears all values in the data store.
/// </summary>
public Task ClearAsync()
{
this._storedResponse = new StoredResponse();
return TaskEx.Delay(0);
}
///// Creates a unique stored key based on the key and the class type.
/////The object key
/////The type to store or retrieve
//public static string GenerateStoredKey(string key, Type t)
//{
// return string.Format("{0}-{1}", t.FullName, key);
//}
}
Then instead of using FileDataStore you use your new SavedDataStore
//Now we load our saved refreshToken.
StoredResponse myStoredResponse = new StoredResponse(tbRefreshToken.Text);
// Now we pass a SavedDatastore with our StoredResponse.
credential = GoogleWebAuthorizationBroker.AuthorizeAsync(
new ClientSecrets { ClientId = "YourClientId", ClientSecret = "YourClientSecret" },
new[] { AnalyticsService.Scope.AnalyticsReadonly},
"user",
CancellationToken.None,
new SavedDataStore(myStoredResponse)).Result;
This is because you don't have write access to the AppData folder on a web server, and FileDataStore uses that folder by default.
You can use a different folder by passing the full path as a parameter:
FileDataStore(string folder, bool fullPath = false)
Sample implementation:
static FileDataStore GetFileDataStore()
{
var path = HttpContext.Current.Server.MapPath("~/App_Data/Drive.Api.Auth.Store");
var store = new FileDataStore(path, fullPath: true);
return store;
}
This way, FileDataStore uses the App_Data folder of your application to write the TokenResponse. Don't forget to give write access to the App_Data folder on the web server.
You can read more about this here and here.
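For completeness, a sketch of plugging the App_Data-backed store into the authorization call (same placeholder client values as in the question; GetFileDataStore is the helper defined above):
// Authorize against Analytics using the App_Data-backed FileDataStore
// instead of the default %AppData% location the web server cannot write to.
var credential = GoogleWebAuthorizationBroker.AuthorizeAsync(
    new ClientSecrets { ClientId = "XXXXXXXXX", ClientSecret = "XXXXXXXXXXXX" },
    new[] { AnalyticsService.Scope.AnalyticsReadonly, AnalyticsService.Scope.AnalyticsEdit },
    "user",
    CancellationToken.None,
    GetFileDataStore()).Result;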

ObjectDisposedException when trying to upload a file

I have this service class:
public delegate string AsyncMethodCaller(string id, HttpPostedFileBase file);
public class ObjectService : IDisposable
{
private readonly IObjectRepository repository;
private readonly IAmazonS3 client;
private readonly string bucketName;
private static object syncRoot = new object();
private static IDictionary<string, int> processStatus { get; set; }
public ObjectService(string accessKey, string secretKey, string bucketName)
{
var credentials = new BasicAWSCredentials(accessKey, secretKey);
this.bucketName = bucketName;
this.client = new AmazonS3Client(credentials, RegionEndpoint.EUWest1);
this.repository = new ObjectRepository(this.client, this.bucketName);
if (processStatus == null)
processStatus = new Dictionary<string, int>();
}
public IList<S3Object> GetAll()
{
return this.repository.GetAll();
}
public S3Object Get(string key)
{
return this.GetAll().Where(model => model.Key.Equals(key, StringComparison.OrdinalIgnoreCase)).SingleOrDefault();
}
/// <summary>
/// Note: You can upload objects of up to 5 GB in size in a single operation. For objects greater than 5 GB you must use the multipart upload API.
/// Using the multipart upload API you can upload objects up to 5 TB each. For more information, see http://docs.aws.amazon.com/AmazonS3/latest/dev/uploadobjusingmpu.html.
/// </summary>
/// <param name="id">Unique id for tracking the upload progress</param>
/// <param name="bucketName">The name of the bucket that the object is being uploaded to</param>
/// <param name="file">The file that will be uploaded</param>
/// <returns>The unique id</returns>
public string Upload(string id, HttpPostedFileBase file)
{
var reader = new BinaryReader(file.InputStream);
var data = reader.ReadBytes((int)file.InputStream.Length);
var stream = new MemoryStream(data);
var utility = new TransferUtility(client);
var request = new TransferUtilityUploadRequest()
{
BucketName = this.bucketName,
Key = file.FileName,
InputStream = stream
};
request.UploadProgressEvent += (sender, e) => request_UploadProgressEvent(sender, e, id);
utility.Upload(request);
return id;
}
private void request_UploadProgressEvent(object sender, UploadProgressArgs e, string id)
{
lock (syncRoot)
{
processStatus[id] = e.PercentDone;
}
}
public void Add(string id)
{
lock (syncRoot)
{
processStatus.Add(id, 0);
}
}
public void Remove(string id)
{
lock (syncRoot)
{
processStatus.Remove(id);
}
}
public int GetStatus(string id)
{
lock (syncRoot)
{
if (processStatus.Keys.Count(x => x == id) == 1)
{
return processStatus[id];
}
else
{
return 100;
}
}
}
public void Dispose()
{
this.repository.Dispose();
this.client.Dispose();
}
}
and my controller looks like this:
public class _UploadController : Controller
{
public void StartUpload(string id, HttpPostedFileBase file)
{
var bucketName = CompanyProvider.CurrentCompanyId();
using (var service = new ObjectService(ConfigurationManager.AppSettings["AWSAccessKey"], ConfigurationManager.AppSettings["AWSSecretKey"], bucketName))
{
service.Add(id);
var caller = new AsyncMethodCaller(service.Upload);
var result = caller.BeginInvoke(id, file, new AsyncCallback(CompleteUpload), caller);
}
}
public void CompleteUpload(IAsyncResult result)
{
var caller = (AsyncMethodCaller)result.AsyncState;
var id = caller.EndInvoke(result);
}
//
// GET: /_Upload/GetCurrentProgress
public JsonResult GetCurrentProgress(string id)
{
try
{
var bucketName = CompanyProvider.CurrentCompanyId();
this.ControllerContext.HttpContext.Response.AddHeader("cache-control", "no-cache");
using (var service = new ObjectService(ConfigurationManager.AppSettings["AWSAccessKey"], ConfigurationManager.AppSettings["AWSSecretKey"], bucketName))
{
return new JsonResult { Data = new { success = true, progress = service.GetStatus(id) } };
}
}
catch (Exception ex)
{
return new JsonResult { Data = new { success = false, error = ex.Message } };
}
}
}
Now, I have found that sometimes I get an ObjectDisposedException when trying to upload a file, on this line: var data = reader.ReadBytes((int)file.InputStream.Length);. I read that I should not be using the using keyword because of the asynchronous calls, but something still seems to be disposing the stream.
Can anyone tell me why?
Update 1
I have changed my controller to this:
private ObjectService service = new ObjectService(ConfigurationManager.AppSettings["AWSAccessKey"], ConfigurationManager.AppSettings["AWSSecretKey"], CompanyProvider.CurrentCompanyId());
public void StartUpload(string id, HttpPostedFileBase file)
{
service.Add(id);
var caller = new AsyncMethodCaller(service.Upload);
var result = caller.BeginInvoke(id, file, new AsyncCallback(CompleteUpload), caller);
}
public void CompleteUpload(IAsyncResult result)
{
var caller = (AsyncMethodCaller)result.AsyncState;
var id = caller.EndInvoke(result);
this.service.Dispose();
}
but I am still getting the error on the file.InputStream line.
Update 2
The problem seems to be with the BinaryReader.
I changed the code to look like this:
var inputStream = file.InputStream;
var i = inputStream.Length;
var n = (int)i;
using (var reader = new BinaryReader(inputStream))
{
var data = reader.ReadBytes(n);
var stream = new MemoryStream(data);
var request = new TransferUtilityUploadRequest()
{
BucketName = this.bucketName,
Key = file.FileName,
InputStream = stream
};
try
{
request.UploadProgressEvent += (sender, e) => request_UploadProgressEvent(sender, e, id);
utility.Upload(request);
}
catch
{
file.InputStream.Dispose(); // Close our stream
}
}
If the upload fails and I try to re-upload the item, that is when the error is thrown. It is like the item is locked or something.
You are disposing the service with the using statement while the call you started with BeginInvoke is still running.
using (var service = new ObjectService(ConfigurationManager.AppSettings["AWSAccessKey"], ConfigurationManager.AppSettings["AWSSecretKey"], bucketName))
{
service.Add(id);
var caller = new AsyncMethodCaller(service.Upload);
var result = caller.BeginInvoke(id, file, new AsyncCallback(CompleteUpload), caller);
}
You have to dispose your service when the job is done:
private ObjectService service = new ObjectService(ConfigurationManager.AppSettings["AWSAccessKey"], ConfigurationManager.AppSettings["AWSSecretKey"], CompanyProvider.CurrentCompanyId());
public void StartUpload(string id, HttpPostedFileBase file)
{
var bucketName = CompanyProvider.CurrentCompanyId();
service.Add(id);
var caller = new AsyncMethodCaller(service.Upload);
var result = caller.BeginInvoke(id, file, new AsyncCallback(CompleteUpload), caller);
}
public void CompleteUpload(IAsyncResult result)
{
var caller = (AsyncMethodCaller)result.AsyncState;
var id = caller.EndInvoke(result);
service.Dispose();
}
Also your file might be corrupted, try this code:
byte[] buffer = new byte[file.InputStream.Length];
file.InputStream.Seek(0, SeekOrigin.Begin);
file.InputStream.Read(buffer, 0, (int)file.InputStream.Length);
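A further option worth noting (my own sketch, not from either answer; UploadBuffered is a hypothetical variant of Upload that takes the buffered stream and file name instead of HttpPostedFileBase): since HttpPostedFileBase.InputStream belongs to the request and is disposed when the request ends, copying it into a MemoryStream synchronously in StartUpload, before BeginInvoke, removes the async work's dependency on the request stream entirely:
public void StartUpload(string id, HttpPostedFileBase file)
{
    service.Add(id);

    // Copy the request-bound stream while the request is still alive;
    // the buffered copy stays valid after the request has been disposed.
    var buffer = new MemoryStream();
    file.InputStream.CopyTo(buffer);
    buffer.Position = 0;

    // Hypothetical signature: string UploadBuffered(string id, string fileName, Stream input)
    var caller = new Func<string, string, Stream, string>(service.UploadBuffered);
    caller.BeginInvoke(id, file.FileName, buffer, CompleteUpload, caller);
}
CompleteUpload would then cast result.AsyncState back to the same Func<> and call EndInvoke on it.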

MongoDB geospatial index in C#

I have been trying to get started, but I run into the same rock time after time when trying to create and query geospatial data in MongoDB with the official C# driver. The problem is how to create data with geo information; I am just not finding the answer.
Code:
MongoUrl url = new MongoUrl("mongodb://xxx.xx.x.xx/mydb");
MongoServer server = MongoServer.Create(url);
MongoDatabase database = server.GetDatabase("mydb");
<-- this works fine
BsonDocument[] batch = {
new BsonDocument {
{ "name", "Bran" },
{ "loc", "10, 10" }
},
new BsonDocument {
{ "name", "Ayla" },
{ "loc", "0, 0" }
}
};
places.InsertBatch(batch);
<-- that part is wrong somehow
places.EnsureIndex(IndexKeys.GeoSpatial("loca"));
var queryplaces = Query.WithinCircle("loca", 0, 0, 11);
var cursor = places.Find(queryplaces);
foreach (var hit in cursor)
{
foreach (var VARIABLE in hit)
{
Console.WriteLine(VARIABLE.Value);
}
}
<-- I think that part should show both documents, but it shows none. A simple find shows them both.
Would be happy for some help.
The example below is in C#. (It's important to note the order in the array, which is longitude, latitude; this follows the more logical x, y order, as opposed to the more commonly used form where latitude precedes longitude.)
1.) first your class needs to have this:
// Backing fields for the Latitude/Longitude properties below.
private double _latitude;
private double _longitude;
public double[] Location { get; set; }
public double Latitude
{
get { return _latitude; }
set
{
Location[1] = value;
_latitude = value;
}
}
public double Longitude
{
get { return _longitude; }
set
{
Location[0] = value;
_longitude = value;
}
}
public MyClass()
{
Location = new double[2];
}
2.) Then here is some code to get you started with the official C# driver, doing an insert with geo indexing:
/// <summary>
/// Inserts object and creates GeoIndex on collection (assumes TDocument is a class
/// containing an array double[] Location where [0] is the x value (as longitude)
/// and [1] is the y value (as latitude) - this order is important for spherical queries.
///
/// Collection name is assigned as typeof(TDocument).ToString()
/// </summary>
/// <param name="dbName">Your target database</param>
/// <param name="data">The object you're storing</param>
/// <param name="geoIndexName">The name of the location based array on which to create the geoIndex</param>
/// <param name="indexNames">optional: a dictionary containing any additional fields on which you would like to create an index
/// where the key is the name of the field on which you would like to create your index and the value should be either SortDirection.Ascending
/// or SortDirection.Descending. NOTE: this should not include geo indexes! </param>
/// <returns>void</returns>
public static void MongoGeoInsert<TDocument>(string dbName, TDocument data, string geoIndexName, Dictionary<string, SortDirection> indexNames = null)
{
Connection connection = new Connection(dbName);
MongoCollection collection = connection.GetMongoCollection<TDocument>(typeof(TDocument).Name, connection.Db);
collection.Insert<TDocument>(data);
/* NOTE: Latitude and Longitude MUST be wrapped in separate class or array */
IndexKeysBuilder keys = IndexKeys.GeoSpatial(geoIndexName);
IndexOptionsBuilder options = new IndexOptionsBuilder();
options.SetName("idx_" + typeof(TDocument).Name);
// since the default GeoSpatial range is -180 to 180, we don't need to set anything here, but if
// we wanted to use something other than latitude/longitude, we could do so like this:
// options.SetGeoSpatialRange(-180.0, 180.0);
if (indexNames != null)
{
foreach (var indexName in indexNames)
{
if (indexName.Value == SortDirection.Descending)
{
keys = keys.Descending(indexName.Key);
}
else if (indexName.Value == SortDirection.Ascending)
{
keys = keys.Ascending(indexName.Key);
}
}
}
collection.EnsureIndex(keys, options);
connection.Db.Server.Disconnect();
}
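A hypothetical call for the helper above, assuming the class from step 1 is named MyClass and that MongoGeoInsert is in scope:
// Insert one document and build the geospatial index on its "Location" array.
var place = new MyClass { Longitude = 10, Latitude = 10 };
MongoGeoInsert("mydb", place, "Location");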
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using MongoDB.Bson;
using MongoDB.Driver;
namespace MyMongo.Helpers
{
public class Connection
{
private const string DbName = "";
private const string Prefix = "mongodb://";
//private const string Server = "(...):27017/";
private const string Server = "localhost:27017/";
private const string PassWord = "";
private const string UserName = "";
private const string Delimeter = "";
//if using MongoHQ
//private const string Delimeter = ":";
//private const string Prefix = "mongodb://";
//private const string DbName = "(...)";
//private const string UserName = "(...)";
//private const string Server = "@flame.mongohq.com:(<port #>)/";
//private const string PassWord = "(...)";
private readonly string _connectionString = string.Empty;
public MongoDatabase Db { get; private set; }
public MongoCollection Collection { get; private set; }
public Connection()
{
_connectionString = Prefix + UserName + Delimeter + PassWord + Server + DbName;
}
public Connection(string dbName)
{
_connectionString = Prefix + UserName + Delimeter + PassWord + Server + DbName;
Db = GetDatabase(dbName);
}
//mongodb://[username:password@]hostname[:port][/[database][?options]]
public MongoDatabase GetDatabase(string dbName)
{
MongoServer server = MongoServer.Create(_connectionString);
MongoDatabase database = server.GetDatabase(dbName);
return database;
}
public MongoCollection<TDocument> GetMongoCollection<TDocument>(string collectionName, MongoDatabase db, SafeMode safeMode = null)
{
if (safeMode == null) { safeMode = new SafeMode(true); }
MongoCollection<TDocument> result = db.GetCollection<TDocument>(collectionName, safeMode);
return result;
}
}
}
After looking and searching, I found the answer here: https://github.com/karlseguin/pots-importer/blob/master/PotsImporter/NodeImporter.cs
This should be read together with my first piece of code, as it fixes it.
MongoCollection<BsonDocument> places =
database.GetCollection<BsonDocument>("places");
BsonDocument[] batch = {
new BsonDocument { { "name", "Bran" }, { "loc", new BsonArray(new[] { 10, 10 }) } },
new BsonDocument { { "name", "Ayla" }, { "loc", new BsonArray(new[] { 0, 0 }) } }
};
places.InsertBatch(batch);
places.EnsureIndex(IndexKeys.GeoSpatial("loc"));
var queryplaces = Query.WithinCircle("loc", 5, 5, 10);
var cursor = places.Find(queryplaces);
foreach (var hit in cursor)
{
Console.WriteLine("in circle");
foreach (var VARIABLE in hit)
{
Console.WriteLine(VARIABLE.Value);
}
}
As a point of clarification: The problem with the code in the question is that location information should not be stored as a string, but rather as an array of 2 elements (x, y).
