This is my Elasticsearch query for obtaining the desired output. The only reason I am using scroll is that I have many records and I only get back a subset limited by the Size; I want to retrieve all the records.
filters.Add(new TermsQuery
{
Field = new Field("MERCHANTNO"),
Terms = BranchCode,
});
filterClause.Add(new DateRangeQuery
{
Boost = 1.1,
Field = new Field("TRANSACTIONDATE"),
GreaterThanOrEqualTo = DateMath.Anchored(Date).RoundTo(DateMathTimeUnit.Month),
Format = "yyyy-MM",
TimeZone = "+01:00"
});
var SearchRequest = new SearchRequest<Acquirer>("acquiringdata")
{
Size = 2,
Scroll = "10s", // I have also tried Scroll = 10 and Scroll = 1m
Query = new BoolQuery
{
Must = filters,
Filter = filterClause
}
};
var searchResponse = await _elasticClient.SearchAsync<Acquirer>(SearchRequest);
if (searchResponse.ApiCall.RequestBodyInBytes != null)
{
var requestJson = System.Text.Encoding.UTF8.GetString(searchResponse.ApiCall.RequestBodyInBytes);
var JsonFormatQuery = JsonConvert.SerializeObject(JsonConvert.DeserializeObject(requestJson), Formatting.Indented);
}
while (searchResponse.Documents.Any())
{
//ProcessResponse(searchResponse);
searchResponse = _elasticClient.Scroll<Acquirer>("10s", searchResponse.ScrollId);
}
It gives me an empty array. When I remove the while loop below, I get the desired output, limited to the Size I asked for:
while (searchResponse.Documents.Any())
{
//ProcessResponse(searchResponse);
searchResponse = _elasticClient.Scroll<Acquirer>("10s", searchResponse.ScrollId);
}
This is the JSON generated from the query, and I don't understand why the scroll parameter is not in it when I have declared it:
{
"query": {
"bool": {
"filter": [
{
"range": {
"TRANSACTIONDATE": {
"format": "yyyy-MM",
"gte": "2019-10||/M",
"time_zone": "+01:00",
"boost": 1.1
}
}
}
]
}
},
"size": 2
}
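For reference, here is a minimal sketch of a complete scroll loop with NEST (assuming a NEST client injected as _elasticClient, the Acquirer POCO, and the filters/filterClause lists from the question; the allDocuments list is just for illustration). Note that NEST sends the scroll keep-alive as a URL query-string parameter (?scroll=1m) rather than in the request body, which is most likely why it does not show up in the serialized JSON above:
var request = new SearchRequest<Acquirer>("acquiringdata")
{
    Size = 2,
    Scroll = "1m", // keep-alive between batches; sent on the query string, not in the body
    Query = new BoolQuery { Must = filters, Filter = filterClause }
};

var searchResponse = await _elasticClient.SearchAsync<Acquirer>(request);
var allDocuments = new List<Acquirer>();

while (searchResponse.Documents.Any())
{
    allDocuments.AddRange(searchResponse.Documents);
    // Fetch the next batch using the scroll id returned by the previous response.
    searchResponse = await _elasticClient.ScrollAsync<Acquirer>("1m", searchResponse.ScrollId);
}

// Release the scroll context once all batches have been consumed.
await _elasticClient.ClearScrollAsync(c => c.ScrollId(searchResponse.ScrollId));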
I am trying to use a filter with a search query. The search request works correctly without the filter, but with the filter I get a 400 error as the response.
This is the type mapping:
var mapp = new
{
mappings = new
{
posts = new
{
properties = new
{
FullText = new
{
type = "string",
analyzer = "russian"
},
Title = new
{
type = "string",
analyzer = "russian"
},
PostPubDate = new
{
type = "date"
},
Link = new
{
type = "string",
index = "not_analyzed"
},
RubricsIds = new
{
type = "integer"
},
ObjectsIds = new
{
type = "integer"
},
SourceId = new
{
type = "integer"
}
}
}
}
};
This is the search request against the index using a filtered query:
string url = "http://localhost:9200/neg_collector/posts/_search";
var request = (HttpWebRequest)HttpWebRequest.Create(url);
var o = new
{
size = 20,
query = new
{
filtered = new
{
query = new
{
query_string = new
{
fields = new[] { "Title" },
query = search_query
}
},
filter = new
{
@bool = new
{
should = new
{
term = new
{
SourceId = sIds
}
}
}
}
}
}
};
request.Method = "POST";
var jsonObj = JsonConvert.SerializeObject(o);
var data = Encoding.UTF8.GetBytes(jsonObj);
request.ContentType = "application/x-www-form-urlencoded";
request.ContentLength = data.Length;
I want to use an array of integers to filter the results by certain SourceIds, but I get a 400 error.
What am I doing wrong? Thank you.
So the problem was that this syntax is for Elasticsearch 2 (and it worked fine on another computer). Here I have Elasticsearch 5 and should use a different way of filtering:
var o = new
{
size = 20,
query = new
{
@bool = new
{
must = new
{
query_string = new
{
fields = new[] { "Title" },
query = search_query
}
},
filter = new
{
terms = new
{
SourceId = new[] {10,11,12}
}
}
}
}
};
It is described HERE.
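As a side note, here is a minimal sketch of posting the corrected query object with an explicit application/json content type, which newer Elasticsearch versions expect (assuming Json.NET and a node at localhost:9200; the PostSearchAsync helper name is hypothetical):
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;
using Newtonsoft.Json;

public static async Task<string> PostSearchAsync(object query)
{
    var url = "http://localhost:9200/neg_collector/posts/_search";
    using (var client = new HttpClient())
    {
        // Serialize the anonymous query object and send it as JSON.
        var json = JsonConvert.SerializeObject(query);
        var content = new StringContent(json, Encoding.UTF8, "application/json");
        var response = await client.PostAsync(url, content);
        response.EnsureSuccessStatusCode();
        return await response.Content.ReadAsStringAsync();
    }
}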
I need to add one more node to a JSON string.
The following is the code I use to read the data:
var url = "https://xyz_12232_abc/0908978978979.json";
var sys = new WebClient();
var content = sys.DownloadString(url);
I received the following output from the above code:
{
"2312312312313":
{
"emailId":"abc#gmail.com",
"model":"XYZ001",
"phone":"+654784512547",
"userName":"User1"
},
"23456464512313":
{
"emailId":"abcd#gmail.com",
"model":"XYZ002",
"phone":"+98745114474",
"userName":"User2"
},
"45114512312313":
{
"emailId":"abcde#gmail.com",
"model":"XYZ3",
"phone":"+214784558741",
"userName":"User3"
}
}
But I want the output to look like this:
{
"Records": [
{
"UID":"2312312312313":,
"emailId":"abc#gmail.com",
"model":"XYZ001",
"phone":"+654784512547",
"userName":"User1"
},
{
"UID":"23456464512313":,
"emailId":"abcd#gmail.com",
"model":"XYZ002",
"phone":"+98745114474",
"userName":"User2"
},
{
"UID":"45114512312313":,
"emailId":"abcde#gmail.com",
"model":"XYZ3",
"phone":"+214784558741",
"userName":"User3"
}
]
}
Now, how can this be achieved?
You can use Json.NET to massage the data into your desired output:
var jsonStr = #"..."; // your JSON here
var obj = JsonConvert.DeserializeObject<Dictionary<string, JObject>>(jsonStr);
var formattedObj = new
{
Records = obj.Select(x =>
{
x.Value.AddFirst(new JProperty("UID", x.Key));
return x.Value;
})
};
// serialize back to JSON
var formattedJson = JsonConvert.SerializeObject(formattedObj);
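If you also want the indented layout shown in the question, Formatting.Indented can be passed when serializing (same Json.NET types as above):
// Produce indented JSON matching the "Records" layout above.
var formattedJson = JsonConvert.SerializeObject(formattedObj, Formatting.Indented);
Console.WriteLine(formattedJson);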
I am trying to retrieve data from DynamoDB based on a search criterion using a Scan request, following the steps described at http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/LowLevelDotNetScanning.html. My DynamoDB table contains more than 1 million records. I know that with a do-while loop and ExclusiveStartKey I can fetch all the records from DynamoDB, but in my case I cannot wait until the scan is complete, as that would hang my AngularJS UI. Instead, I want to load the data progressively without waiting for the whole scan to finish. How can I do that?
Sample request:
var lastEvaluatedKey = new Dictionary<string, AttributeValue>();
AmazonDynamoDBClient amazonDynamoDbClient = new AmazonDynamoDBClient();
var filterExpression = "#aws_s3_bucket = :v_aws_s3_bucket and contains(#aws_s3_key,:v_aws_s3_key)";
var projectExpression = "#aws_s3_key,filename,#region,aws_s3_bucket,#projecttype,folder,#siteid,locationname,createdon,modifiedon";
do
{
var request = new ScanRequest
{
TableName = "Test1",
ExclusiveStartKey=lastEvaluatedKey,
FilterExpression = filterExpression,
ExpressionAttributeNames = new Dictionary<string, string>
{
{ "#region", "region" },
{ "#siteid", "siteid" },
{ "#projecttype", "projecttype" },
{ "#aws_s3_key", "aws_s3_key" },
{ "#aws_s3_bucket", "aws_s3_bucket" }
},
ExpressionAttributeValues = new Dictionary<string, AttributeValue> {
{":v_aws_s3_bucket", new AttributeValue { S = "sampleBucket"}},
{":v_aws_s3_key", new AttributeValue { S = "92226"}}
},
ConsistentRead = true,
ProjectionExpression = projectExpression
};
response = amazonDynamoDbClient.Scan(request);
lastEvaluatedKey = response.LastEvaluatedKey;
} while (lastEvaluatedKey != null && lastEvaluatedKey.Count != 0);
I tried executing the above request without the do-while loop and saving the ExclusiveStartKey for the next request, but it throws the error "The provided starting key is invalid: One or more parameter values were invalid: Null attribute value types must have the value of true".
Any help on this issue would be appreciated.
The error you're getting appears to be because you're setting ExclusiveStartKey on the request without setting any values for its parameters. Notice how you aren't updating request.ExclusiveStartKey after you get your response. Obviously, if you don't do that, the scan won't know where to pick up again when you hit your limit. See below.
AmazonDynamoDBClient amazonDynamoDbClient = new AmazonDynamoDBClient();
var filterExpression = "#aws_s3_bucket = :v_aws_s3_bucket and contains(#aws_s3_key,:v_aws_s3_key)";
var projectExpression = "#aws_s3_key,filename,#region,aws_s3_bucket,#projecttype,folder,#siteid,locationname,createdon,modifiedon";
ScanRequest request = new ScanRequest
{
TableName = "Test1",
FilterExpression = filterExpression,
ExpressionAttributeNames = new Dictionary<string, string>
{
{ "#region", "region" },
{ "#siteid", "siteid" },
{ "#projecttype", "projecttype" },
{ "#aws_s3_key", "aws_s3_key" },
{ "#aws_s3_bucket", "aws_s3_bucket" }
},
ExpressionAttributeValues = new Dictionary<string, AttributeValue> {
{":v_aws_s3_bucket", new AttributeValue { S = "sampleBucket"}},
{":v_aws_s3_key", new AttributeValue { S = "92226"}}
},
ConsistentRead = true,
ProjectionExpression = projectExpression
};
ScanResponse response;
do
{
response = amazonDynamoDbClient.Scan(request);
request.ExclusiveStartKey = response.LastEvaluatedKey;
} while (response.LastEvaluatedKey.Count != 0);
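For the progressive-loading part of the question, one common pattern (a sketch, not part of the answer above; the ScanPageAsync helper and the idea of handing the key back to the AngularJS client are assumptions) is to run one Scan per API call and return LastEvaluatedKey to the caller as a paging token, so the UI can request the next page whenever it is ready:
// Hypothetical helper: fetch a single page per call and expose the paging key to the caller.
public async Task<(List<Dictionary<string, AttributeValue>> Items, Dictionary<string, AttributeValue> NextKey)>
    ScanPageAsync(ScanRequest request, Dictionary<string, AttributeValue> startKey)
{
    var client = new AmazonDynamoDBClient();

    // Only set ExclusiveStartKey when a previous page actually returned a key.
    if (startKey != null && startKey.Count != 0)
        request.ExclusiveStartKey = startKey;

    var response = await client.ScanAsync(request);

    // An empty LastEvaluatedKey means there are no further pages.
    return (response.Items, response.LastEvaluatedKey);
}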
I am trying to display/list data after using an aggregation function, but it isn't working.
This code works absolutely fine.
var connectionstring = "mongodb://localhost:27017";
var client = new MongoClient(connectionstring);
var db = client.GetDatabase("school");
var col = db.GetCollection<BsonDocument>("students");
var filter = new BsonDocument("type", "homework");
var filter2 = Builders<BsonDocument>.Filter.Eq("scores.type", "homework");
var myresults = await col.Find(filter2)
.Limit(2)
.Project("{name:1,scores:1,_id:0}")
.Sort("{score:1}")
.ToListAsync();
foreach (var result in myresults)
{
Console.WriteLine(result);
}
This code fetches documents as it should; however, when I replace
var myresults = await col.Find(filter2)
.Limit(2)
.Project("{name:1,scores:1,_id:0}")
.Sort("{score:1}")
.ToListAsync();
with this
var myresults = await col.Aggregate()
.Unwind("{$scores}")
.Group(new BsonDocument { { "_id", "$_id" }, { "lowscore", new BsonDocument("$min", "$scores.score") } })
//.Group("{_id:'$_id',lowscore:{$min:'$scores.score'}}")
.ToListAsync();
no records are returned.
I do not want to use the Pipeline method; I simply want to display the result obtained via the aggregate function.
This is my Mongo shell query (I want the same result as this in C#):
db.students.aggregate([{$sort:{_id:-1}},{$unwind:"$scores"},{$group:{_id:"$_id", lowscore:{"$min":"$scores.score"}}}])
Building an aggregation pipeline is a bit tricky.
Try:
var pipeline = new BsonDocument[] {
new BsonDocument{ { "$sort", new BsonDocument("_id", 1) }},
new BsonDocument{{"$unwind", "$scores"}},
new BsonDocument{{"$group", new BsonDocument{
{"_id", "$_id"},
{"lowscore",new BsonDocument{
{"$min","$scores.score"}}
}}
}}
};
var result = await collection.Aggregate<BsonDocument>(pipeline).ToListAsync();
If you call pipeline.ToJson(), you'll get the following JSON string, which is equivalent to your original, tested Mongo shell query.
[
{
"$sort": {
"_id": 1
}
},
{
"$unwind": "$scores"
},
{
"$group": {
"_id": "$_id",
"lowscore": {
"$min": "$scores.score"
}
}
}
]
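For completeness, a minimal usage sketch (assuming the school database and students collection from the question, and the pipeline array defined above) that awaits the aggregation and prints each grouped document:
var client = new MongoClient("mongodb://localhost:27017");
var collection = client.GetDatabase("school").GetCollection<BsonDocument>("students");

// Run the pipeline defined above and print each { _id, lowscore } document.
var results = await collection.Aggregate<BsonDocument>(pipeline).ToListAsync();
foreach (var doc in results)
{
    Console.WriteLine(doc);
}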
This is wrong: {$scores} isn't even valid JSON. Remove the curly braces and the dollar sign from the $unwind directive.
The parameter name is field, so you need to provide a field name to it.
Try writing only $score instead of $scores.score; maybe that helps.
db.students.aggregate([{$sort:{_id:-1}},{$unwind:"$scores"},{$group:{_id:"$_id", lowscore:{"$min":"$score"}}}])
I'm working on a project in C# in which I want to enter a search term, hit the search button, and then retrieve parts of the response from Google into an array so I can iterate through them.
Searching Google using their JSON-based API is pretty easy:
var client = new HttpClient();
var address = new Uri("https://ajax.googleapis.com/ajax/services/search/web?v=1.0&q=" + term);
HttpResponseMessage response = await client.GetAsync(address);
String stream = await response.Content.ReadAsStringAsync();
This returns a JSON string like the one below (results for the term "Test search"):
{
"responseData":{
"results":[
{
"GsearchResultClass":"GwebSearch",
"unescapedUrl":"http://en.wikipedia.org/wiki/Wikipedia:Search_engine_test",
"url":"http://en.wikipedia.org/wiki/Wikipedia:Search_engine_test",
"visibleUrl":"en.wikipedia.org",
"cacheUrl":"http://www.google.com/search?q\u003dcache:g6KEStELS_MJ:en.wikipedia.org",
"title":"Wikipedia:\u003cb\u003eSearch\u003c/b\u003eengine\u003cb\u003etest\u003c/b\u003e-Wikipedia,thefreeencyclopedia",
"titleNoFormatting":"Wikipedia:Searchenginetest-Wikipedia,thefreeencyclopedia",
"content":"A\u003cb\u003esearch\u003c/b\u003eengine\u003cb\u003etest\u003c/b\u003ecannothelpyouavoidtheworkofinterpretingyourresultsanddecidingwhattheyreallyshow.Appearanceinanindexaloneisnotusually\u003cb\u003e...\u003c/b\u003e"
},
{
"GsearchResultClass":"GwebSearch",
"unescapedUrl":"http://techcrunch.com/2008/07/16/google-continues-to-test-a-search-interface-that-looks-more-like-digg-every-day/",
"url":"http://techcrunch.com/2008/07/16/google-continues-to-test-a-search-interface-that-looks-more-like-digg-every-day/",
"visibleUrl":"techcrunch.com",
"cacheUrl":"http://www.google.com/search?q\u003dcache:r2laSUVQw8kJ:techcrunch.com",
"title":"GoogleContinuesTo\u003cb\u003eTest\u003c/b\u003eA\u003cb\u003eSearch\u003c/b\u003eInterfaceThatLooksMoreLike\u003cb\u003e...\u003c/b\u003e",
"titleNoFormatting":"GoogleContinuesToTestASearchInterfaceThatLooksMoreLike...",
"content":"Jul16,2008\u003cb\u003e...\u003c/b\u003eAcoupleofdaysagowepostedscreenshotsofanew\u003cb\u003esearch\u003c/b\u003einterfacebeingbucket\u003cb\u003etested\u003c/b\u003ebyGooglethatletsusersvoteupordownon\u003cb\u003e...\u003c/b\u003e"
},
{
"GsearchResultClass":"GwebSearch",
"unescapedUrl":"http://googleblog.blogspot.com/2006/04/this-is-test-this-is-only-test.html",
"url":"http://googleblog.blogspot.com/2006/04/this-is-test-this-is-only-test.html",
"visibleUrl":"googleblog.blogspot.com",
"cacheUrl":"http://www.google.com/search?q\u003dcache:Ozl1cQzRT0IJ:googleblog.blogspot.com",
"title":"Thisisa\u003cb\u003etest\u003c/b\u003e.Thisisonlya\u003cb\u003etest\u003c/b\u003e.|OfficialGoogleBlog",
"titleNoFormatting":"Thisisatest.Thisisonlyatest.|OfficialGoogleBlog",
"content":"Apr24,2006\u003cb\u003e...\u003c/b\u003eFromtimetotime,werunliveexperimentsonGoogle—\u003cb\u003etests\u003c/b\u003evisibletoarelativelyfewpeople--todiscoverbetterwaysto\u003cb\u003esearch\u003c/b\u003e.Wedothis\u003cb\u003e...\u003c/b\u003e"
},
{
"GsearchResultClass":"GwebSearch",
"unescapedUrl":"http://alistapart.com/article/testing-search-for-relevancy-and-precision",
"url":"http://alistapart.com/article/testing-search-for-relevancy-and-precision",
"visibleUrl":"alistapart.com",
"cacheUrl":"http://www.google.com/search?q\u003dcache:02Sjrd5mb0YJ:alistapart.com",
"title":"\u003cb\u003eTestingSearch\u003c/b\u003eforRelevancyandPrecision·AnAListApartArticle",
"titleNoFormatting":"TestingSearchforRelevancyandPrecision·AnAListApartArticle",
"content":"Sep22,2009\u003cb\u003e...\u003c/b\u003eDespitethefactthatsite\u003cb\u003esearch\u003c/b\u003eoftenreceivesthemosttraffic,it\u0026#39;salsotheplacewheretheuserexperiencedesignerbearstheleastinfluence."
}
],
"cursor":{
"resultCount":"1,010,000,000",
"pages":[
{
"start":"0",
"label":1
},
{
"start":"4",
"label":2
},
{
"start":"8",
"label":3
},
{
"start":"12",
"label":4
},
{
"start":"16",
"label":5
},
{
"start":"20",
"label":6
},
{
"start":"24",
"label":7
},
{
"start":"28",
"label":8
}
],
"estimatedResultCount":"1010000000",
"currentPageIndex":0,
"moreResultsUrl":"http://www.google.com/search?oe\u003dutf8\u0026ie\u003dutf8\u0026source\u003duds\u0026start\u003d0\u0026hl\u003den\u0026q\u003dTest+search",
"searchResultTime":"0.23"
}
},
"responseDetails":null,
"responseStatus":200
}
How do I get the value of url in each node pushed into an array so I can iterate through it?
You can use the dynamic keyword with Json.NET:
dynamic jObj = JsonConvert.DeserializeObject(json);
foreach (var res in jObj.responseData.results)
{
Console.WriteLine("{0} => {1}\n",res.title,res.url);
}
You can use LINQ too:
var jObj = (JObject)JsonConvert.DeserializeObject(json);
string[] urls = jObj["responseData"]["results"]
.Select(x => (string)x["url"])
.ToArray();
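And a quick usage check (assuming the urls array produced by the LINQ variant above):
// Iterate over the extracted result URLs.
foreach (var url in urls)
{
    Console.WriteLine(url);
}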