Tabulator highlight entries on data refresh

I have a Tabulator table used to display statistics from live football matches. The table is updated every 60 seconds using the following function:
setInterval(function(){
    // remember which rows the user has highlighted before refreshing
    selectedtableData = table.getSelectedRows();
    table.replaceData()
        .then(function(){
            // re-select the saved rows by their index (matchid)
            for (var f = 0; f < selectedtableData.length; f++) {
                var rowIndex = selectedtableData[f].getIndex();
                table.selectRow(rowIndex);
            }
        })
        .catch(function(error){
            // handle error loading data
        });
}, 60000);
I allow users to highlight rows, so this function saves the selected rows before the refresh and re-selects them after replaceData() has updated the table.
The table is fairly standard stuff, using icons for headers to conserve space, and is refreshed from a JSON file:
var table = new Tabulator("#example-table", {
layout:"fitColumns",
ajaxURL:"../new/console.json",
// ajaxURL:"console.json",
height:800,
groupBy:"league",
paginationSize:100,
selectable:true,
index:"matchid",
selectablePersistence:true,
columns:[
{field:"",
columns:[
{width:5,headerSort:false},
{formatter:"rowSelection", titleFormatter:"rowSelection", align:"center", headerSort:false,width:10},
{title:"league", field:"league",width:5,headerSort:false,visible:false,formatter:"html"},
{title:"img/clock2.png", field:"time", sorter:"date",width:40,titleFormatter:"image",headerSort:false,headerTooltip:"Time gone"},
{title:"img/goal2.png", field:"score", sorter:"string",width:40,formatter:"html",align:"center",headerSort:false,titleFormatter:"image",headerTooltip:"Current Score"},
{headerFilter:true,title:"Teams",field:"team",headerSort:false, formatter:"html",width:160},
{title:"timep", field:"timep",visible:false},
{title:"matchid", field:"matchid",visible:false},
{title:"totalap1", field:"totalap1",visible:false},
{title:"draw", field:"draw",visible:false},
{title:"homescore", field:"homescore",visible:false},
{title:"awayscore", field:"awayscore",visible:false},
{title:"homecorners", field:"homecorners",visible:false},
{title:"awaycorners", field:"awaycorners",visible:false},
{title:"homesont", field:"homesont",visible:false},
{title:"awaysont", field:"awaysont",visible:false},
{title:"homesoft", field:"homesoft",visible:false},
{title:"awaysoft", field:"awaysoft",visible:false},
{title:"homeposs", field:"homeposs",visible:false},
{title:"awayposs", field:"awayposs",visible:false},
{title:"homeda", field:"homeda",visible:false},
{title:"awayda", field:"awayda",visible:false},
{title:"homelp", field:"homelp",visible:false},
{title:"awaylp", field:"awaylp",visible:false},
{title:"homeyc", field:"homeyc",visible:false},
{title:"awayyc", field:"awayyc",visible:false},
{title:"homerc", field:"homerc",visible:false},
{title:"awayrc", field:"awayrc",visible:false},
{title:"homet", field:"homet",visible:false},
{title:"awayt", field:"awayt",visible:false},
{title:"img/pressure2.png",field:"pi1",width:30,formatter:"html",titleFormatter:"image",headerClick:ap1sort,headerSort:false,align:"center",headerTooltip:"Attacking Pressure1",
cellMouseEnter:function(e, cell, row){var row=cell.getRow().getData();
var celldata=cell.getElement();if (!celldata._tippy){tippy(celldata, { content: "Attacking Pressure <iframe style=\"background:#FFFFFF;\" src=\"../pressure/showgraph.php?gameid="+row.matchid+"&pi=pi1&time="+row.timep+"\" width=420 height=225></iframe>",maxWidth:"450px",a11y: false,arrow:true,
})}celldata._tippy.show();}},
{title:"img/pressure2.png",field:"pi2",width:30,formatter:"html",headerSort:false,titleFormatter:"image",headerTooltip:"Attacking Pressure2",
cellMouseEnter:function(e, cell, row){var row=cell.getRow().getData();
var celldata=cell.getElement();if (!celldata._tippy){tippy(celldata, { content: "Attacking Pressure 2 <iframe style=\"background:#FFFFFF;\" src=\"../pressure/showgraph.php?gameid="+row.matchid+"&pi=pi2&time="+row.timep+"\" width=420 height=225></iframe>",maxWidth:"450px",a11y: false,arrow:true,
})}celldata._tippy.show();}},
{title:"img/poss.png", field:"poss",headerSort:false,headerTooltip:"possesion",width:50,formatter:"html",titleFormatter:"image"},
{title:"img/soont.png" , field:"shotsont", sorter:"number", width:45,align:"center",headerSort:false,titleFormatter:"image",formatter:"html",headerTooltip:"Shots on Target"},
{title:"img/sooft.png" , field:"shotofft", sorter:"number", width:45,align:"center",headerSort:false,titleFormatter:"image",formatter:"html",headerTooltip:"Shots off Target"},
{title:"img/shotsinbox.png" , field:"sinbox", width:40,titleFormatter:"image",headerTooltip:"Shots in the Box",headerSort:false,align:"center",formatter:"html",headerTooltip:"Shots in the Box"},
{title:"img/dattacks.png" , field:"dattack", sorter:"number", width:60,headerSort:false,align:"center",titleFormatter:"image",formatter:"html",headerTooltip:"Dangerous attacks"},
{title:"img/attacks.png" , field:"attack", sorter:"number", width:60,headerSort:false,align:"center",titleFormatter:"image",formatter:"html",headerTooltip:"Attacks"},
{title:"img/corner.png" , field:"corners", width:40,titleFormatter:"image",headerTooltip:"Corners",headerSort:false,align:"center",formatter:"html",headerTooltip:"Corners"},
{title:"img/foul.png" , field:"fouls", width:40,headerSort:false,align:"center",formatter:"html",titleFormatter:"image",headerTooltip:"Fouls"},
{title:"img/ycard.png" , field:"ycards", width:30 ,titleFormatter:"image",headerTooltip:"Yellow Card",headerSort:false,align:"center",formatter:"html"},
{title:"img/rcard.png" , field:"rcards", width:30 ,titleFormatter:"image",headerTooltip:"Red Card",headerSort:false,align:"center",formatter:"html"},
{title:"img/form.png", field:"form",headerSort:false,headerTooltip:"form",formatter:"html",width:100,align:"center",titleFormatter:"image",
cellMouseEnter:function(e, cell, row){var row=cell.getRow().getData();
var celldata=cell.getElement();if (!celldata._tippy){tippy(celldata, { content: "% Stats are based on the last 5 games<iframe style=\"background:#FFFFFF;\" src=\"../stats/games/"+today+"/"+row.matchid+".html\" width=620 height=225</iframe>",maxWidth:"650px",a11y: false,arrow:true,
})}celldata._tippy.show();}},
]},
{title:"Previous 10 mins",
columns:[
{title:"img/poss.png", field:"10poss",headerSort:false,headerTooltip:"Possesion - Last 10 mins",width:74,formatter:"html",titleFormatter:"image"},
{title:"img/soont.png" , field:"10shotsont", sorter:"number", width:45,align:"center",headerSort:false,titleFormatter:"image",formatter:"html",headerTooltip:"Shots on Target - Last 10 mins"},
{title:"img/sooft.png" , field:"10shotofft", sorter:"number", width:45,align:"center",headerSort:false,titleFormatter:"image",formatter:"html",headerTooltip:"Shots off Target - Last 10 mins"},
{title:"img/dattacks.png" , field:"10dattack", sorter:"number", width:55,headerSort:false,align:"center",titleFormatter:"image",formatter:"html",headerTooltip:"Dangerous attacks - Last 10 mins"},
{title:"img/corner.png" , field:"10corners", width:30,titleFormatter:"image",headerTooltip:"Corners",headerSort:false,align:"center",formatter:"html",headerTooltip:"Corners - Last 10 mins"},
]},
{title:"Previous 5 mins",
columns:[
{title:"img/poss.png", field:"5poss",headerSort:false,headerTooltip:"Possesion - Last 5 mins",width:74,formatter:"html",titleFormatter:"image",visible:false},
{title:"img/soont.png" , field:"5shotsont", sorter:"number", width:45,align:"center",headerSort:false,titleFormatter:"image",formatter:"html",visible:false,headerTooltip:"Shots on Target - Last 5 mins"},
{title:"img/sooft.png" , field:"5shotofft", sorter:"number", width:45,align:"center",headerSort:false,titleFormatter:"image",formatter:"html",visible:false,headerTooltip:"Shots off Target - Last 5 mins"},
{title:"img/dattacks.png" , field:"5dattack", sorter:"number", width:55,headerSort:false,align:"center",titleFormatter:"image",formatter:"html",visible:false,headerTooltip:"Dangerous attacks - Last 5 mins"},
{title:"img/corner.png" , field:"5corners", width:30,titleFormatter:"image",headerTooltip:"Corners",headerSort:false,align:"center",formatter:"html",visible:false,headerTooltip:"Corners - Last 5 mins"},
]},
{title:"Previous 20 mins",
columns:[
{title:"img/poss.png", field:"20poss",headerSort:false,headerTooltip:"Possesion - Last 20 mins",width:74,formatter:"html",titleFormatter:"image",visible:false},
{title:"img/soont.png" , field:"20shotsont", sorter:"number", width:45,align:"center",headerSort:false,titleFormatter:"image",formatter:"html",visible:false,headerTooltip:"Shots on Target - Last 20 mins"},
{title:"img/sooft.png" , field:"20shotofft", sorter:"number", width:45,align:"center",headerSort:false,titleFormatter:"image",formatter:"html",visible:false,headerTooltip:"Shots off Target - Last 20 mins"},
{title:"img/dattacks.png" , field:"20dattack", sorter:"number", width:55,headerSort:false,align:"center",titleFormatter:"image",formatter:"html",visible:false,headerTooltip:"Dangerous attacks - Last 20 mins"},
{title:"img/corner.png" , field:"20corners", width:30,titleFormatter:"image",headerTooltip:"Corners",headerSort:false,align:"center",formatter:"html",visible:false,headerTooltip:"Corners - Last 20 mins"},
]},],
});
The data file is a simple JSON file; it can contain 300+ nodes or just one, depending on how many games are ongoing:
{
"league" : "<img src=flags/iran.png> Iran :- Azadegan League",
"time" : "90+",
"selectmh" : "",
"timep" : "90",
"matchid" : "1084ED7",
"team" : "Mes Rafsanjan FC(3)<br>Sepidrood Rasht(16)",
"score" : "<b>1<br>0</b>",
"homescore" : "1",
"awayscore" : "0",
"totalg" : "1",
"totalshot" : "0",
"totalshotot" : "0",
"totalda" : "0",
"totalco" : "0",
"totalshotib" : "0",
"scorediff" : "1",
"homecorners" : "0",
"awaycorners" : "0",
"homesont" : "0",
"awaysont" : "0",
"homesoft" : "0",
"awaysoft" : "0",
"homeposs" : "0",
"awayposs" : "0",
"homeda" : "0",
"awayda" : "0",
"homelp" : "3",
"awaylp" : "16",
"homeyc" : "0",
"awayyc" : "0",
"homerc" : "0",
"awayrc" : "0",
"homet" : "Mes Rafsanjan FC",
"awayt" : "Sepidrood Rasht",
"activity" : "",
"pi1" : "0<br>10",
"pi2" : "1<br>1",
"hpi1" : "0",
"api1" : "10",
"hpi2" : "1",
"api2" : "1",
"poss" : "0%<br>0%",
"sinbox" : "0<br>0",
"shotsont" : "0<br>0",
"shotofft" : "0<br>0",
"corners" : "0<br>0",
"dattack" : "0<br>0",
"attack" : "0<br>0",
"fouls" : "0<br>0",
"ycards" : "0<br>0",
"rcards" : "0<br>0",
"10poss" : "0%<br>0%",
"10hshotsont" : "0",
"10ashotsont" : "1",
"10tshotsont" : "1",
"5tshotsont" : "0",
"20tshotsont" : "1",
"5shotofftt" : "0",
"5cornert" : "0",
"10shotofftt" : "0",
"10cornert" : "0",
"10shotsont" : "0<br><span class=away>1</span>",
"10shotofft" : "0<br>0",
"10corners" : "0<br>0",
"10dattack" : "2<br>2",
"20hshotsont" : "0",
"20ashotsont" : "1",
"20poss" : "0%<br>0%",
"20shotsont" : "0<br><span class=away>1</span>",
"20shotofft" : "0<br>0",
"20corners" : "0<br>0",
"20dattack" : "13<br>9",
"5hshotsont" : "0",
"5ashotsont" : "0",
"5poss" : "0%<br>0%",
"5shotsont" : "0<br>0",
"5shotofft" : "0<br>0",
"5corners" : "0<br>0",
"5dattack" : "0<br>0",
"form" : "<span class=d>D</Span><span class=lost>L</Span><span class=w>W</Span><span class=w>W</Span><span class=w>W</Span><br><span class=lost>L</Span><span class=lost>L</Span><span class=lost>L</Span><span class=lost>L</Span><span class=lost>L</Span>",
"draw" : "0",
"totalap1" : "10"}
As the data refreshes the score can change, and I want to highlight this. I could do it in the source JSON file by adding some CSS attributes, but as time is critical in this app that would be an extra comparison, and I would need to store the previous score somewhere and retrieve it. Is there any function I can use in Tabulator to apply some CSS automatically if a value has changed since the previous refresh?
Hope that all makes sense

So I eventually solved this outside of Tabulator. I have a PHP script which creates the JSON that Tabulator reads; in this script I compare the last score with the current score and set a JSON variable when they differ. Then in Tabulator I made this variable non-visible and added a custom formatter to the score column. My variable is gs (goal scored), and I set it to 1 if a goal has been scored:
{title:"img/goal2.png", field:"score", sorter:"string",width:40,align:"center",headerSort:false,titleFormatter:"image",headerTooltip:"Current Score",formatter:function(row,cell, formatterParams, onRendered){
var data = row.getData();
if (data.gs==1)
{var score2= '<span class="blinking">'+data.score+'</span>';}
else
{var score2=data.score;}
return score2;
},},
I then added some CSS:
.blinking{
    animation: blinkingText 1.2s infinite;
}
@keyframes blinkingText{
    0%   { color: #FF0000; }
    49%  { color: #FF0000; }
    60%  { color: transparent; }
    99%  { color: transparent; }
    100% { color: #FF0000; }
}
The data updates every 30 seconds, so the blink effect is set to infinite and is then overwritten on the next refresh.
Probably not the neatest way to do it, but it works.
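For anyone who wants to keep this purely client-side, the same comparison could be done in the browser instead of PHP by caching the previous scores between refreshes. A minimal, untested sketch (the previousScores map is my own addition, not part of Tabulator):
// cache of the last seen score for each match, persisted across refreshes
var previousScores = {};

// custom formatter for the score column: blink when the value has changed
formatter: function(cell, formatterParams, onRendered){
    var data = cell.getData();
    var changed = previousScores[data.matchid] !== undefined &&
                  previousScores[data.matchid] !== data.score;
    previousScores[data.matchid] = data.score;   // remember for the next refresh
    return changed ? '<span class="blinking">' + data.score + '</span>' : data.score;
}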

Related

elasticsearch-dsl-py query filter with term and range

I'm trying to filter a query with term and range along with a query string. filter(range) and the query string work, but not filter(term). Am I doing something wrong?
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index

es = Elasticsearch([{'host': '192.168.121.121', 'port': 9200}])
index = Index("filebeat-*", using=es)
search = index.search()
searchStr = "OutOfMemoryError"
search = search.query("query_string", query=searchStr)
search = search.filter('range', **{'@timestamp': {'gte': 1589399137000, 'lt': 1589399377000, 'format': 'epoch_millis'}})
search = search.filter('term', **{'can.deployment': 'can-*'})
response = search.execute(ignore_cache=True)
print(response.hits.total)
print(response.hits.hits._source.can.deployment)
The JSON fields being filtered (marked with >>>>>>>> below):
filter-term:  ['hits']['hits']['_source']['can']['deployment']
filter-range: ['hits']['hits']['_source']['@timestamp']
{
"hits" : {
"total" : 138351328,
"max_score" : 6.5700893,
"hits" : [
{
"_index" : "filebeat-6.1.2-2020.05.13",
"_type" : "doc",
"_score" : 2.0166037,
"_source" : {
"#timestamp" : "2020-05-13T01:14:03.354Z",
"source" : "/var/log/gw_rest/gw_rest.log",
"message" : "[2020-05-13 01:14:03.354] WARN can_gw_rest [EventLoopGroup-3-2]: An exceptionCaught() event was fired.OutOfMemoryError,
"fileset" : {...},
"can" : {
"level" : "WARN",
>>>>>>>> "message" : "An exceptionCaught() event was fired- OutOfMemoryError,
"timestamp" : "2020-05-13 01:14:03.354",
>>>>>>>> "deployment" : "can-6b721b93965b-w3we4-4074-9903"
}
}
}
]
}
}
I actually didn't need a filter(term); this worked:
dIds = response['hits']['hits'][1]['_source']['can']['deployment']
print(dIds)

# loop through the response
for i in response['hits']['hits']:
    id = i['_source']['can']['deployment']
    print(id)
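Worth noting why the original filter found nothing: a term query matches the stored value exactly, so the * in 'can-*' is treated as a literal character, not a wildcard. If pattern matching on that field were actually required, a wildcard query along these lines should work (untested, and assuming can.deployment is a keyword/not_analyzed field):
search = search.filter('wildcard', **{'can.deployment': 'can-*'})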

Filter array elements with $regex

// Sample data
{
"_id" : "CUST1234",
"Phone Number" : "9585290750",
"First Name" : "jeff",
"Last Name" : "ayan",
"Email ID" : "",
"createddate" : 1462559400000.0,
"services" : [
{
"type" : "Enquiry",
"timeSpent" : "0:00",
"trxID" : "TRXE20160881",
"CustomerQuery" : "Enquiry about travell agent numbers in basaveshwara nagara",
"ServiceProvided" : "provided info through whatsapp",
"Category" : "Tours/Travels",
"callTime" : "2016-05-06T18:30:00.000Z",
"ActualAmount" : 0,
"FinalAmount" : 0,
"DiscountRuppes" : 0,
"DiscountPerctange" : 0
},
{
"type" : "Enquiry",
"timeSpent" : "0:00",
"trxID" : "TRXE20160882",
"CustomerQuery" : "Enquiry about Electric bill payment of house",
"ServiceProvided" : "Service provided",
"Category" : "Utility Services",
"callTime" : "2016-05-10T18:30:00.000Z",
"ActualAmount" : 0,
"FinalAmount" : 0,
"DiscountRuppes" : 0,
"DiscountPerctange" : 0
},
{
"type" : "Enquiry",
"timeSpent" : "0:00",
"trxID" : "TRXE20160883",
"CustomerQuery" : "Enquiry about KPSC office number",
"ServiceProvided" : "provided info through whatsapp",
"Category" : "Govt Offices/Enquiries",
"callTime" : "2016-05-13T18:30:00.000Z",
"ActualAmount" : 0,
"FinalAmount" : 0,
"DiscountRuppes" : 0,
"DiscountPerctange" : 0
},
{
"type" : "Enquiry",
"timeSpent" : "0:00",
"trxID" : "TRXE20160884",
"CustomerQuery" : "Enquiry about Sagara appolo hospital contact number",
"ServiceProvided" : "provided the information through call",
"Category" : "Hospitals/Equipments",
"callTime" : "2016-05-14T18:30:00.000Z",
"ActualAmount" : 0,
"FinalAmount" : 0,
"DiscountRuppes" : 0,
"DiscountPerctange" : 0
}
]
}
Expected output: the entire document where elements of the "services" array match the particular string typed in the search box.
db.collection.aggregate([
{
$match: {
"Phone Number": "9585290750",
"services": { $regex: "/^t/", $options: "s i" }
}
},
{
$project: {
"Services": "services"
}
}
]);
I am facing an issue with the regex portion of the pipeline above; services is an array field. Please help me filter the data.

Since I am new to MongoDB, it took me a day to find a proper solution to this task. Here is the solution to my issue; if you have a better query than this, just post it or modify it:
db.collections.aggregate([
    { "$match": { "Corporate_ID": "id" } },
    { "$unwind": "$services" },
    { "$match": { "$or": [
        { "services.type": { $regex: 'TRXF2016088142', "$options": "i" } },
        { "services.timeSpent": { $regex: 'TRXF2016088142', "$options": "i" } },
        { "services.trxID": { $regex: 'TRXF2016088142', "$options": "i" } },
        { "services.CustomerQuery": { $regex: 'F', "$options": "i" } },
        { "services.ServiceProvided": { $regex: 'F', "$options": "i" } },
        { "services.Category": { $regex: 'F', "$options": "i" } },
        { "services.callTime": { $regex: 'TRXF2016088142', "$options": "i" } },
        { "services.ActualAmount": { $regex: 'TRXF2016088142', "$options": "i" } },
        { "services.FinalAmount": { $regex: 'TRXF2016088142', "$options": "i" } },
        { "services.DiscountRuppes": { $regex: 'TRXF2016088142', "$options": "i" } },
        { "services.DiscountPerctange": { $regex: 'TRXF2016088142', "$options": "i" } }
    ]}},
    { "$unwind": "$services" },
    { "$project": { "service": "$services" } }
])
This is because you are passing a string representation of a JavaScript regular expression object to $regex. Change your regex to one of the following:
"service": { "$regex": /^t/, "$options": "si" }
or
"service": { "$regex": "^t", "$options": "si" }

Mongodb Index usage slower with regex anchor

I've got a query that's using a regex anchor and it seems to be slower when running an index scan rather than a collection scan.
A bit of background to the question:
I have a MSSQL database that has approximately 2.8 million rows in a table. We were running the following query against the table to return approximately 2.6 million results in 23 seconds:
select * from table where column like 'IL%'
So out of curiosity I decided to see if mongodb could perform this any faster than my MSSQL database and on a new test server I created a mongodb database which I filled 1 collection (test1) with just under 3 million objects. Here's the basic structure of a document in a collection:
> db.test1.findOne()
{
"_id" : 2,
"Other_REV" : "NULL",
"Holidex_Code" : "W8BP0",
"Segment_Name" : "NULL",
"Source" : "Forecast",
"Date_" : ISODate("2009-11-12T11:14:00Z"),
"Rooms_Sold" : 3,
"FB_REV" : "NULL",
"Rate_Code" : "ILM87",
"Export_Date" : ISODate("2010-12-12T11:14:00Z"),
"Rooms_Rev" : 51
}
All of my records have Rate_Code prefixed with IL and I ran the following query against the database which took just over 3 seconds:
> db.test1.find({'Rate_Code':{$regex: /^IL/}}).explain()
{
"cursor" : "BasicCursor",
"isMultiKey" : false,
"n" : 2999999,
"nscannedObjects" : 2999999,
"nscanned" : 2999999,
"nscannedObjectsAllPlans" : 2999999,
"nscannedAllPlans" : 2999999,
"scanAndOrder" : false,
"indexOnly" : false,
"nYields" : 4,
"nChunkSkips" : 0,
"millis" : 3398,
"indexBounds" : {
},
"server" : "MONGODB:27017"
}
Out of curiosity I created an index to see if I could speed up the retrieval at all:
> db.test1.ensureIndex({'Rate_Code':1})
However this appears to actually slow down the resolution of the query to approximately 6 seconds on average:
> db.test1.find({'Rate_Code':{$regex: /^IL/}}).explain()
{
"cursor" : "BtreeCursor Rate_Code_1",
"isMultiKey" : false,
"n" : 2999999,
"nscannedObjects" : 2999999,
"nscanned" : 2999999,
"nscannedObjectsAllPlans" : 2999999,
"nscannedAllPlans" : 2999999,
"scanAndOrder" : false,
"indexOnly" : false,
"nYields" : 4,
"nChunkSkips" : 0,
"millis" : 5895,
"indexBounds" : {
"Rate_Code" : [
[
"IL",
"IM"
]
]
},
"server" : "MONGODB:27017"
}
The OS has 2GB of memory and appears to be holding both indexes quite comfortably in memory, with no disk usage recorded when the query is run:
> db.test1.stats()
{
"ns" : "purify.test1",
"count" : 2999999,
"size" : 623999808,
"avgObjSize" : 208.0000053333351,
"storageSize" : 790593536,
"numExtents" : 18,
"nindexes" : 2,
"lastExtentSize" : 207732736,
"paddingFactor" : 1,
"systemFlags" : 0,
"userFlags" : 0,
"totalIndexSize" : 153218240,
"indexSizes" : {
"_id_" : 83722240,
"Rate_Code_1" : 69496000
},
"ok" : 1
}
I'm thinking the slowdown is due to MongoDB performing a full scan of the index followed by a full collection scan, as it can't be sure that all my matches are in the index, but I'm not entirely sure this is the case. Is there any way this could be improved for better performance?
Thanks for any help.
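One avenue that might be worth testing (assuming only the Rate_Code values are needed rather than whole documents) is a covered query: project just the indexed field and exclude _id, so MongoDB can answer from the index alone and explain() should report indexOnly: true:
> db.test1.find({'Rate_Code': {$regex: /^IL/}}, {'Rate_Code': 1, '_id': 0}).explain()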

Using multiple location fields in ElasticSearch + Django-Haystack

I'm using django-haystack and ElasticSearch to index Stores.
Until now, each store had one lat,long coordinate pair; we had to change this to reflect the fact that one store can deliver products to very different (disjoint) regions, so I've added up to ten locations (lat,long pairs) to each store.
When using one location field everything was working fine and I got the right results. Now, with multiple location fields, I can't get any results, not even the previous ones, for the same user and store coordinates.
My index is as follows:
class StoreIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, use_template=True,
                             template_name='search/indexes/store/store_text.txt')
    location0 = indexes.LocationField()
    location1 = indexes.LocationField()
    location2 = indexes.LocationField()
    location3 = indexes.LocationField()
    location4 = indexes.LocationField()
    location5 = indexes.LocationField()
    location6 = indexes.LocationField()
    location7 = indexes.LocationField()
    location8 = indexes.LocationField()
    location9 = indexes.LocationField()

    def get_model(self):
        return Store

    def prepare_location0(self, obj):
        # If you're just storing the floats...
        return "%s,%s" % (obj.latitude, obj.longitude)

    # ..... up to prepare_location9
    def prepare_location9(self, obj):
        # If you're just storing the floats...
        return "%s,%s" % (obj.latitude_9, obj.longitude_9)
Is this the correct way to build my index?
From elasticsearch I get this mapping information:
curl -XGET http://localhost:9200/stores/_mapping?pretty=True
{
"stores" : {
"modelresult" : {
"properties" : {
"django_id" : {
"type" : "string"
},
"location0" : {
"type" : "geo_point",
"store" : "yes"
},
"location1" : {
"type" : "geo_point",
"store" : "yes"
},
"location2" : {
"type" : "geo_point",
"store" : "yes"
},
"location3" : {
"type" : "geo_point",
"store" : "yes"
},
"location4" : {
"type" : "geo_point",
"store" : "yes"
},
"location5" : {
"type" : "geo_point",
"store" : "yes"
},
"location6" : {
"type" : "geo_point",
"store" : "yes"
},
"location7" : {
"type" : "geo_point",
"store" : "yes"
},
"location8" : {
"type" : "geo_point",
"store" : "yes"
},
"location9" : {
"type" : "geo_point",
"store" : "yes"
},
"text" : {
"type" : "string",
"analyzer" : "snowball",
"store" : "yes",
"term_vector" : "with_positions_offsets"
}
}
}
}
}
Then, I try to query this way:
sqs0 = SearchQuerySet().dwithin('location0', usuario, max_dist).distance('location0',usuario).using('stores')
where:
usuario is a Point instance representing the user trying to find stores near his position and
max_dist is a D instance.
If I query directly using curl, I get no results either.
Here is the result of querying with curl and multiple location fields:
$ curl -XGET http://localhost:9200/stores/modelresult/_search?pretty=true -d '{ "query" : { "match_all": {} }, "filter" : {"geo_distance" : { "distance" : "6km", "location0" : { "lat" : -23.5, "lon" : -46.6 } } } } '
{
"took" : 1,
"timed_out" : false,
"_shards" : {
"total" : 5,
"successful" : 5,
"failed" : 0
},
"hits" : {
"total" : 0,
"max_score" : null,
"hits" : [ ]
}
}
If I comment out the fields location1-9 from the StoreIndex class everything works fine, but if I leave them in to get multiple location points, I get no results for the same query (user position). This happens both in Django and directly via curl: with only one location (say location0), both queries return correct results; with more locations (location0-9), neither gives any results.
Here's the result of querying directly with curl and only one location field:
$ curl -XGET http://localhost:9200/stores/modelresult/_search?pretty=true -d '{ "query" : { "match_all": {} }, "filter" : {"geo_distance" : { "distance" : "6km", "location0" : { "lat" : -23.5, "lon" : -46.6 } } } } '
{
"took" : 3,
"timed_out" : false,
"_shards" : {
"total" : 5,
"successful" : 5,
"failed" : 0
},
"hits" : {
"total" : 9,
"max_score" : 1.0,
"hits" : [ {
"_index" : "stores",
"_type" : "modelresult",
"_id" : "store.store.110",
"_score" : 1.0, "_source" : {"django_ct": "store.store", "text": "RESULT OF THE SEARCH \n\n", "django_id": "110", "id": "store.store.110", "location0": "-23.4487554,-46.58912"}
},
lot's of results here
]
}
}
Of course, I rebuild_index after any change in StoreIndex.
Any help on how to get multiple location fields working with Elasticsearch and Django?
PS.: I've cross posted this question on Django-Haystack and ElasticSearch Google Groups.
https://groups.google.com/d/topic/elasticsearch/85fg7vdCBBU/discussion
https://groups.google.com/d/topic/django-haystack/m2A3_SF8-ls/discussion
Thanks in advance
Mário
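One diagnostic that might help narrow this down is to fetch one indexed document directly (using the id store.store.110 seen in the single-field results above) and check whether location1 through location9 were actually written to the index:
$ curl -XGET 'http://localhost:9200/stores/modelresult/store.store.110?pretty=true'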

Using grep to replace every instance of a pattern after the first in bbedit

So I've got a really long txt file that follows this pattern:
},
"303" :
{
"id" : "4k4hk2l",
"color" : "red",
"moustache" : "no"
},
"303" :
{
"id" : "4k52k2l",
"color" : "red",
"moustache" : "yes"
},
"303" :
{
"id" : "fask2l",
"color" : "green",
"moustache" : "yes"
},
"304" :
{
"id" : "4k4hf4f4",
"color" : "red",
"moustache" : "yes"
},
"304" :
{
"id" : "tthj2l",
"color" : "red",
"moustache" : "yes"
},
"304" :
{
"id" : "hjsk2l",
"color" : "green",
"moustache" : "no"
},
"305" :
{
"id" : "h6shgfbs",
"color" : "red",
"moustache" : "no"
},
"305" :
{
"id" : "fdh33hk7",
"color" : "cyan",
"moustache" : "yes"
},
and I'm trying to format it into a proper JSON object with the following structure:
"303" :
{ "list" : [
{
"id" : "4k4hk2l",
"color" : "red",
"moustache" : "no"
},
{
"id" : "4k52k2l",
"color" : "red",
"moustache" : "yes"
},
{
"id" : "fask2l",
"color" : "green",
"moustache" : "yes"
}
]
}
"304" :
{ "list" : [
etc...
meaning I look for all patterns of ^"\d\d\d" : and keep the first unique one, but remove all the subsequent duplicates (for example, keep the first instance of "303" : but completely remove the rest of them; then keep the first instance of "304" : and remove the rest, etc.).
I've been attempting to do this within the BBEdit application, which has a grep option for search/replace. My pattern-matching fu is too weak to accomplish this. Any ideas? Or a better way to accomplish this task?
You can't capture a repeating capturing group; the capture will always contain only the last match of the group. So there's no way to do this with a single search/replace, short of dumbly repeating your group in the pattern. And even that is only a solution if you know the maximum count of elements in the resulting groups.
Say we have a string that is a simplified version of your data:
1a;1b;1c;1d;1e;2d;2e;2f;2g;3x;3y;3z;
We see that the maximum count of elements is 5, so we repeat the capturing group enough times to cover five elements.
/([0-9])([a-z]*);?(\1([a-z]);)?(\1([a-z]);)?(\1([a-z]);)?(\1([a-z]);)?/
And replace that with
\1:\2\4\6\8\10;
Then we get desired result:
1:abcde;2:defg;3:xyz;
You can apply this technique to your data if you're in a great hurry (though after two days I suppose you're not), but using a scripting language will be a better and cleaner solution.
For my simplified example you have to iterate through matches of /([0-9])[a-z];?(\1[a-z];?)*/. Those will be:
1a;1b;1c;1d;1e;
2d;2e;2f;2g;
3x;3y;3z;
And there you can capture all the values and bind them to the corresponding key, which is the same one throughout each iteration.
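To follow that advice for the original data, a small Node.js script along these lines could do the grouping in one pass (an untested sketch; it assumes the input lives in input.txt, keys are always three digits, and the inner objects contain no nested braces):
// Group repeated "NNN" : { ... } blocks under a single "NNN" : { "list" : [ ... ] }
var fs = require('fs');
var text = fs.readFileSync('input.txt', 'utf8');

var groups = {};   // key -> array of raw object bodies
var order = [];    // keys in first-seen order
var re = /"(\d{3})"\s*:\s*(\{[^{}]*\})/g;
var m;
while ((m = re.exec(text)) !== null) {
    if (!groups[m[1]]) { groups[m[1]] = []; order.push(m[1]); }
    groups[m[1]].push(m[2]);
}

var out = order.map(function(key){
    return '"' + key + '" :\n{ "list" : [\n' + groups[key].join(',\n') + '\n]\n}';
}).join(',\n');
console.log(out);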