I used AJAX to read the result of my query:
// Fetch aggregated flood-hazard counts from the server and populate the
// DataTable. `r` (the DataTable instance), jQuery `$` and underscore `_`
// are expected to exist in the enclosing scope.
var loadAttempts = 0;
var MAX_LOAD_ATTEMPTS = 3;

function e() {
    $.ajax({
        url: "tbl/",
        dataType: "json",
        beforeSend: function () {
            $("#info_tbl")
                .show()
                .text("Getting data from server...Please wait.")
                .addClass("getme");
        },
        success: function (data) {
            loadAttempts = 0; // reset the retry budget on success

            // Sum a list of numeric strings, treating null/undefined as 0.
            function total(values) {
                return _.reduce(values, function (acc, v) {
                    return acc + parseFloat(v || 0);
                }, 0);
            }

            $("#info_tbl")
                .text("Successfully Loaded!")
                .fadeOut(1e3)
                .removeClass("getme");
            $("#myErrorWrapper").hide();
            $("#table_wrapper").show();

            // BUG FIX: _.groupBy takes a single iteratee, so the original
            // groupBy("brgy_locat", "municipali") grouped by barangay name
            // only — same-named barangays in different municipalities were
            // merged. Group on a composite key instead.
            var rows = _.chain(data)
                .flatten()
                .groupBy(function (item) {
                    return item.municipali + "|" + item.brgy_locat;
                })
                .map(function (group) {
                    return {
                        municipality: group[0].municipali,
                        brgy: group[0].brgy_locat,
                        low: total(_.pluck(group, "countlow")),
                        medium: total(_.pluck(group, "countmedium")),
                        high: total(_.pluck(group, "counthigh"))
                    };
                })
                .value();

            r.fnClearTable();
            for (var i = 0; i < rows.length; i++) {
                r.fnAddData([
                    rows[i].municipality,
                    rows[i].brgy,
                    rows[i].low || 0,
                    rows[i].medium || 0,
                    rows[i].high || 0
                ]);
            }
        },
        error: function () {
            // BUG FIX: the original called e() on every error, retrying
            // forever and hammering the server whenever it was down. Show
            // the error and retry only a bounded number of times.
            $("#myErrorWrapper").show();
            if (++loadAttempts < MAX_LOAD_ATTEMPTS) {
                e();
            }
        }
    });
}
Is there any way to make it more efficient?
I am using PostgreSQL with the PostGIS extension. The data I've been trying to query from the table consists of:
- 28k+ rows (multipolygon field and its attributes;varchar,int) -
FloodHazard
- 2,500+ rows (multipolygon field and its attributes;varchar,int) -
PolyStructures
That's why it takes a minute or more to show the result.
This is my Django view (and its query) that identifies whether each building is affected by a flood hazard:
def floodhazard_tbl(request):
    """Count structures inside each flood-hazard polygon and return JSON.

    For every FloodHazard feature (per hazard level High/Medium/Low) the
    view counts the PolyStructures whose geometry falls within the feature,
    grouped by ('brgy_locat', 'municipali'). The response body is a list of
    per-feature result lists; each inner list holds dicts with
    'brgy_locat', 'municipali' and one per-level count key
    ('counthigh' / 'countmedium' / 'countlow') — the same shape the
    client-side underscore.js code expects.

    NOTE(review): a non-GET request falls through and returns None (a
    server error in Django), same as the original — confirm callers only
    ever issue GET.
    """
    if request.method == "GET":
        to_json = []
        # One pass per hazard level. Iterating the queryset directly fixes
        # the original N+1 pattern (values_list('id', flat=True) followed
        # by a .get(id=myid) per id), which issued two queries per hazard
        # feature instead of one.
        for hazard_level, count_key in (
            ('High', 'counthigh'),
            ('Medium', 'countmedium'),
            ('Low', 'countlow'),
        ):
            # .only('geom') keeps each row fetch small; a spatial query per
            # feature is still needed because geom__within takes a single
            # geometry.
            for feature in FloodHazard.objects.filter(hazard=hazard_level).only('geom'):
                counts = list(
                    PolyStructures.objects
                    .filter(geom__within=feature.geom)
                    .values('brgy_locat', 'municipali')
                    .annotate(**{count_key: Count('brgy_locat')})
                )
                to_json.append(counts)
        # BUG FIX: the original wrapped json.dumps(...) in list(), turning
        # the JSON string into a list of single characters that
        # HttpResponse had to re-join. Pass the string directly.
        return HttpResponse(json.dumps(to_json), content_type='application/json')
It returns JSON data, which is then reshaped with underscore.js and displayed using DataTables.