Add service query argument, data-model site merge, and json_hpcdash format
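
Three additions to the analytics query handler:

* compose_query() and process_request() take a "service" argument,
  which is expanded via the data model into an OR across the
  service's slice names (service_to_sliceNames).
* New onlyLargest and mergeDataModelSites request options: the former
  keeps only the rows holding the maximum value of the named field;
  the latter annotates each site row with lat/long, site URL, node
  count, and a derived "hotness" value from the opencloud data model.
* A "json_hpcdash" output format that returns a site-keyed JSON
  object for the HPC dashboard.

An equivalent set of request parameters for process_request,
mirroring the query exercised in main() (values shown URL-unencoded):

    avg=%cpu
    count=%hostname
    computed=%bytes_sent/%elapsed
    service=HPC Service
    groupBy=MinuteTime,%site
    onlyLargest=MinuteTime
    mergeDataModelSites=1
    format=json_hpcdash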
diff --git a/planetstack/hpc_wizard/planetstack_analytics.py b/planetstack/hpc_wizard/planetstack_analytics.py
index d567e22..a558bbc 100644
--- a/planetstack/hpc_wizard/planetstack_analytics.py
+++ b/planetstack/hpc_wizard/planetstack_analytics.py
@@ -1,12 +1,38 @@
 from bigquery_analytics import BigQueryAnalytics
+import os
+import sys
 import json
 import traceback
+if os.path.exists("/home/smbaker/projects/vicci/plstackapi/planetstack"):
+    sys.path.append("/home/smbaker/projects/vicci/plstackapi/planetstack")
+else:
+    sys.path.append("/opt/planetstack")
+
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "planetstack.settings")
+from django import db
+from django.db import connection
+from core.models import Slice, Sliver, ServiceClass, Reservation, Tag, Network, User, Node, Image, Deployment, Site, NetworkTemplate, NetworkSlice, Service
+
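+# load thresholds used by merge_datamodel_sites to scale per-site "hotness"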
+BLUE_LOAD=5000000
+RED_LOAD=15000000
+
 class PlanetStackAnalytics(BigQueryAnalytics):
     def __init__(self, tableName="demoevents"):
         BigQueryAnalytics.__init__(self, tableName)
-    def compose_query(self, slice=None, site=None, node=None, timeField="MinuteTime", avg=[], sum=[], count=[], computed=[], groupBy=["MinuteTime"], orderBy=["MinuteTime"], tableName="demoevents"):
+    def service_to_sliceNames(self, serviceName):
+        service=Service.objects.get(name=serviceName)
+        try:
+            slices = service.slices.all()
+        except AttributeError:
+            # BUG in data model -- Slice.service has related name 'service' and
+            # it should be 'slices'
+            slices = service.service.all()
+
+        return [slice.name for slice in slices]
+
+    def compose_query(self, slice=None, site=None, node=None, service=None, timeField="MinuteTime", avg=[], sum=[], count=[], computed=[], groupBy=["MinuteTime"], orderBy=["MinuteTime"], tableName="demoevents"):
         tablePart = "%s.%s@-3600000--1" % ("vicci", tableName)
         fields = []
@@ -42,6 +68,11 @@
             fields.append("SUM(%s)%sSUM(%s) as %s" % (parts[0], operator, parts[1], computedFieldName))
             fieldNames.append(computedFieldName)
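+        # select any non-time groupBy fields so they appear in the
+        # subquery's output and can be grouped on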
+        for fieldName in groupBy:
+            if (fieldName not in ["MinuteTime", "HourTime", "DayTime"]):
+                fields.append(fieldName)
+                fieldNames.append(fieldName)
+
         fields = ", ".join(fields)
         where = []
@@ -52,6 +83,10 @@
             where.append("%%site='%s'" % site)
         if node:
             where.append("%%hostname='%s'" % node)
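+        # a service filter expands to an OR across the service's slice names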
+        if service:
+            sliceNames = self.service_to_sliceNames(service)
+            if sliceNames:
+                where.append("(" + " OR ".join(["%%slice='%s'" % sliceName for sliceName in sliceNames]) + ")")
         if where:
             where = " WHERE " + " AND ".join(where)
@@ -59,8 +94,10 @@
             where =""
         if groupBy:
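+            # group the subquery by hostname as well, so the outer
+            # query can aggregate each group across hosts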
+            groupBySub = " GROUP BY " + ",".join(groupBy + ["%hostname"])
             groupBy = " GROUP BY " + ",".join(groupBy)
         else:
+            groupBySub = " GROUP BY %hostname"
             groupBy = ""
         if orderBy:
@@ -72,14 +109,18 @@
         subQuery = "SELECT %%hostname, %s FROM [%s]" % (fields, tablePart)
         if where:
             subQuery = subQuery + where
-        subQuery = subQuery + " GROUP BY %s,%%hostname" % timeField
+        subQuery = subQuery + groupBySub
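+        # merge the per-hostname subquery rows: re-average the averages
+        # (also keeping their max), sum the counts/sums/computed fields,
+        # and pass plain groupBy fields through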
         sumFields = []
         for fieldName in fieldNames:
             if fieldName.startswith("avg"):
                 sumFields.append("AVG(%s) as avg_%s"%(fieldName,fieldName))
-            else:
+                sumFields.append("MAX(%s) as max_%s"%(fieldName,fieldName))
+            elif (fieldName.startswith("count")) or (fieldName.startswith("sum")) or (fieldName.startswith("computed")):
                 sumFields.append("SUM(%s) as sum_%s"%(fieldName,fieldName))
+            else:
+                sumFields.append(fieldName)
         sumFields = ",".join(sumFields)
@@ -132,6 +173,49 @@
             return ("text/html", new_result)
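+        # json_hpcdash: one object per site, keyed by site name;
+        # "health" is currently always reported as 0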
+        elif (format == "json_hpcdash"):
+            new_rows = {}
+            for row in result:
+                new_row = {"lat": float(row.get("lat", 0)),
+                           "long": float(row.get("long", 0)),
+                           "health": 0,
+                           "numNodes": int(row.get("numNodes",0)),
+                           "numHPCSlivers": int(row.get("sum_count_hostname", 0)),
+                           "siteUrl": row.get("url", ""),
+                           "hot": float(row.get("hotness", 0.0)),
+                           "load": int(float(row.get("max_avg_cpu", 0)))}
+                new_rows[row["site"]] = new_row
+            return ("application/javascript", json.dumps(new_rows))
+
+    def only_largest(self, rows, fieldName):
+        """ Given a fieldName, only return the set of rows that had the
+            maximum value of that fieldName.
+        """
+        if not rows:
+            return rows
+        maxVal = max( [int(row[fieldName]) for row in rows] )
+        new_rows = [row for row in rows if int(row[fieldName])==maxVal]
+        return new_rows
+
+    def merge_datamodel_sites(self, rows):
+        """ For a query that included "site" in its groupBy, merge in the
+            opencloud site information.
+        """
+        for row in rows:
+            sitename = row["site"]
+            try:
+                model_site = Site.objects.get(name=sitename)
+            except Site.DoesNotExist:
+                # we didn't find it in the data model
+                continue
+
+            row["lat"] = float(model_site.location.latitude)
+            row["long"] = float(model_site.location.longitude)
+            row["url"] = model_site.site_url
+            row["numNodes"] = model_site.nodes.count()
+
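+            # map the site's CPU percentage onto a load value and ramp
+            # hotness linearly: 0.0 at BLUE_LOAD, 1.0 at RED_LOAD (100% CPU)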
+            if "max_avg_cpu" in row:
+                cpu=float(row["max_avg_cpu"])/100.0
+                row["hotness"] = max(0.0, ((cpu*RED_LOAD) - BLUE_LOAD)/(RED_LOAD-BLUE_LOAD))
+
     def process_request(self, req):
         print req.GET
@@ -140,6 +224,7 @@
         slice = req.GET.get("slice", None)
         site = req.GET.get("site", None)
         node = req.GET.get("node", None)
+        service = req.GET.get("service", None)
         format = req.GET.get("format", "json_dicts")
@@ -152,8 +237,10 @@
         orderBy = self.get_list_from_req(req, "orderBy", ["MinuteTime"])
         maxRows = req.GET.get("maxRows", None)
+        onlyLargest = req.GET.get("onlyLargest", None)
+        mergeDataModelSites = req.GET.get("mergeDataModelSites", None)
-        q = self.compose_query(slice, site, node, timeField, avg, sum, count, computed, groupBy, orderBy)
+        q = self.compose_query(slice, site, node, service, timeField, avg, sum, count, computed, groupBy, orderBy)
         print q
@@ -164,6 +251,12 @@
         else:
             result = self.run_query(q)
+        if onlyLargest:
+            result = self.only_largest(result, onlyLargest)
+
+        if mergeDataModelSites:
+            self.merge_datamodel_sites(result)
+
         if maxRows:
             result = result[-int(maxRows):]
@@ -179,6 +272,7 @@
 def main():
     bq = PlanetStackAnalytics()
+    """
     q=bq.compose_query(avg=["%cpu"], count=["%hostname"], slice="HyperCache")
     print q
     bq.dump_table(bq.run_query(q))
@@ -187,12 +281,21 @@
     print
     print q
     bq.dump_table(bq.run_query(q))
-    #print bq.run_query_raw(q)
     q=bq.compose_query(timeField="HourTime", avg=["%cpu"], count=["%hostname"], computed=["%bytes_sent/%elapsed"], groupBy=["HourTime"], orderBy=["HourTime"])
     print
     print q
     bq.dump_table(bq.run_query(q))
+    """
+
+    q=bq.compose_query(avg=["%cpu"], count=["%hostname"], computed=["%bytes_sent/%elapsed"], service="HPC Service", groupBy=["MinuteTime","%site"])
+    print
+    print q
+    result=bq.run_query(q)
+    result = bq.only_largest(result, "MinuteTime")
+    bq.merge_datamodel_sites(result)
+    #bq.dump_table(result)
+    print bq.format_result("json_hpcdash", result, q)
 if __name__ == "__main__":
     main()