Scott Baker | c7325a4 | 2014-05-30 16:06:46 -0700 | [diff] [blame] | 1 | import os |
| 2 | import sys |
| 3 | from django.views.generic import TemplateView, View |
| 4 | import datetime |
| 5 | from pprint import pprint |
| 6 | import json |
jcnelson | 160012b | 2014-07-10 19:32:58 -0400 | [diff] [blame] | 7 | from syndicate_storage.models import * |
Scott Baker | c7325a4 | 2014-05-30 16:06:46 -0700 | [diff] [blame] | 8 | from core.models import * |
| 9 | from hpc.models import ContentProvider |
| 10 | from operator import attrgetter |
| 11 | from django import template |
| 12 | from django.views.decorators.csrf import csrf_exempt |
| 13 | from django.http import HttpResponse, HttpResponseServerError, HttpResponseForbidden |
| 14 | from django.core import urlresolvers |
| 15 | from django.contrib.gis.geoip import GeoIP |
| 16 | from django.db.models import Q |
| 17 | from ipware.ip import get_ip |
| 18 | from operator import itemgetter, attrgetter |
| 19 | import traceback |
| 20 | import math |
| 21 | |
# Make the bigquery analytics helpers importable: prefer the developer's
# local checkout when it exists, otherwise fall back to the installed
# location.  The import below must come after the path is appended.
if os.path.exists("/home/smbaker/projects/vicci/cdn/bigquery"):
    sys.path.append("/home/smbaker/projects/vicci/cdn/bigquery")
else:
    sys.path.append("/opt/planetstack/hpc_wizard")
from planetstack_analytics import DoPlanetStackAnalytics, PlanetStackAnalytics, RED_LOAD, BLUE_LOAD
| 27 | |
def getDashboardContext(user, context=None, tableFormat=False):
    """Assemble the template context for the user's dashboard page.

    Arguments:
        user -- the requesting User; drives slice and dashboard lookups.
        context -- accepted for backward compatibility but ignored: a fresh
            dict is always built.  (The original mutable default `{}` was
            never read either -- it was overwritten on the first line --
            but a mutable default is a latent bug, so it is now None.)
        tableFormat -- when True, wrap the slice info for the table widget.

    Returns the context dict with keys: userSliceInfo, cdnData,
    cdnContentProviders, dashboards, unusedDashboards.
    """
    context = {}

    userSliceData = getSliceInfo(user)
    if tableFormat:
        context['userSliceInfo'] = userSliceTableFormatter(userSliceData)
    else:
        context['userSliceInfo'] = userSliceData
    # wait=False so the first page render doesn't block on bigquery;
    # the dashboard polls and picks the data up later.
    context['cdnData'] = getCDNOperatorData(wait=False)
    context['cdnContentProviders'] = getCDNContentProviderData()

    (dashboards, unusedDashboards) = getDashboards(user)
    # "Customize" is a pseudo-dashboard; never offer it as an addable view.
    unusedDashboards = [x for x in unusedDashboards if x != "Customize"]
    context['dashboards'] = dashboards
    context['unusedDashboards'] = unusedDashboards

    return context
| 45 | |
def getDashboards(user):
    """Split dashboard names into (active for this user, all others).

    Returns a pair of lists: the names of the user's own dashboards, and
    the names of every DashboardView the user has not enabled.
    """
    active_names = [dashboard.name for dashboard in user.get_dashboards()]
    unused_names = [view.name
                    for view in DashboardView.objects.all()
                    if view.name not in active_names]
    return (active_names, unused_names)
| 57 | |
| 58 | def getSliceInfo(user): |
| 59 | sliceList = Slice.objects.all() |
| 60 | slicePrivs = SlicePrivilege.objects.filter(user=user) |
| 61 | userSliceInfo = [] |
| 62 | for entry in slicePrivs: |
| 63 | |
Scott Baker | d7c3131 | 2014-11-05 11:05:45 -0800 | [diff] [blame] | 64 | slice = Slice.objects.filter(id=entry.slice.id) |
| 65 | if not slice: |
| 66 | # the privilege is to a slice that doesn't exist |
| 67 | print "data model consistency problem, slice %s doesn't exist" % entry.slice.id |
| 68 | continue |
| 69 | slice = slice[0] |
| 70 | slicename = slice.name |
Scott Baker | c7325a4 | 2014-05-30 16:06:46 -0700 | [diff] [blame] | 71 | sliverList=Sliver.objects.all() |
| 72 | sites_used = {} |
| 73 | for sliver in slice.slivers.all(): |
| 74 | #sites_used['deploymentSites'] = sliver.node.deployment.name |
| 75 | # sites_used[sliver.image.name] = sliver.image.name |
Scott Baker | 4f3c9d5 | 2014-09-02 17:38:40 -0700 | [diff] [blame] | 76 | sites_used[sliver.node.site.name] = 1 #sliver.numberCores |
Scott Baker | c7325a4 | 2014-05-30 16:06:46 -0700 | [diff] [blame] | 77 | sliceid = Slice.objects.get(id=entry.slice.id).id |
| 78 | try: |
| 79 | sliverList = Sliver.objects.filter(slice=entry.slice.id) |
| 80 | siteList = {} |
| 81 | for x in sliverList: |
| 82 | if x.node.site not in siteList: |
| 83 | siteList[x.node.site] = 1 |
| 84 | slivercount = len(sliverList) |
| 85 | sitecount = len(siteList) |
| 86 | except: |
| 87 | traceback.print_exc() |
| 88 | slivercount = 0 |
| 89 | sitecount = 0 |
| 90 | |
| 91 | userSliceInfo.append({'slicename': slicename, 'sliceid':sliceid, |
| 92 | 'sitesUsed':sites_used, |
| 93 | 'role': SliceRole.objects.get(id=entry.role.id).role, |
| 94 | 'slivercount': slivercount, |
| 95 | 'sitecount':sitecount}) |
| 96 | |
| 97 | return userSliceInfo |
| 98 | |
def getCDNContentProviderData():
    """Return a list of {'name', 'account'} dicts, one per ContentProvider."""
    return [{"name": provider.name, "account": provider.account}
            for provider in ContentProvider.objects.all()]
| 107 | |
def getCDNOperatorData(randomizeData=False, wait=True):
    """Build the per-site status dict consumed by the CDN Operations view.

    Combines bigquery heartbeat statistics (measured load / bandwidth)
    with data-model sliver allocations for the "HyperCache" slice.

    Arguments:
        randomizeData -- accepted for interface compatibility; unused here.
        wait -- passed through to the cached-query fetch; False lets the
            first page render return immediately with no rows.

    Returns {site name: row dict} where each row has lat/long, health,
    numNodes, activeHPCSlivers, numHPCSlivers, siteUrl, bandwidth, load, hot.
    """
    HPC_SLICE_NAME = "HyperCache"

    bq = PlanetStackAnalytics()

    rows = bq.get_cached_query_results(bq.compose_cached_query(), wait)

    # wait=False on the first time the Dashboard is opened. This means we might
    # not have any rows yet. The dashboard code polls every 30 seconds, so it
    # will eventually pick them up.

    stats_rows = {}
    if rows:
        rows = bq.postprocess_results(rows, filter={"event": "hpc_heartbeat"}, maxi=["cpu"], count=["hostname"], computed=["bytes_sent/elapsed"], groupBy=["Time","site"], maxDeltaTime=80)

        # dictionaryize the statistics rows by site name
        for row in rows:
            stats_rows[row["site"]] = row

    # slivers of the HPC slice, according to the data model
    hpc_slice = Slice.objects.filter(name=HPC_SLICE_NAME)
    if hpc_slice:
        slice_slivers = list(hpc_slice[0].slivers.all())
    else:
        slice_slivers = []

    new_rows = {}
    for site in Site.objects.all():
        # compute number of slivers allocated to this site in the data model
        allocated_slivers = 0
        for sliver in slice_slivers:
            if sliver.node.site == site:
                allocated_slivers = allocated_slivers + 1

        stats_row = stats_rows.get(site.name, {})

        max_cpu = stats_row.get("max_avg_cpu", stats_row.get("max_cpu", 0))
        cpu = float(max_cpu) / 100.0
        # scale measured cpu into a 0..1 "hotness" between BLUE_LOAD and RED_LOAD
        hotness = max(0.0, ((cpu * RED_LOAD) - BLUE_LOAD) / (RED_LOAD - BLUE_LOAD))

        try:
            lat = float(site.location.latitude)
            lon = float(site.location.longitude)
        except Exception:
            # no usable location record; plot the site at the origin
            lat = 0
            lon = 0

        # format it to what the CDN Operations View is expecting
        new_rows[str(site.name)] = {
            "lat": lat,
            "long": lon,
            "health": 0,
            "numNodes": int(site.nodes.count()),
            "activeHPCSlivers": int(stats_row.get("count_hostname", 0)),  # measured, from bigquery
            "numHPCSlivers": allocated_slivers,                           # allocated, from data model
            "siteUrl": str(site.site_url),
            "bandwidth": stats_row.get("sum_computed_bytes_sent_div_elapsed", 0),
            "load": max_cpu,
            "hot": float(hotness)}

    # Drop sites with 0 slivers that sit within 100km of a site with >0
    # slivers.  Two fixes over the original: iterate over a snapshot so the
    # deletion is safe (deleting while iterating .items() breaks on Python 3),
    # and compare with >0 -- the original >=0 also matched other empty sites,
    # contrary to the stated intent of the filter.
    for (name, row) in list(new_rows.items()):
        if row["numHPCSlivers"] != 0:
            continue
        for other in list(new_rows.values()):
            if (row != other) and (other["numHPCSlivers"] > 0):
                if haversine(row["lat"], row["long"], other["lat"], other["long"]) < 100:
                    del new_rows[name]
                    break

    return new_rows
| 182 | |
Scott Baker | 866c5b3 | 2014-08-29 11:34:00 -0700 | [diff] [blame] | 183 | def slice_increase_slivers(user, user_ip, siteList, slice, image, count, noAct=False): |
Scott Baker | c7325a4 | 2014-05-30 16:06:46 -0700 | [diff] [blame] | 184 | sitesChanged = {} |
| 185 | |
| 186 | # let's compute how many slivers are in use in each node of each site |
| 187 | for site in siteList: |
| 188 | site.nodeList = list(site.nodes.all()) |
| 189 | for node in site.nodeList: |
| 190 | node.sliverCount = 0 |
| 191 | for sliver in node.slivers.all(): |
| 192 | if sliver.slice.id == slice.id: |
| 193 | node.sliverCount = node.sliverCount + 1 |
| 194 | |
| 195 | # Allocate slivers to nodes |
| 196 | # for now, assume we want to allocate all slivers from the same site |
| 197 | nodes = siteList[0].nodeList |
| 198 | while (count>0): |
| 199 | # Sort the node list by number of slivers per node, then pick the |
| 200 | # node with the least number of slivers. |
| 201 | nodes = sorted(nodes, key=attrgetter("sliverCount")) |
| 202 | node = nodes[0] |
| 203 | |
| 204 | print "adding sliver at node", node.name, "of site", node.site.name |
| 205 | |
| 206 | if not noAct: |
| 207 | sliver = Sliver(name=node.name, |
| 208 | slice=slice, |
| 209 | node=node, |
Scott Baker | 866c5b3 | 2014-08-29 11:34:00 -0700 | [diff] [blame] | 210 | image = image, |
Scott Baker | c7325a4 | 2014-05-30 16:06:46 -0700 | [diff] [blame] | 211 | creator = User.objects.get(email=user), |
Scott Baker | 4f3c9d5 | 2014-09-02 17:38:40 -0700 | [diff] [blame] | 212 | deploymentNetwork=node.deployment) |
Scott Baker | c7325a4 | 2014-05-30 16:06:46 -0700 | [diff] [blame] | 213 | sliver.save() |
| 214 | |
| 215 | node.sliverCount = node.sliverCount + 1 |
| 216 | |
| 217 | count = count - 1 |
| 218 | |
| 219 | sitesChanged[node.site.name] = sitesChanged.get(node.site.name,0) + 1 |
| 220 | |
| 221 | return sitesChanged |
| 222 | |
| 223 | def slice_decrease_slivers(user, siteList, slice, count, noAct=False): |
| 224 | sitesChanged = {} |
Scott Baker | c7325a4 | 2014-05-30 16:06:46 -0700 | [diff] [blame] | 225 | if siteList: |
| 226 | siteNames = [site.name for site in siteList] |
| 227 | else: |
| 228 | siteNames = None |
| 229 | |
Scott Baker | 86baef6 | 2014-07-11 09:48:45 -0700 | [diff] [blame] | 230 | for sliver in list(slice.slivers.all()): |
Scott Baker | c7325a4 | 2014-05-30 16:06:46 -0700 | [diff] [blame] | 231 | if count>0: |
Scott Baker | 86baef6 | 2014-07-11 09:48:45 -0700 | [diff] [blame] | 232 | if(not siteNames) or (sliver.node.site.name in siteNames): |
| 233 | sliver.delete()
|
| 234 | print "deleting sliver",sliver.name,"at node",sliver.node.name
|
| 235 | count=count-1
|
| 236 | sitesChanged[sliver.node.site.name] = sitesChanged.get(sliver.node.site.name,0) - 1
|
Scott Baker | c7325a4 | 2014-05-30 16:06:46 -0700 | [diff] [blame] | 237 |
|
| 238 | return sitesChanged |
| 239 | |
def haversine(site_lat, site_lon, lat, lon):
    """Great-circle distance in kilometers between two (lat, lon) points.

    Returns 0 when any coordinate is falsy (0, None, empty string) -- the
    callers use that as a "location unknown" signal.
    """
    if not (lat and lon and site_lat and site_lon):
        return 0

    lat1 = float(site_lat)
    lon1 = float(site_lon)
    lat2 = float(lat)
    lon2 = float(lon)

    earth_radius_km = 6378.1
    half_dlat = math.radians((lat2 - lat1) / 2.0)
    half_dlon = math.radians((lon2 - lon1) / 2.0)

    a = (math.sin(half_dlat) ** 2
         + math.cos(math.radians(lat2)) * math.cos(math.radians(lat1))
           * math.sin(half_dlon) ** 2)
    central_angle = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))

    return earth_radius_km * central_angle
| 253 | |
def userSliceTableFormatter(data):
    """Wrap slice data in the {'rows': ...} envelope the table widget expects."""
    return {'rows': data}