hpc_watcher: save per-URL fetch details and expose them in the HPC view

For every successful health-check fetch, record the number of bytes
downloaded and the total transfer time, publish the per-sliver list of
(url, status, bytes, seconds) entries as JSON under a new
"watcher.HPC-fetch-urls" tag, and surface that list to the HPC view as
"watcher.HPC-fetch.urls", defaulting to an empty list when the tag has
not been written yet.
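
As a rough sketch of the round-trip (the URL, byte count, and timing below
are illustrative values, not real measurements): the watcher serializes the
per-URL tuples with json.dumps, and the view decodes the stored tag back into
a list with the new json_default helper:

    import json

    def json_default(d, default):
        if not d:
            return default
        return json.loads(d)

    # What the watcher would store in the "watcher.HPC-fetch-urls" tag for
    # one sliver; JSON turns the tuples into lists on the way back out.
    url_status = [("http://node1.example.org/test.html", "success", 1024, 0.042)]
    stored = json.dumps(url_status)

    print json_default(stored, [])   # [[u'http://node1.example.org/test.html', u'success', 1024, 0.042]]
    print json_default(None, [])     # [] when the tag is missing
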
diff --git a/xos/core/xoslib/methods/hpcview.py b/xos/core/xoslib/methods/hpcview.py
index 77ca7ed..e4b39d7 100644
--- a/xos/core/xoslib/methods/hpcview.py
+++ b/xos/core/xoslib/methods/hpcview.py
@@ -39,6 +39,11 @@
     else:
         return None
 
+def json_default(d, default):
+    if not d:
+        return default
+    return json.loads(d)
+
 def compute_config_run(d):
     if not d:
         return "null"
@@ -178,6 +183,7 @@
             "watcher.HPC-hb.time": lookup_time(hpc_service, sliver, "watcher.HPC-hb.time"),
             "watcher.HPC-fetch.msg": lookup_tag(hpc_service, sliver, "watcher.HPC-fetch.msg"),
             "watcher.HPC-fetch.time": lookup_time(hpc_service, sliver, "watcher.HPC-fetch.time"),
+            "watcher.HPC-fetch.urls": json_default(lookup_tag(hpc_service, sliver, "watcher.HPC-fetch-urls.msg"), []),
             "config_age": compute_config_run(watcherd_hpc),
             })
diff --git a/xos/observers/hpc/hpc_watcher.py b/xos/observers/hpc/hpc_watcher.py
index 177d17f..2af9d00 100644
--- a/xos/observers/hpc/hpc_watcher.py
+++ b/xos/observers/hpc/hpc_watcher.py
@@ -269,6 +269,7 @@
                 raise
             response_code = c.getinfo(c.RESPONSE_CODE)
             bytes_downloaded = int(c.getinfo(c.SIZE_DOWNLOAD))
+            total_time = float(c.getinfo(c.TOTAL_TIME))
         except Exception, e:
             #traceback.print_exc()
             job["status"] = self.curl_error_message(e)
@@ -285,6 +286,8 @@
             return
 
         job["status"] = "success"
+        job["bytes_downloaded"] = bytes_downloaded
+        job["total_time"] = total_time
 
 class WatcherWorker(Thread):
     def __init__(self, queue):
@@ -507,6 +510,7 @@
     def fetch_hpc(self, service, slivers):
         for sliver in slivers:
             sliver.has_error = False
+            sliver.url_status = []
 
         checks = HpcHealthCheck.objects.filter(kind="http")
         if not checks:
@@ -524,10 +528,13 @@
         while self.fetch_queue.outstanding > 0:
             result = self.fetch_queue.get_result()
             sliver = result["sliver"]
+            if (result["status"] == "success"):
+                sliver.url_status.append( (result["url"], "success", result["bytes_downloaded"], result["total_time"]) )
             if (result["status"]!="success") and (not sliver.has_error):
                 self.set_status(sliver, service, "watcher.HPC-fetch", result["status"])
 
         for sliver in slivers:
+            self.set_status(sliver, service, "watcher.HPC-fetch-urls", json.dumps(sliver.url_status))
             if not sliver.has_error:
                 self.set_status(sliver, service, "watcher.HPC-fetch", "success")
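
For reference, the new bytes_downloaded and total_time values come straight
from pycurl's per-transfer info. A minimal, self-contained sketch of reading
them (the URL handling below is simplified and is not the watcher's actual
fetch path):

    import pycurl
    from StringIO import StringIO

    def fetch_once(url):
        buf = StringIO()
        c = pycurl.Curl()
        c.setopt(c.URL, url)
        c.setopt(c.WRITEFUNCTION, buf.write)
        c.perform()
        response_code = c.getinfo(c.RESPONSE_CODE)            # HTTP status code
        bytes_downloaded = int(c.getinfo(c.SIZE_DOWNLOAD))    # body size in bytes
        total_time = float(c.getinfo(c.TOTAL_TIME))           # whole transfer, in seconds
        c.close()
        return (response_code, bytes_downloaded, total_time)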