#!/usr/bin/python
## Copyright 2016 Open Networking Laboratory
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
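# This script reads the ISC DHCP lease file (and optionally a reservation
# file), turns the active leases into BIND "IN A" records written to an
# include file, and can verify each host with a ping, reload the DNS zone via
# rndc, and serve a small HTTP API that triggers a re-harvest on demand.
# (Python 2: it relies on the Queue, BaseHTTPServer, and optparse modules.)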
import sys, threading, subprocess, re, time, datetime, bisect, BaseHTTPServer
from optparse import OptionParser
from Queue import Queue, Empty

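# Timestamps in dhcpd.leases are either the literal string "never" or a
# weekday, date, and time (UTC), e.g. this illustrative line:
#
#   ends 3 2016/05/13 17:16:10;
#
# The weekday token is ignored and the remainder is parsed with strptime.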
def parse_timestamp(raw_str):
    tokens = raw_str.split()

    if len(tokens) == 1:
        if tokens[0].lower() == 'never':
            return 'never'

        else:
            raise Exception('Parse error in timestamp')

    elif len(tokens) == 3:
        return datetime.datetime.strptime(' '.join(tokens[1:]),
            '%Y/%m/%d %H:%M:%S')

    else:
        raise Exception('Parse error in timestamp')

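# The string 'never' sorts after every concrete timestamp: it compares as
# greater-or-equal to anything and less-than nothing.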
def timestamp_is_ge(t1, t2):
    if t1 == 'never':
        return True

    elif t2 == 'never':
        return False

    else:
        return t1 >= t2


def timestamp_is_lt(t1, t2):
    if t1 == 'never':
        return False

    elif t2 == 'never':
        return t1 != 'never'

    else:
        return t1 < t2


def timestamp_is_between(t, tstart, tend):
    return timestamp_is_ge(t, tstart) and timestamp_is_lt(t, tend)


def parse_hardware(raw_str):
    tokens = raw_str.split()

    if len(tokens) == 2:
        return tokens[1]

    else:
        raise Exception('Parse error in hardware')


def strip_endquotes(raw_str):
    return raw_str.strip('"')


def identity(raw_str):
    return raw_str


def parse_binding_state(raw_str):
    tokens = raw_str.split()

    if len(tokens) == 2:
        return tokens[1]

    else:
        raise Exception('Parse error in binding state')


def parse_next_binding_state(raw_str):
    tokens = raw_str.split()

    if len(tokens) == 3:
        return tokens[2]

    else:
        raise Exception('Parse error in next binding state')

def parse_rewind_binding_state(raw_str):
    tokens = raw_str.split()

    if len(tokens) == 3:
        return tokens[2]

    else:
        raise Exception('Parse error in rewind binding state')

def parse_res_fixed_address(raw_str):
    return raw_str

def parse_res_hardware(raw_str):
    tokens = raw_str.split()
    return tokens[1]

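# Parse a dhcpd reservation (host) file into records with the same keys as
# harvested leases (client-hostname, hardware, ip_address). An illustrative
# host block:
#
#   host node-1 {
#       hardware ethernet 00:16:3e:00:00:01;
#       fixed-address 10.1.0.50;
#   }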
def parse_reservation_file(res_file):
    valid_keys = {
        'hardware' : parse_res_hardware,
        'fixed-address' : parse_res_fixed_address,
    }

    res_db = {}
    res_rec = {}
    in_res = False
    for line in res_file:
        if line.lstrip().startswith('#'):
            continue
        tokens = line.split()

        if len(tokens) == 0:
            continue

        key = tokens[0].lower()

        if key == 'host':
            if not in_res:
                res_rec = {'hostname' : tokens[1]}
                in_res = True

            else:
                raise Exception("Parse error in reservation file")
        elif key == '}':
            if in_res:
                for k in valid_keys:
                    if callable(valid_keys[k]):
                        res_rec[k] = res_rec.get(k, '')
                    else:
                        res_rec[k] = False

                hostname = res_rec['hostname']

                if hostname in res_db:
                    res_db[hostname].insert(0, res_rec)

                else:
                    res_db[hostname] = [res_rec]

                res_rec = {}
                in_res = False

            else:
                raise Exception('Parse error in reservation file')

        elif key in valid_keys:
            if in_res:
                value = line[(line.index(key) + len(key)):]
                value = value.strip().rstrip(';').rstrip()

                if callable(valid_keys[key]):
                    res_rec[key] = valid_keys[key](value)
                else:
                    res_rec[key] = True

            else:
                raise Exception('Parse error in reservation file')

        else:
            if in_res:
                raise Exception('Parse error in reservation file')

    if in_res:
        raise Exception('Parse error in reservation file')

    # Turn the reservations into an array
    results = []
    for res in res_db:
        results.append({
            'client-hostname' : res_db[res][0]['hostname'],
            'hardware' : res_db[res][0]['hardware'],
            'ip_address' : res_db[res][0]['fixed-address'],
        })
    return results

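# Parse dhcpd.leases into a dict keyed by IP address; each value is a list of
# lease records with the most recent entry first. An illustrative lease block:
#
#   lease 10.1.0.5 {
#       starts 5 2016/05/13 17:06:10;
#       ends 5 2016/05/13 17:16:10;
#       binding state active;
#       hardware ethernet 00:16:3e:00:00:05;
#       client-hostname "node-5";
#   }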
def parse_leases_file(leases_file):
    valid_keys = {
        'starts': parse_timestamp,
        'ends': parse_timestamp,
        'tstp': parse_timestamp,
        'tsfp': parse_timestamp,
        'atsfp': parse_timestamp,
        'cltt': parse_timestamp,
        'hardware': parse_hardware,
        'binding': parse_binding_state,
        'next': parse_next_binding_state,
        'rewind': parse_rewind_binding_state,
        'uid': strip_endquotes,
        'client-hostname': strip_endquotes,
        'option': identity,
        'set': identity,
        'on': identity,
        'abandoned': None,
        'bootp': None,
        'reserved': None,
    }

    leases_db = {}

    lease_rec = {}
    in_lease = False
    in_failover = False

    for line in leases_file:
        if line.lstrip().startswith('#'):
            continue

        tokens = line.split()

        if len(tokens) == 0:
            continue

        key = tokens[0].lower()

        if key == 'lease':
            if not in_lease:
                ip_address = tokens[1]

                lease_rec = {'ip_address' : ip_address}
                in_lease = True

            else:
                raise Exception('Parse error in leases file')

        elif key == 'failover':
            in_failover = True
        elif key == '}':
            if in_lease:
                for k in valid_keys:
                    if callable(valid_keys[k]):
                        lease_rec[k] = lease_rec.get(k, '')
                    else:
                        lease_rec[k] = False

                ip_address = lease_rec['ip_address']

                if ip_address in leases_db:
                    leases_db[ip_address].insert(0, lease_rec)

                else:
                    leases_db[ip_address] = [lease_rec]

                lease_rec = {}
                in_lease = False

            elif in_failover:
                in_failover = False
                continue
            else:
                raise Exception('Parse error in leases file')

        elif key in valid_keys:
            if in_lease:
                value = line[(line.index(key) + len(key)):]
                value = value.strip().rstrip(';').rstrip()

                if callable(valid_keys[key]):
                    lease_rec[key] = valid_keys[key](value)
                else:
                    lease_rec[key] = True

            else:
                raise Exception('Parse error in leases file')

        else:
            if in_lease:
                raise Exception('Parse error in leases file')

    if in_lease:
        raise Exception('Parse error in leases file')

    return leases_db

def round_timedelta(tdelta):
    return datetime.timedelta(tdelta.days,
        tdelta.seconds + (0 if tdelta.microseconds < 500000 else 1))


def timestamp_now():
    n = datetime.datetime.utcnow()
    return datetime.datetime(n.year, n.month, n.day, n.hour, n.minute, n.second)


def lease_is_active(lease_rec, as_of_ts):
    return lease_rec['binding'] != 'free' and \
        timestamp_is_between(as_of_ts, lease_rec['starts'], lease_rec['ends'])


def ipv4_to_int(ipv4_addr):
    parts = ipv4_addr.split('.')
    return (int(parts[0]) << 24) + (int(parts[1]) << 16) + \
        (int(parts[2]) << 8) + int(parts[3])

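# Take the most recent lease record for each IP and keep only the active ones,
# using bisect on the integer form of the address so the result is sorted by IP.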
def select_active_leases(leases_db, as_of_ts):
    retarray = []
    sortedarray = []

    for ip_address in leases_db:
        lease_rec = leases_db[ip_address][0]

        if lease_is_active(lease_rec, as_of_ts):
            ip_as_int = ipv4_to_int(ip_address)
            insertpos = bisect.bisect(sortedarray, ip_as_int)
            sortedarray.insert(insertpos, ip_as_int)
            retarray.insert(insertpos, lease_rec)

    return retarray

def matched(regexes, target):
    if regexes is None:
        return False

    for r in regexes:
        if re.match(r, target) is not None:
            return True
    return False

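# Convert a duration such as '30s', '5m', '2h', or '1d' into seconds.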
def convert_to_seconds(time_val):
    num = int(time_val[:-1])
    if time_val.endswith('s'):
        return num
    elif time_val.endswith('m'):
        return num * 60
    elif time_val.endswith('h'):
        return num * 60 * 60
    elif time_val.endswith('d'):
        return num * 60 * 60 * 24

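# Verify a host by sending a single ICMP echo request; the timeout (a string
# number of seconds) is passed to ping's -w deadline option.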
def ping(ip, timeout):
    cmd = ['ping', '-c', '1', '-w', timeout, ip]
    try:
        subprocess.check_output(cmd)
        return True
    except subprocess.CalledProcessError:
        return False

def ping_worker(leases, to, respQ):
    for lease in leases:
        respQ.put(
            {
                'verified': ping(lease['ip_address'], to),
                'lease' : lease,
            })

def interruptable_get(q):
    while True:
        try:
            return q.get(timeout=1000)
        except Empty:
            pass

##############################################################################

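# Harvest active leases and reservations, optionally verify each host with a
# ping, de-duplicate hostnames (suffixing the MAC when a name repeats), and
# write the entries that match the include list or regex filter as BIND A
# records to the destination file. Returns the number of entries that matched.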
def harvest(options):

    ifilter = None
    if options.include is not None:
        ifilter = options.include.translate(None, ' ').split(',')

    rfilter = None
    if options.filter is not None:
        rfilter = options.filter.split(',')

    myfile = open(options.leases, 'r')
    leases = parse_leases_file(myfile)
    myfile.close()

    reservations = []
    try:
        with open(options.reservations, 'r') as res_file:
            reservations = parse_reservation_file(res_file)
    except IOError:
        pass

    now = timestamp_now()
    report_dataset = select_active_leases(leases, now) + reservations

    verified = []
    if options.verify:

        # To verify that lease information is valid, i.e. that the host which received the lease is
        # still responding, we ping the host. Not perfect, but good enough for the main use case. As
        # the lease file can get long, a little concurrency is used: the lease list is divided among
        # workers and each worker pings its share.
        respQ = Queue()
        to = str(convert_to_seconds(options.timeout))
        share = int(len(report_dataset) / options.worker_count)
        extra = len(report_dataset) % options.worker_count
        start = 0
        for idx in range(0, options.worker_count):
            end = start + share
            if extra > 0:
                end = end + 1
                extra = extra - 1
            worker = threading.Thread(target=ping_worker, args=(report_dataset[start:end], to, respQ))
            worker.daemon = True
            worker.start()
            start = end

        # All the verification work has been farmed out to worker threads, so sit back and wait for
        # responses. Once all responses are received we are done. A timeout should probably be added
        # here as well, but for now we expect a response for every lease, either positive or negative.
        count = 0
        while count != len(report_dataset):
            resp = interruptable_get(respQ)
            count = count + 1
            if resp['verified']:
                print("INFO: verified host '%s' with address '%s'" % (resp['lease']['client-hostname'], resp['lease']['ip_address']))
                verified.append(resp['lease'])
            else:
                print("INFO: dropping host '%s' with address '%s' (not verified)" % (resp['lease']['client-hostname'], resp['lease']['ip_address']))
    else:
        verified = report_dataset

    # Look for duplicate names and add the compressed MAC as a suffix
    names = {}
    for lease in verified:
        # If there is no client hostname, use the MAC address
        name = lease.get('client-hostname', '')
        if len(name) == 0:
            name = "UNK-" + lease['hardware'].translate(None, ':').upper()

        if name in names:
            names[name] = '+'
        else:
            names[name] = '-'

    size = 0
    count = 0
    for lease in verified:
        name = lease.get('client-hostname', '')
        if len(name) == 0:
            name = "UNK-" + lease['hardware'].translate(None, ':').upper()

        if (ifilter is not None and name in ifilter) or matched(rfilter, name):
            if names[name] == '+':
                lease['client-hostname'] = name + '-' + lease['hardware'].translate(None, ':').upper()
            size = max(size, len(lease['client-hostname']))
            count += 1

    if options.dest == '-':
        out = sys.stdout
    else:
        out = open(options.dest, 'w+')

    for lease in verified:
        name = lease.get('client-hostname', '')
        if len(name) == 0:
            name = "UNK-" + lease['hardware'].translate(None, ':').upper()

        if (ifilter is not None and name in ifilter) or matched(rfilter, name):
            out.write(format(name, '<' + str(size)) + ' IN A ' + lease['ip_address'] + ' ; ' + lease['hardware'] + '\n')
    if options.dest != '-':
        out.close()
    return count

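# Ask the name server to reload its zone data via rndc so the freshly written
# include file takes effect.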
def reload_zone(rndc, server, port, key, zone):
    cmd = [rndc, '-s', server]
    if key is not None:
        cmd.extend(['-c', key])
    cmd.extend(['-p', port, 'reload'])
    if zone is not None:
        cmd.append(zone)

    try:
        subprocess.check_output(cmd)
        print("INFO: [%s UTC] updated DNS server" % time.asctime(time.gmtime()))
    except subprocess.CalledProcessError as e:
        print("ERROR: failed to update DNS server, exit code %d" % e.returncode)
        print(e.output)

def handleRequestsUsing(requestQ):
    return lambda *args: ApiHandler(requestQ, *args)

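# Minimal HTTP API: POST /harvest queues a request for the harvester thread and
# replies with { "response" : "OK" } or { "response" : "QUIET" }, depending on
# whether a harvest actually ran (see harvester below).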
class ApiHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    def __init__(s, requestQ, *args):
        s.requestQ = requestQ
        BaseHTTPServer.BaseHTTPRequestHandler.__init__(s, *args)

    def do_HEAD(s):
        s.send_response(200)
        s.send_header("Content-type", "application/json")
        s.end_headers()

    def do_POST(s):
        if s.path == '/harvest':
            waitQ = Queue()
            s.requestQ.put(waitQ)
            resp = waitQ.get(block=True, timeout=None)
            s.send_response(200)
            s.send_header('Content-type', 'application/json')
            s.end_headers()

            if resp == "QUIET":
                s.wfile.write('{ "response" : "QUIET" }')
            else:
                s.wfile.write('{ "response" : "OK" }')

        else:
            s.send_response(404)

    def do_GET(s):
        """Respond to a GET request."""
        s.send_response(404)

def do_api(hostname, port, requestQ):
    server_class = BaseHTTPServer.HTTPServer
    httpd = server_class((hostname, int(port)), handleRequestsUsing(requestQ))
    print("INFO: [%s UTC] Start API server on %s:%s" % (time.asctime(time.gmtime()), hostname, port))
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        pass
    httpd.server_close()
    print("INFO: [%s UTC] Stop API server on %s:%s" % (time.asctime(time.gmtime()), hostname, port))

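# Worker loop for repeat mode: waits for harvest requests on requestQ and runs
# work_field(), but no more often than the configured quiet period; callers
# that supply a response queue are told "OK" or "QUIET".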
def harvester(options, requestQ):
    quiet = convert_to_seconds(options.quiet)
    last = -1
    resp = "OK"
    while True:
        responseQ = requestQ.get(block=True, timeout=None)
        if last == -1 or (time.time() - last) > quiet:
            work_field(options)
            last = time.time()
            resp = "OK"
        else:
            resp = "QUIET"

        if responseQ is not None:
            responseQ.put(resp)

def work_field(options):
    start = datetime.datetime.now()
    print("INFO: [%s UTC] starting to harvest hosts from DHCP" % (time.asctime(time.gmtime())))
    count = harvest(options)
    end = datetime.datetime.now()
    delta = end - start
    print("INFO: [%s UTC] harvested %d hosts, taking %d seconds" % (time.asctime(time.gmtime()), count, delta.seconds))
    if options.update:
        reload_zone(options.rndc, options.server, options.port, options.key, options.zone)

def main():
    parser = OptionParser()
    parser.add_option('-l', '--leases', dest='leases', default='/dhcp/dhcpd.leases',
        help="specifies the DHCP lease file from which to harvest")
    parser.add_option('-x', '--reservations', dest='reservations', default='/etc/dhcp/dhcpd.reservations',
        help="specifies the reservation file, as ISC DHCP doesn't update the lease file for fixed addresses")
    parser.add_option('-d', '--dest', dest='dest', default='/bind/dhcp_harvest.inc',
        help="specifies the file to which to write the additional DNS information")
    parser.add_option('-i', '--include', dest='include', default=None,
        help="list of hostnames to include when harvesting DNS information")
    parser.add_option('-f', '--filter', dest='filter', default=None,
        help="list of regex expressions to use as an include filter")
    parser.add_option('-r', '--repeat', dest='repeat', default=None,
        help="continue to harvest DHCP information at the specified interval")
    parser.add_option('-c', '--command', dest='rndc', default='rndc',
        help="shell command to execute to cause a reload")
    parser.add_option('-k', '--key', dest='key', default=None,
        help="rndc key file to use to access the DNS server")
    parser.add_option('-s', '--server', dest='server', default='127.0.0.1',
        help="server to reload after generating updated DNS information")
    parser.add_option('-p', '--port', dest='port', default='954',
        help="port on the server to contact to trigger the reload")
    parser.add_option('-z', '--zone', dest='zone', default=None,
        help="zone to reload after generating updated DNS information")
    parser.add_option('-u', '--update', dest='update', default=False, action='store_true',
        help="update the DNS server by reloading the zone")
    parser.add_option('-y', '--verify', dest='verify', default=False, action='store_true',
        help="verify the hosts with a ping before pushing them to DNS")
    parser.add_option('-t', '--timeout', dest='timeout', default='1s',
        help="specifies the duration to wait for a verification ping from a host")
    parser.add_option('-a', '--apiserver', dest='apiserver', default='0.0.0.0',
        help="specifies the interfaces on which to listen for API requests")
    parser.add_option('-e', '--apiport', dest='apiport', default='8954',
        help="specifies the port on which to listen for API requests")
    parser.add_option('-q', '--quiet', dest='quiet', default='1m',
        help="specifies a minimum quiet period between harvests")
    parser.add_option('-w', '--workers', dest='worker_count', type='int', default=5,
        help="specifies the number of workers to use when verifying IP addresses")

    (options, args) = parser.parse_args()

    # Kick off a thread to listen for HTTP requests to force a re-evaluation
    requestQ = Queue()
    api = threading.Thread(target=do_api, args=(options.apiserver, options.apiport, requestQ))
    api.daemon = True
    api.start()

    if options.repeat is None:
        work_field(options)
    else:
        secs = convert_to_seconds(options.repeat)
        farmer = threading.Thread(target=harvester, args=(options, requestQ))
        farmer.daemon = True
        farmer.start()
        while True:
            cropQ = Queue()
            requestQ.put(cropQ)
            interruptable_get(cropQ)
            time.sleep(secs)

if __name__ == "__main__":
    main()