#!/usr/bin/env python

# Copyright 2017-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# imagebuilder.py
# rebuilds/fetches docker container images per their git status in repo
# in addition to docker, needs `sudo apt-get install python-git`

import argparse
import datetime
import git
import json
import logging
import os
import re
import string
import sys
import tarfile
import tempfile
import time
import xml.etree.ElementTree as ET
import yaml

global args
global conf
global build_tag
global buildable_images
global pull_only_images


def setup_logging(name=None, logfile=False):
    global args

    if name:
        log = logging.getLogger("-".join([__name__, name]))
    else:
        log = logging.getLogger(__name__)

    slh = logging.StreamHandler(sys.stdout)
    slh.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
    slh.setLevel(logging.DEBUG)

    log.addHandler(slh)

    # secondary logging to a file, always DEBUG level
    if logfile:
        fn = os.path.join(conf.logdir, "%s.log" % name)
        flh = logging.FileHandler(fn)
        flh.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
        flh.setLevel(logging.DEBUG)
        log.addHandler(flh)

    return log


LOG = setup_logging()


def parse_args():
    global args

    parser = argparse.ArgumentParser()

    parser.add_argument('-c', '--container_list', default='docker_images.yml',
                        type=argparse.FileType('r'),
                        help="YAML Config and master container list")

    # -f is optional, so using type=argparse.FileType is problematic
    parser.add_argument('-f', '--filter_images', default=None, action='store',
                        help="YAML file restricting images to build/fetch")

    parser.add_argument('-a', '--actions_taken', default=None,
                        help="Save a YAML file with actions taken during run")

    group = parser.add_mutually_exclusive_group()
    group.add_argument('-b', '--build', action="store_true", default=False,
                       help="Build (don't fetch) all internal images, nocache")
    group.add_argument('-p', '--pull', action="store_true", default=False,
                       help="Only pull containers, fail if build required")

    parser.add_argument('-d', '--dry_run', action="store_true",
                        help="Don't build/fetch anything")

    parser.add_argument('-g', '--graph', default=None,
                        help="Filename for DOT graph file of image dependency")

    parser.add_argument('-l', '--build_log_dir', action="store",
                        help="Log build output to this dir if set")

    parser.add_argument('-r', '--repo_root', default="..", action="store",
                        help="Repo root directory")

    parser.add_argument('-t', '--build_tag', default=None, action="store",
                        help="tag all images built/pulled using this tag")

    parser.add_argument('-v', '--verbosity', action='count', default=1,
                        help="Repeat to increase log level")

    args = parser.parse_args()

    if args.verbosity > 1:
        LOG.setLevel(logging.DEBUG)
    else:
        LOG.setLevel(logging.INFO)

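# Illustrative usage of the arguments above (the file names are examples,
# not requirements of the script):
#
#   ./imagebuilder.py -c docker_images.yml -a actions_taken.yml -v
#
# builds/fetches every image listed in docker_images.yml, logs at DEBUG
# level (-v), and writes a YAML summary of the actions taken.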

def load_config():
    global args
    global conf
    global buildable_images
    global pull_only_images
    global build_tag

    try:
        cl_abs = os.path.abspath(args.container_list.name)
        LOG.info("Master container list file: %s" % cl_abs)

        conf = yaml.safe_load(args.container_list)
    except yaml.YAMLError:
        LOG.exception("Problem loading container list file")
        sys.exit(1)

    if args.build_tag:
        build_tag = args.build_tag
    else:
        build_tag = conf['docker_build_tag']

    if args.filter_images is None:
        buildable_images = conf['buildable_images']
        pull_only_images = conf['pull_only_images']
    else:
        fi_abs = os.path.abspath(args.filter_images)

        LOG.info("Filtering image list per 'docker_image_whitelist' in: %s" %
                 fi_abs)
        try:
            fi_fh = open(fi_abs, 'r')
            filter_list = yaml.safe_load(fi_fh)
            fi_fh.close()

            if 'docker_image_whitelist' not in filter_list:
                LOG.error("No 'docker_image_whitelist' defined in: %s" %
                          fi_abs)
                sys.exit(1)

            # fail if pull_only_images in docker_images.yml doesn't have tags
            for i in conf['pull_only_images']:
                (name, tag) = split_name(i)
                if not tag:
                    LOG.error("Images in docker_images.yml must be tagged")
                    sys.exit(1)

            buildable_images = [img for img in conf['buildable_images']
                                if split_name(img['name'])[0]
                                in filter_list['docker_image_whitelist']]

            pull_only_images = [img for img in conf['pull_only_images']
                                if split_name(img)[0]
                                in map(lambda x: split_name(x)[0],
                                       filter_list['docker_image_whitelist'])]

            pull_only_images = map(override_tags(
                filter_list['docker_image_whitelist']),
                pull_only_images)

        except:
            LOG.exception("Problem with filter list file")
            sys.exit(1)

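# For reference, a minimal sketch of the structures load_config() expects.
# The key names come from the lookups above; the values are illustrative only:
#
#   # docker_images.yml
#   docker_build_tag: 'candidate'
#   buildable_images:
#     - name: 'example/image'
#       repo: 'examplerepo'     # optional keys: path, context, dockerfile,
#                               # components
#   pull_only_images:
#     - 'redis:3.2'             # pull-only images must carry a tag
#
#   # filter file passed with -f
#   docker_image_whitelist:
#     - 'example/image'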

def override_tags(image_list_with_tags):

    untagged_whitelist = map(lambda x: split_name(x)[0], image_list_with_tags)

    def inner(i):
        img_name = split_name(i)[0]
        tag_override = split_name(image_list_with_tags[
            untagged_whitelist.index(img_name)])[1]
        if tag_override:
            return "%s:%s" % (img_name, tag_override)
        return i
    return inner

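# Sketch of the behavior (illustrative names): with a whitelist of
# ['nginx:1.13'], override_tags(...)('nginx:1.12') returns 'nginx:1.13',
# while an image whose whitelist entry carries no tag is returned unchanged.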

def split_name(input_name):
    """ split a docker image name in the 'name:tag' format into components """

    name = input_name
    tag = None

    # split name:tag if given in combined format
    name_tag_split = string.split(input_name, ":")

    if len(name_tag_split) > 1:  # has tag, return separated version
        name = name_tag_split[0]
        tag = name_tag_split[1]

    return (name, tag)

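# Examples of the split (derived directly from the code above):
#   split_name("redis:3.2") -> ("redis", "3.2")
#   split_name("redis")     -> ("redis", None)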

class RepoRepo():
    """ git repo managed by repo tool"""

    def __init__(self, name, path, remote_url, remote_branch, short_branch):

        self.name = name
        self.path = path
        self.git_url = "%s%s" % (remote_url, name)
        self.remote_branch = remote_branch
        self.short_branch = short_branch
        self.git_tags = []

        try:
            self.git_repo_o = git.Repo(self.abspath())
            LOG.debug("Repo - %s, path: %s" % (name, path))

            self.head_commit = self.git_repo_o.head.commit.hexsha
            LOG.debug(" head commit: %s" % self.head_commit)

            commit_t = time.gmtime(self.git_repo_o.head.commit.committed_date)
            self.head_commit_t = time.strftime("%Y-%m-%dT%H:%M:%SZ", commit_t)
            LOG.debug(" commit date: %s" % self.head_commit_t)

            for tag in self.git_repo_o.tags:
                if tag.commit == self.git_repo_o.head.commit:
                    self.git_tags.append(str(tag))

            if self.git_tags:
                LOG.debug(" tags referring to this commit: %s" %
                          ", ".join(self.git_tags))
            else:
                LOG.debug(" No git tags refer to this commit")

            self.clean = not self.git_repo_o.is_dirty(untracked_files=True)
            LOG.debug(" clean: %s" % self.clean)

            # list of untracked files (expensive operation)
            self.untracked_files = self.git_repo_o.untracked_files
            for u_file in self.untracked_files:
                LOG.debug(" Untracked: %s" % u_file)

        except Exception:
            LOG.exception("Error with git repo: %s" % name)
            sys.exit(1)

    def abspath(self):
        global args
        return os.path.abspath(os.path.join(args.repo_root, self.path))

    def path_clean(self, test_path, branch=""):
        """ Is working tree on branch and no untracked files in path? """
        global conf

        if not branch:
            branch = self.remote_branch

        LOG.debug(" Looking for changes in path: %s" % test_path)

        p_clean = True

        # diff between branch head and working tree (None)
        branch_head = self.git_repo_o.commit(branch)
        diff = branch_head.diff(None, paths=test_path)

        if diff:
            p_clean = False

            for diff_obj in diff:
                LOG.debug(" file not on branch: %s" % diff_obj)

        # remove . to compare paths using .startswith()
        if test_path == ".":
            test_path = ""

        for u_file in self.untracked_files:
            if u_file.startswith(test_path):
                LOG.debug(" untracked file in path: %s" % u_file)
                p_clean = False

        return p_clean

class RepoManifest():
    """ parses manifest XML file used by repo tool"""

    def __init__(self):
        global args
        global conf

        self.manifest_xml = {}
        self.repos = {}
        self.branch = ""

        self.manifest_file = os.path.abspath(
            os.path.join(args.repo_root,
                         ".repo/manifest.xml"))

        LOG.info("Loading manifest file: %s" % self.manifest_file)

        try:
            tree = ET.parse(self.manifest_file)
            self.manifest_xml = tree.getroot()
        except Exception:
            LOG.exception("Error loading repo manifest")
            sys.exit(1)

        # Find the branch names
        default = self.manifest_xml.find('default')

        self.short_branch = default.attrib['revision']
        self.remote_branch = "%s/%s" % (default.attrib['remote'],
                                        default.attrib['revision'])

        # Find the remote URL for these repos
        remote = self.manifest_xml.find('remote')
        self.remote_url = remote.attrib['review']

        LOG.info("Manifest is on remote branch '%s' with remote url '%s'" %
                 (self.remote_branch, self.remote_url))

        project_repos = {}

        for project in self.manifest_xml.iter('project'):
            repo_name = project.attrib['name']
            rel_path = project.attrib['path']
            abs_path = os.path.abspath(os.path.join(args.repo_root,
                                                    project.attrib['path']))

            if os.path.isdir(abs_path):
                project_repos[repo_name] = rel_path
            else:
                LOG.debug("Repo in manifest but not checked out: %s" %
                          repo_name)

        for repo_name, repo_path in project_repos.iteritems():
            self.repos[repo_name] = RepoRepo(repo_name, repo_path,
                                             self.remote_url,
                                             self.remote_branch,
                                             self.short_branch)

    def get_repo(self, repo_name):
        return self.repos[repo_name]

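# For orientation, RepoManifest above only reads a handful of attributes from
# .repo/manifest.xml; a minimal illustrative manifest would look like:
#
#   <manifest>
#     <remote name="origin" review="https://gerrit.example.org/"/>
#     <default remote="origin" revision="master"/>
#     <project name="examplerepo" path="examplerepo"/>
#   </manifest>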

# DockerImage Status Constants

DI_UNKNOWN = 'unknown'  # unknown status
DI_EXISTS = 'exists'  # already exists in docker, has an image_id

DI_BUILD = 'build'  # needs to be built
DI_FETCH = 'fetch'  # needs to be fetched (pulled)
DI_ERROR = 'error'  # build or other fatal failure


class DockerImage():

    def __init__(self, name, repo_name=None, repo_d=None, path=".",
                 context=".", dockerfile='Dockerfile', labels=None,
                 tags=None, image_id=None, components=None, status=DI_UNKNOWN):

        LOG.debug("New DockerImage object from name: %s" % name)

        # name to pull as, usually what is provided on creation.
        # May be changed by create_tags
        self.raw_name = name

        # Python's mutable default arguments are a landmine
        if labels is None:
            self.labels = {}
        else:
            self.labels = labels

        self.repo_name = repo_name
        self.repo_d = repo_d
        self.path = path
        self.context = context
        self.dockerfile = dockerfile
        self.tags = []  # tags are added to this later in __init__
        self.image_id = image_id
        self.components = components
        self.status = status

        self.parent_names = []  # names of parents from _find_parent_names()
        self.parents = []  # list of parent DockerImage object
        self.children = []  # list of child DockerImage objects

        # split name:tag if given in combined format
        (image_name, image_tag) = split_name(name)
        if image_tag:  # has tag
            self.name = image_name
            self.tags.append(image_tag)
        else:  # no tag
            self.name = image_name

        # Add the build tag if exists
        if build_tag not in self.tags:
            self.tags.append(build_tag)

        # split names from tag list
        if tags is not None:
            for tag in tags:
                thistag = ""
                (tag_name, tag_tag) = split_name(tag)
                if tag_tag:  # has name also, use just tag
                    thistag = tag_tag
                else:  # just a bare tag
                    thistag = tag_name

                if thistag not in self.tags:  # don't duplicate tags
                    self.tags.append(thistag)

        # self.clean only applies to this container
        self.clean = self._context_clean()
        self._find_parent_names()

    def __str__(self):
        return self.name

    def buildable(self):
        """ Can this image be built from a Dockerfile? """
        if self.repo_name:  # has a git repo to be built from
            return True
        return False

    def _context_clean(self):
        """ Determine whether this image's repo and build context are clean """

        if self.buildable():

            # check if on master branch
            repo_clean = self.repo_d.clean

            # only check the Docker context for cleanliness
            context_path = os.path.normpath(
                os.path.join(self.path, self.context))
            context_clean = self.repo_d.path_clean(context_path)

            # check if subcomponents are clean
            components_clean = self.components_clean()

            LOG.debug(" Build Context Cleanliness - "
                      "repo: %s, context: %s, components: %s" %
                      (repo_clean, context_clean, components_clean))

            if context_clean and repo_clean and components_clean:
                return True
            else:
                return False

        return True  # unbuildable images are clean

    def parents_clean(self):
        """ Returns true if self and all parents are clean """

        if self.buildable():
            if not self.clean:
                return False
            else:
                for parent in self.parents:
                    if not parent.parents_clean():
                        return False
                else:
                    return True

        return True  # unbuildable images are clean

    def compare_labels(self, other_labels):
        """ Returns True if image label-schema.org labels match dict """

        comparable_labels_re = [
            r".*name$",
            r".*vcs-url$",
            r".*vcs-ref$",
            r".*version$",
        ]

        for clr in comparable_labels_re:  # loop on all comparable labels
            for label in self.labels:  # loop on all labels
                if re.match(clr, label) is not None:  # if label matches re
                    # and label exists in other, and values are same
                    if label in other_labels and \
                       self.labels[label] == other_labels[label]:
                        pass  # continue through loop
                    else:
                        LOG.info("Non-matching label: %s" % label)
                        return False  # False when first difference found

        LOG.debug(" All labels matched")
        return True  # only when every label matches
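        # As a concrete illustration (derived from create_labels() below),
        # the regexes above end up comparing labels such as
        # org.label-schema.name, org.label-schema.vcs-url,
        # org.label-schema.vcs-ref and org.label-schema.version between the
        # local definition and an already-built or pulled image.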

    def same_name(self, other_name):
        """ compare image name (possibly with tag) against image name/tag """

        (o_name, o_tag) = split_name(other_name)

        if o_tag is None and self.name == o_name:
            return True
        elif self.name == o_name and o_tag in self.tags:
            return True

        return False

    def components_clean(self):

        if self.buildable() and self.components is not None:
            for component in self.components:
                if not component['repo_d'].clean or \
                   not component['repo_d'].path_clean(component['path']):
                    return False

        return True

    def component_labels(self):
        """ returns a dict of labels for subcomponents """

        if self.buildable() and self.components is not None:

            comp_l = {}

            for component in self.components:

                LOG.debug(" component %s generating child labels" %
                          component['repo_name'])

                prefix = "org.opencord.component.%s." % component['repo_name']

                comp_l[prefix + "vcs-url"] = component['repo_d'].git_url

                if component['repo_d'].clean and \
                   component['repo_d'].path_clean(component['path']):
                    clean = True
                else:
                    clean = False

                if clean:
                    comp_l[prefix + "version"] = "%s-%s" % \
                        (self.repo_d.short_branch, self.repo_d.head_commit)
                    comp_l[prefix + "vcs-ref"] = \
                        component['repo_d'].head_commit
                else:
                    comp_l[prefix + "version"] = "dirty"
                    comp_l[prefix + "vcs-ref"] = ""

            return comp_l

        return None
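        # Illustrative output (names are examples only): for a clean
        # component checked out from repo "examplerepo", the dict contains
        # keys like:
        #   org.opencord.component.examplerepo.vcs-url
        #   org.opencord.component.examplerepo.version  (e.g. "master-<sha>")
        #   org.opencord.component.examplerepo.vcs-ref  (the component's HEAD)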

    def child_labels(self, repo_list=None):
        """ return a dict of labels to apply to child images """

        LOG.debug(" Generating child labels from parent: %s" % self.name)

        # only create labels when they haven't already been created
        if repo_list is None:
            repo_list = []

        LOG.debug(" Already labeled with: %s" % ", ".join(repo_list))

        cl = {}

        if self.buildable() and self.repo_name not in repo_list:

            LOG.debug(" Adding parent labels from repo: %s" % self.repo_name)

            prefix = "org.opencord.component.%s." % self.repo_name

            cl[prefix + "vcs-url"] = self.repo_d.git_url

            if self.clean:
                cl[prefix + "version"] = "%s-%s" % (self.repo_d.short_branch,
                                                    self.repo_d.head_commit)
                cl[prefix + "vcs-ref"] = self.repo_d.head_commit
            else:
                cl[prefix + "version"] = "dirty"
                cl[prefix + "vcs-ref"] = ""

            repo_list.append(self.repo_name)

        # include component labels if present
        if self.components is not None:
            cl.update(self.component_labels())

        # recursively find labels up the parent chain
        if self.parents is not None:
            for parent in self.parents:
                cl.update(parent.child_labels(repo_list))

        return cl

    def create_labels(self):
        """ Create label-schema.org labels for image """

        if self.buildable():

            LOG.debug("Creating labels for: %s" % self.name)

            self.labels['org.label-schema.name'] = self.name
            self.labels['org.label-schema.schema-version'] = "1.0"

            # org.label-schema.build-date
            time_now = datetime.datetime.utcnow()
            build_date = time_now.strftime("%Y-%m-%dT%H:%M:%SZ")
            self.labels['org.label-schema.build-date'] = build_date

            # git version related labels
            self.labels['org.label-schema.vcs-url'] = self.repo_d.git_url

            if self.clean:
                self.labels['org.label-schema.version'] = \
                    "%s-%s" % (self.repo_d.short_branch,
                               self.repo_d.head_commit)
                self.labels['org.label-schema.vcs-ref'] = \
                    self.repo_d.head_commit
                self.labels['org.opencord.vcs-commit-date'] = \
                    self.repo_d.head_commit_t
            else:
                self.labels['org.label-schema.version'] = "dirty"
                self.labels['org.label-schema.vcs-ref'] = ""

            # include component labels if present
            if self.components is not None:
                self.labels.update(self.component_labels())

    def create_tags(self):
        """ Create docker tags as needed """

        if self.buildable():
            LOG.debug("Creating tags for image: %s" % self.name)

            # if clean and parents clean, add tags for branch/commit
            if self.parents_clean():

                # add build tag
                if build_tag not in self.tags:
                    self.tags.append(build_tag)

                # add branch tag
                branch_tag = self.repo_d.short_branch
                if branch_tag not in self.tags:
                    self.tags.append(branch_tag)

                # Add <branch>-<commit> tag, which is used to pull
                commit_tag = "%s-%s" % (self.repo_d.short_branch,
                                        self.repo_d.head_commit)
                if commit_tag not in self.tags:
                    self.tags.append(commit_tag)

                # this is most specific tag, so pull using it
                self.raw_name = "%s:%s" % (self.name, commit_tag)

                # add all tags in git that point at the commit
                for gt in self.repo_d.git_tags:
                    if gt not in self.tags:
                        self.tags.append(gt)

            LOG.debug("All tags: %s" % ", ".join(self.tags))
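        # Worked example (values are illustrative, not from a real run): with
        # build_tag "candidate", short_branch "master" and head_commit
        # "abc1234...", a clean image ends up tagged "candidate", "master"
        # and "master-abc1234..." plus any git tags on HEAD, and raw_name
        # becomes "<image name>:master-abc1234...".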

    def _find_parent_names(self):
        """ set self.parent_names using Dockerfile FROM lines """

        if self.buildable():
            # read contents of Dockerfile into df
            with open(self.dockerfile_abspath()) as dfh:
                dfl = dfh.readlines()

            parent_names = []
            frompatt = re.compile(r'^FROM\s+([\w/_:.-]+)', re.MULTILINE)

            for line in dfl:
                fromline = re.search(frompatt, line)
                if fromline:
                    parent_names.append(fromline.group(1))

            self.parent_names = parent_names  # may have tag

            LOG.debug(" Parents: %s" % ", ".join(self.parent_names))

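    # For example (illustrative Dockerfile content), a line such as
    #   FROM opencord/example-base:1.0
    # results in parent_names containing "opencord/example-base:1.0"; every
    # FROM line in the Dockerfile is inspected by the regex above.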
    def dockerfile_abspath(self):
        """ returns absolute path to Dockerfile for this image """

        if self.buildable():
            return os.path.join(self.repo_d.abspath(),
                                self.path, self.dockerfile)
        else:
            return None

    def dockerfile_rel_path(self):
        """ returns the path relative to the context of the Dockerfile """

        if self.buildable():
            if self.context == ".":
                return self.dockerfile
            else:
                return os.path.normpath(os.path.join(self.path,
                                                     self.dockerfile))
        else:
            return None

    def context_tarball(self):
        """ returns a filehandle to a tarball (tempfile) for the image """

        if self.buildable():

            context_path = os.path.normpath(
                os.path.join(self.repo_d.abspath(),
                             self.path, self.context))

            LOG.info("Creating context tarball of path: %s" % context_path)

            t_fh = tempfile.NamedTemporaryFile()
            t = tarfile.open(mode='w', fileobj=t_fh, dereference=True)

            # exclude git directories anywhere in the context
            exclusion_list = ['**/.git']

            docker_ignore = os.path.join(context_path, '.dockerignore')
            if os.path.exists(docker_ignore):
                for line in open(docker_ignore).readlines():
                    # slightly out of spec, we allow whitespace before comments
                    # https://docs.docker.com/engine/reference/builder/#dockerignore-file
                    if line.strip() and line.strip()[0] != '#':
                        exclusion_list.append(line.strip().rstrip('\/'))

            LOG.debug("Exclusion list: %s" % exclusion_list)

            # see docker-py source for context
            for path in sorted(
                    DockerUtils.exclude_paths(context_path, exclusion_list)):
                t.add(os.path.join(context_path, path),
                      arcname=path,
                      recursive=False)

            # add sub-components to tarball if required
            if self.components is not None:
                for component in self.components:
                    c_ctx_p = os.path.normpath(
                        os.path.join(component['repo_d'].abspath(),
                                     component['path']))

                    LOG.info("Adding component %s at context %s" %
                             (component['repo_name'], c_ctx_p))

                    # walk component source path
                    for path in sorted(
                        DockerUtils.exclude_paths(c_ctx_p, exclusion_list)):

                        # path to where to put files in the archive
                        cf_dest = os.path.normpath(
                            os.path.join(component['dest'], path))

                        t.add(os.path.join(c_ctx_p, path),
                              arcname=cf_dest,
                              recursive=False)

            # t.list() # prints all files in tarball
            t.close()
            t_fh.seek(0)
            return t_fh

        else:
            return None

    def buildargs(self):
        """ returns a dict of labels in docker buildargs-compliant format """
        ba_a = {}

        for label_k in self.labels:
            ba_re = re.compile(r'\W')  # non alpha/num/_ chars
            ba_label = ba_re.sub('_', label_k)
            ba_a[ba_label] = self.labels[label_k]

        return ba_a
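    # For instance (derived from the regex above), the label key
    # "org.label-schema.vcs-ref" becomes the build arg
    # "org_label_schema_vcs_ref", since every character that is not
    # alphanumeric or an underscore is replaced with "_".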


class DockerBuilder():

    def __init__(self, repo_manifest):

        global buildable_images
        global pull_only_images

        self.rm = repo_manifest
        self.dc = None  # Docker Client object

        self.images = []

        # arrays of images, used for write_actions
        self.preexisting = []
        self.obsolete = []
        self.pulled = []
        self.failed_pull = []
        self.obsolete_pull = []
        self.built = []
        self.failed_build = []

        # create dict of images, setting defaults
        for image in buildable_images:

            repo_d = self.rm.get_repo(image['repo'])

            if "components" in image:
                components = []

                for component in image['components']:
                    comp = {}
                    comp['repo_name'] = component['repo']
                    comp['repo_d'] = self.rm.get_repo(component['repo'])
                    comp['dest'] = component['dest']
                    comp['path'] = component.get('path', '.')
                    components.append(comp)
            else:
                components = None

            # set the full name in case this is pulled
            full_name = "%s:%s" % (image['name'], build_tag)

            img_o = DockerImage(full_name, image['repo'], repo_d,
                                image.get('path', '.'),
                                image.get('context', '.'),
                                image.get('dockerfile', 'Dockerfile'),
                                components=components)

            self.images.append(img_o)

        # add misc images
        for misc_image in pull_only_images:
            img_o = DockerImage(misc_image)
            self.images.append(img_o)

        if not args.dry_run:
            self._docker_connect()

        self.create_dependency()

        if not args.build:  # if forcing build, don't use preexisting
            self.find_preexisting()

        if args.graph is not None:
            self.dependency_graph(args.graph)

        self.process_images()

        if args.actions_taken is not None:
            self.write_actions_file(args.actions_taken)

    def _docker_connect(self):
        """ Connect to docker daemon """

        try:
            # get a "high level" Docker object with conf from the environment
            hl_dc = docker.from_env()
            # use the low level APIClient (same as the 1.x API)
            self.dc = hl_dc.api
        except requests.ConnectionError:
            LOG.debug("Docker connection not available")
            sys.exit(1)

        if self.dc.ping():
            LOG.debug("Docker server is responding")
        else:
            LOG.error("Unable to ping docker server")
            sys.exit(1)

    def find_preexisting(self):
        """ find images that already exist in Docker and mark """

        if self.dc:
            LOG.debug("Evaluating already built/fetched Docker images")

            # get list of images from docker
            pe_images = self.dc.images()

            for pe_image in pe_images:
                raw_tags = pe_image['RepoTags']

                if raw_tags:
                    LOG.info("Preexisting Image - ID: %s, tags: %s" %
                             (pe_image['Id'], ",".join(raw_tags)))

                    has_build_tag = False
                    for tag in raw_tags:
                        if build_tag in tag:
                            LOG.debug(" image has build_tag: %s" % build_tag)
                            has_build_tag = True

                    base_name = raw_tags[0].split(":")[0]
                    image = self.find_image(base_name)

                    # only evaluate images in the list of desired images
                    if image is not None:

                        good_labels = image.compare_labels(pe_image['Labels'])

                        if good_labels:
                            if has_build_tag:
                                LOG.info(" Image %s has up-to-date labels and"
                                         " build_tag" % pe_image['Id'])
                            else:
                                LOG.info(" Image %s has up-to-date labels but"
                                         " missing build_tag. Tagging image"
                                         " with build_tag: %s" %
                                         (pe_image['Id'], build_tag))

                                self.dc.tag(pe_image['Id'], image.name,
                                            tag=build_tag)

                            self.preexisting.append({
                                'id': pe_image['Id'],
                                'tags': raw_tags,
                                'base': image.name.split(":")[0],
                            })

                            image.image_id = pe_image['Id']
                            image.status = DI_EXISTS

                        else:  # doesn't have good labels

                            # if it has a build_tag, and a good image hasn't
                            # already been tagged
                            if has_build_tag and (image.status != DI_EXISTS):
                                LOG.info(" Image %s has obsolete labels and"
                                         " build_tag, remove" % pe_image['Id'])

                                # remove build_tag from image
                                name_bt = "%s:%s" % (base_name, build_tag)
                                self.dc.remove_image(name_bt, False, True)

                            else:
                                LOG.info(" Image %s has obsolete labels, lacks"
                                         " build_tag, ignore" % pe_image['Id'])

                            self.obsolete.append({
                                'id': pe_image['Id'],
                                'tags': raw_tags,
                            })

    def find_image(self, image_name):
        """ return image object matching name """
        LOG.debug(" attempting to find image for: %s" % image_name)

        for image in self.images:
            if image.same_name(image_name):
                LOG.debug(" found a match: %s" % image.raw_name)
                return image
        return None

    def create_dependency(self):
        """ set parent/child links for images """

        # List of lists of parent images. Done in two steps for clarity
        lol_of_parents = [img.parent_names for img in self.images
                          if img.parent_names]

        # flat list of all parent image names, with dupes
        parents_with_dupes = [parent for parent_sublist in lol_of_parents
                              for parent in parent_sublist]

        # remove duplicates
        parents = list(set(parents_with_dupes))

        LOG.info("All parent images: %s" % ", ".join(parents))

        # list of "external parents", ones not built internally
        external_parents = []

        for parent_name in parents:
            LOG.debug("Evaluating parent image: %s" % parent_name)
            internal_parent = False

            # match on p_name, without tag
            (p_name, p_tag) = split_name(parent_name)

            for image in self.images:
                if image.same_name(p_name):  # internal image is a parent
                    internal_parent = True
                    LOG.debug(" Internal parent: %s" % image.name)
                    break

            if not internal_parent:  # parent is external
                LOG.debug(" External parent: %s" % parent_name)
                external_parents.append(parent_name)

        # add unique external parents to image list
        for e_p_name in set(external_parents):
            LOG.debug(" Creating external parent image object: %s" % e_p_name)
            img_o = DockerImage(e_p_name)
            self.images.append(img_o)

        # now that all images (including parents) are in list, associate them
        for image in filter(lambda img: img.parent_names, self.images):

            LOG.debug("Associating image: %s" % image.name)

            for parent_name in image.parent_names:

                parent = self.find_image(parent_name)
                image.parents.append(parent)

                if parent is not None:
                    LOG.debug(" internal image '%s' is parent of '%s'" %
                              (parent.name, image.name))
                    parent.children.append(image)

                else:
                    LOG.debug(" external image '%s' is parent of '%s'" %
                              (parent_name, image.name))

        # loop again now that parents are linked to create labels
        for image in self.images:
            image.create_labels()
            image.create_tags()

            # if image has parent, get labels from parent(s)
            if image.parents is not None:
                for parent in image.parents:
                    LOG.debug("Adding parent labels from %s to child %s" %
                              (parent.name, image.name))

                    # don't create component labels for same repo as image
                    repo_list = [image.repo_name]
                    image.labels.update(parent.child_labels(repo_list))

    def dependency_graph(self, graph_fn):
        """ save a DOT dependency graph to a file """

        graph_fn_abs = os.path.abspath(graph_fn)

        LOG.info("Saving DOT dependency graph to: %s" % graph_fn_abs)

        try:
            import graphviz
        except ImportError:
            LOG.error('graphviz pip module not found')
            raise

        dg = graphviz.Digraph(comment='Image Dependency Graph',
                              graph_attr={'rankdir': 'LR'})

        component_nodes = []

        # Use raw names, so they match with what's in Dockerfiles
        # delete colons as python graphviz module breaks with them
        for image in self.images:
            name_g = image.raw_name.replace(':', '\n')
            dg.node(name_g)

            if image.parents is not None:
                for parent in image.parents:
                    name_p = parent.raw_name.replace(':', '\n')
                    dg.edge(name_p, name_g)

            if image.components is not None:
                for component in image.components:
                    name_c = "component - %s" % component['repo_name']
                    if name_c not in component_nodes:
                        dg.node(name_c)
                        component_nodes.append(name_c)
                    dg.edge(name_c, name_g, "", {'style': 'dashed'})

        with open(graph_fn_abs, 'w') as g_fh:
            g_fh.write(dg.source)
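        # The saved file is plain Graphviz DOT, so it can be rendered with
        # the standard graphviz CLI, e.g. (file names here are examples):
        #   dot -Tpng images.dot -o images.png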

    def write_actions_file(self, actions_fn):

        actions_fn_abs = os.path.abspath(actions_fn)

        LOG.info("Saving actions as YAML to: %s" % actions_fn_abs)

        actions = {
            "ib_pulled": self.pulled,
            "ib_built": self.built,
            "ib_preexisting_images": self.preexisting,
            "ib_obsolete_images": self.obsolete,
            "ib_failed_pull": self.failed_pull,
            "ib_obsolete_pull": self.obsolete_pull,
            "ib_failed_build": self.failed_build,
        }

        with open(actions_fn_abs, 'w') as a_fh:
            yaml.safe_dump(actions, a_fh)
            LOG.debug(yaml.safe_dump(actions))

    def process_images(self):
        """ determine whether to build/fetch images """
        # upstream images (have no parents), must be fetched
        must_fetch_a = filter(lambda img: not img.parents, self.images)

        for image in must_fetch_a:
            if image.status is not DI_EXISTS:
                image.status = DI_FETCH

        # images that can be built or fetched (have parents)
        b_or_f_a = filter(lambda img: img.parents, self.images)

        for image in b_or_f_a:
            if not image.parents_clean() or args.build:
                # must be built if not clean
                image.status = DI_BUILD
            elif image.status is not DI_EXISTS:
                # try to fetch if clean and doesn't exist
                image.status = DI_FETCH
            # otherwise, image is clean and exists (image.status == DI_EXISTS)

        c_and_e_a = filter(lambda img: img.status is DI_EXISTS, self.images)
        LOG.info("Preexisting and clean images: %s" %
                 ", ".join(c.name for c in c_and_e_a))

        upstream_a = filter(lambda img: (img.status is DI_FETCH and
                                         not img.parents), self.images)
        LOG.info("Upstream images that must be fetched: %s" %
                 ", ".join(u.raw_name for u in upstream_a))

        fetch_a = filter(lambda img: (img.status is DI_FETCH and
                                      img.parents), self.images)
        LOG.info("Clean, buildable images to attempt to fetch: %s" %
                 ", ".join(f.raw_name for f in fetch_a))

        build_a = filter(lambda img: img.status is DI_BUILD, self.images)
        LOG.info("Buildable images, due to unclean context or parents: %s" %
                 ", ".join(b.raw_name for b in build_a))

        # OK to fetch upstream in any case as they should reduce number of
        # layers pulled/built later

        for image in upstream_a:
            if not self._fetch_image(image):
                LOG.error("Unable to fetch upstream image: %s" %
                          image.raw_name)
                sys.exit(1)

        # fetch if not forcing the build of all images
        if not args.build:
            fetch_sort = sorted(fetch_a, key=(lambda img: len(img.children)),
                                reverse=True)

            for image in fetch_sort:
                if not self._fetch_image(image):
                    # if didn't fetch, build
                    image.status = DI_BUILD

        while True:
            buildable_images = self.get_buildable()

            if buildable_images and args.pull:
                LOG.error("Images must be built, but --pull is specified")
                exit(1)

            if buildable_images:
                for image in buildable_images:
                    self._build_image(image)
            else:
                LOG.debug("No more images to build, ending build loop")
                break

    def get_buildable(self):
        """ Returns list of images that can be built """

        buildable = []

        for image in filter(lambda img: img.status is DI_BUILD, self.images):
            for parent in image.parents:
                if parent.status is DI_EXISTS:
                    if image not in buildable:  # build once if two parents
                        buildable.append(image)

        LOG.debug("Buildable images: %s" %
                  ', '.join(image.name for image in buildable))

        return buildable

    def tag_image(self, image):
        """ Applies tags to an image """

        for tag in image.tags:

            LOG.info("Tagging id: '%s', repo: '%s', tag: '%s'" %
                     (image.image_id, image.name, tag))

            if self.dc is not None:
                self.dc.tag(image.image_id, image.name, tag=tag)

    def _fetch_image(self, image):

        LOG.info("Attempting to fetch docker image: %s" % image.raw_name)

        if self.dc is not None:
            try:
                for stat_json in self.dc.pull(image.raw_name,
                                              stream=True):

                    # sometimes Docker's JSON is dirty, per:
                    # https://github.com/docker/docker-py/pull/1081/
                    stat_s = stat_json.strip()
                    stat_list = stat_s.split("\r\n")

                    for s_j in stat_list:
                        stat_d = json.loads(s_j)

                        if 'stream' in stat_d:
                            for stat_l in stat_d['stream'].split('\n'):
                                LOG.debug(stat_l)

                        if 'status' in stat_d:
                            for stat_l in stat_d['status'].split('\n'):
                                noisy = ["Extracting", "Downloading",
                                         "Waiting", "Download complete",
                                         "Pulling fs layer", "Pull complete",
                                         "Verifying Checksum",
                                         "Already exists"]
                                if stat_l in noisy:
                                    LOG.debug(stat_l)
                                else:
                                    LOG.info(stat_l)

                        if 'error' in stat_d:
                            LOG.error(stat_d['error'])
                            sys.exit(1)

            except (DockerErrors.NotFound, DockerErrors.ImageNotFound) as e:
                LOG.warning("Image could not be pulled: %s" % e)

                self.failed_pull.append({
                    "tags": [image.raw_name, ],
                })

                if not image.parents:
                    LOG.error("Pulled image required to build, not available!")
                    sys.exit(1)

                return False

            except:
                LOG.exception("Error pulling docker image")

                self.failed_pull.append({
                    "tags": [image.raw_name, ],
                })

                return False

            # obtain the image_id by inspecting the pulled image. Seems unusual
            # that the Docker API `pull` method doesn't provide it when the
            # `build` method does
            pulled_image = self.dc.inspect_image(image.raw_name)

            # check to make sure that image that was downloaded has the labels
            # that we expect it to have, otherwise return false, trigger build
            if not image.compare_labels(
                    pulled_image['ContainerConfig']['Labels']):
                LOG.info("Tried fetching image %s, but labels didn't match" %
                         image.raw_name)

                self.obsolete_pull.append({
                    "id": pulled_image['Id'],
                    "tags": pulled_image['RepoTags'],
                })
                return False

            image.image_id = pulled_image['Id']
            LOG.info("Fetched image %s, id: %s" %
                     (image.raw_name, image.image_id))

            self.pulled.append({
                "id": pulled_image['Id'],
                "tags": pulled_image['RepoTags'],
                "base": image.name.split(":")[0],
            })

            self.tag_image(image)
            image.status = DI_EXISTS
            return True

    def _build_image(self, image):

        global build_tag

        LOG.info("Building docker image for %s" % image.raw_name)

        if self.dc is not None:

            image_build_tag = "%s:%s" % (image.name, build_tag)

            buildargs = image.buildargs()
            context_tar = image.context_tarball()
            dockerfile = image.dockerfile_rel_path()

            for key, val in buildargs.iteritems():
                LOG.debug("Buildarg - %s : %s" % (key, val))

            bl_path = ""
            start_time = datetime.datetime.utcnow()

            if(args.build_log_dir):
                bl_name = "%s_%s" % (start_time.strftime("%Y%m%dT%H%M%SZ"),
                                     re.sub(r'\W', '_', image.name))
                bl_path = os.path.abspath(
                    os.path.join(args.build_log_dir, bl_name))

                LOG.info("Build log: %s" % bl_path)
                bl_fh = open(bl_path, 'w+', 0)  # 0 = unbuffered writes
            else:
                bl_fh = None

            try:
                LOG.info("Building image: %s" % image)

                for stat_d in self.dc.build(tag=image_build_tag,
                                            buildargs=buildargs,
                                            nocache=args.build,
                                            custom_context=True,
                                            fileobj=context_tar,
                                            dockerfile=dockerfile,
                                            rm=True,
                                            forcerm=True,
                                            pull=False,
                                            stream=True,
                                            decode=True):

                    if 'stream' in stat_d:

                        if bl_fh:
                            bl_fh.write(stat_d['stream'].encode('utf-8'))

                        for stat_l in stat_d['stream'].split('\n'):
                            if(stat_l):
                                LOG.debug(stat_l)
                        if stat_d['stream'].startswith("Successfully built "):
                            siid = stat_d['stream'].split(' ')[2]
                            short_image_id = siid.strip()
                            LOG.debug("Short Image ID: %s" % short_image_id)

                    if 'status' in stat_d:
                        for stat_l in stat_d['status'].split('\n'):
                            if(stat_l):
                                LOG.info(stat_l)

                    if 'error' in stat_d:
                        LOG.error(stat_d['error'])
                        image.status = DI_ERROR
                        sys.exit(1)

            except:
                LOG.exception("Error building docker image")

                self.failed_build.append({
                    "tags": [image_build_tag, ],
                })

                return

            finally:
                if(bl_fh):
                    bl_fh.close()

            # the image ID given by output isn't the full SHA256 id, so find
            # and set it to the full one
            built_image = self.dc.inspect_image(short_image_id)
            image.image_id = built_image['Id']

            end_time = datetime.datetime.utcnow()
            duration = end_time - start_time  # duration is a timedelta

            LOG.info("Built Image: %s, duration: %s, id: %s" %
                     (image.name, duration, image.image_id))

            self.tag_image(image)

            # don't push the build_tag to dockerhub
            built_tags = list(image.tags)
            built_tags.remove(build_tag)

            self.built.append({
                "id": image.image_id,
                "tags": built_tags,
                "build_log": bl_path,
                "duration": duration.total_seconds(),
                "base": image.name.split(":")[0],
            })

            image.status = DI_EXISTS


if __name__ == "__main__":
    parse_args()
    load_config()

    # only include docker module if not a dry run
    if not args.dry_run:
        try:
            import requests
            from distutils.version import LooseVersion
            from docker import __version__ as docker_version

            # handle the docker-py v1 to v2 API differences
            if LooseVersion(docker_version) < LooseVersion('2.0.0'):
                LOG.error("Unsupported python docker module - "
                          "remove docker-py 1.x, install docker 2.x")
                sys.exit(1)

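            # The 2.x SDK is published on PyPI as "docker" (for example,
            # installable with `pip install docker`), replacing the older
            # "docker-py" 1.x package.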
            import docker
            from docker import utils as DockerUtils
            from docker import errors as DockerErrors

        except ImportError:
            LOG.error("Unable to load python docker module (dry run?)")
            sys.exit(1)

    rm = RepoManifest()
    db = DockerBuilder(rm)