Static Jenkins Site Generator

- Private Jenkins job scraping w/API key
- Added Gilroy font to match main public website
- Link back to ONF website for products
- Add more products

Change-Id: I3ed2dc1e371c564ee483ab83fd110a88d818bca7
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..5f6566e
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,16 @@
+# SPDX-FileCopyrightText: © 2020 Open Networking Foundation <support@opennetworking.org>
+# SPDX-License-Identifier: Apache-2.0
+
+*.pyc
+.tox
+.coverage
+venv_*
+*.egg-info
+
+# directories with ephemeral info
+products
+site
+jobs
+
+# credentials
+credentials.yaml
diff --git a/.gitreview b/.gitreview
new file mode 100644
index 0000000..6aaa5a8
--- /dev/null
+++ b/.gitreview
@@ -0,0 +1,5 @@
+[gerrit]
+host=gerrit.opencord.org
+port=29418
+project=sjsg.git
+defaultremote=origin
diff --git a/.reuse/dep5 b/.reuse/dep5
new file mode 100644
index 0000000..4ef0289
--- /dev/null
+++ b/.reuse/dep5
@@ -0,0 +1,13 @@
+Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
+
+Files: VERSION static/images/onf_logo.png .gitreview
+Copyright: 2020 Open Networking Foundation
+License: Apache-2.0
+
+Files: static/scripts/list.min.js
+Copyright: 2011-2018 Jonny Strömberg, jonnystromberg.com
+License: MIT
+
+Files: static/fonts/*
+Copyright: 2010-2020 Fontspring, Order 57767578 made by Denise Barton
+License: LicenseRef-Fontspring-EULA-1.7.1
diff --git a/LICENSES/Apache-2.0.txt b/LICENSES/Apache-2.0.txt
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/LICENSES/Apache-2.0.txt
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/LICENSES/LicenseRef-Fontspring-EULA-1.7.1.txt b/LICENSES/LicenseRef-Fontspring-EULA-1.7.1.txt
new file mode 100644
index 0000000..9fe89b7
--- /dev/null
+++ b/LICENSES/LicenseRef-Fontspring-EULA-1.7.1.txt
@@ -0,0 +1,59 @@
+The Fontspring Web Font End User License Agreement
+Version 1.7.1 - February 24, 2020
+
+By downloading, installing and/or embedding font software (“Web Font”), designed by the foundry (“Foundry”) and offered by Fontspring, you (“Licensee”) agree to be bound by the following terms and conditions of this End User Licensing Agreement (“EULA”):
+
+1. Right Granted
+Fontspring grants Licensee a perpetual, worldwide, non-exclusive and non-transferable license to link the Web Font to Websites using the @font-face selector in CSS files.
+
+2. Requirements and Restrictions
+Licensee agrees to abide by the following requirements and restrictions:
+a. Licensee must use the Web Font provided by Fontspring under this EULA. Licensee may not link to the full, CFF OpenType or TrueType font intended for desktop installation.
+b. The total traffic of the Website(s), measured in pageviews per month, may be no greater than the number of pageviews specified in the Receipt.
+c. Licensee may only install the Web Font on Websites that it owns or controls.
+d. Licensee may embed Web Font in reports generated by the Website(s), provided that Licensee does not sell the reports for profit.
+
+3. Provision to Third Parties
+Licensee may temporarily provide the Web Font to a producer, publisher or other agent who is working on behalf of the Licensee.
+
+The third party designer, developer, agent, or independent contractor must:
+a. Agree in writing to use the Web Font exclusively for Licensee’s work, according to the terms of this EULA.
+b. Retain no copies of the Web Font upon completion of the work.
+c. Use and embed the Web Font only in Websites owned or controlled by Licensee.
+
+4. Term
+This EULA grants a perpetual license for the rights set forth in Paragraph 1 unless and until the EULA terminates under Paragraph 7. Fontspring will not charge additional fees post purchase, annually or otherwise.
+
+5. Modifications
+Licensee may not modify the Web Font or create derivative fonts based upon the Web Font without prior written consent from Fontspring or the Foundry EXCEPT THAT Licensee may generate files necessary for embedding or linking in accordance with this EULA.
+
+6. Copyright
+The Web Font is protected by copyright law. The Foundry is the sole, exclusive owner of all intellectual property rights, including rights under copyright and trademark law. Licensee agrees not to use the Web Font in any manner that infringes the intellectual property rights of the Foundry or violates the terms of this EULA. Licensee will be held legally responsible, and indemnifies Fontspring, for any infringements on the Foundry’s rights caused by failure to abide by the terms of this EULA.
+
+7. Termination
+This EULA is effective until terminated. If Licensee fails to comply with any term of this EULA, Fontspring may terminate the EULA with 30 days notice. This EULA will terminate automatically 30 days after the issuance of such notice.
+
+8. Refunds and Disclaimer
+Fontspring will, upon request by the Licensee, provide a refund for the Web Font if:
+a. The Web Font has not been used in any publicly available Website.
+b. No more than 30 days have passed since the date of purchase, specified on the Receipt.
+c. The Font has been uninstalled and deleted from all Licensee’s computers.
+
+The Web Font is provided “as is.” Fontspring makes no warranty of any kind, either expressed or implied, including, but not limited to the implied warranties of merchantability and fitness for a particular purpose.
+
+Fontspring shall not be liable for any direct, indirect, consequential, or incidental damages (including damages from loss of business profits, business interruption, loss of business information, and the like) arising out of the use of or inability to use the product even if Fontspring or the foundry has been advised of the possibility of such damages.
+
+Because some states do not allow the exclusion or limitation of liability for consequential or incidental damages, the above limitation may not apply to Licensee.
+
+9. Governing Law
+This EULA is governed by the laws of the United States of America and the State of Delaware.
+
+10. Entire Agreement
+This EULA, in conjunction with the Receipt that accompanies each Web Font licensed from Fontspring, constitutes the entire agreement between Fontspring and Licensee.
+
+11. Modification
+Fontspring and Licensee may modify or amend this EULA in writing.
+
+12. Waiver.
+The waiver of one breach or default hereunder shall not constitute the waiver of any subsequent breach or default.
+
diff --git a/LICENSES/MIT.txt b/LICENSES/MIT.txt
new file mode 100644
index 0000000..204b93d
--- /dev/null
+++ b/LICENSES/MIT.txt
@@ -0,0 +1,19 @@
+MIT License Copyright (c) <year> <copyright holders>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is furnished
+to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice (including the next
+paragraph) shall be included in all copies or substantial portions of the
+Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
+OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..7b42641
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,4 @@
+# SPDX-FileCopyrightText: © 2020 Open Networking Foundation <support@opennetworking.org>
+# SPDX-License-Identifier: Apache-2.0
+include requirements.txt
+include VERSION
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..c2c756a
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,74 @@
+# SPDX-FileCopyrightText: © 2020 Open Networking Foundation <support@opennetworking.org>
+# SPDX-License-Identifier: Apache-2.0
+
+SHELL = bash -e -o pipefail
+
+# Common set of args; defaults to --debug, set to empty for less output
+COMMON_ARGS ?= --debug
+
+# tooling binaries
+VIRTUALENV 	?= virtualenv -p python3
+PYTHON      ?= python3
+# change to be 'python3 -m cProfile -s cumtime' to profile a script
+
+.DEFAULT_GOAL := help
+.PHONY: test tox yamllint license help build siterender buildcollector serve sync clean clean-all
+
+# Create the virtualenv with all the tools installed
+VENV_NAME = venv_sjsg
+
+$(VENV_NAME): requirements.txt
+	$(VIRTUALENV) $@ ;\
+  source ./$@/bin/activate ; set -u ;\
+  $(PYTHON) -m pip install -r requirements.txt yamllint
+	echo "To enter virtualenv, run 'source $@/bin/activate'"
+
+# build targets
+
+build: buildcollector siterender ## Collect build info and create website
+
+buildcollector: $(VENV_NAME) ## Collect build information from Jenkins jobs
+	source ./$</bin/activate ; set -u ;\
+	$(PYTHON) buildcollector.py scrape.yaml --credentials credentials.yaml $(COMMON_ARGS)
+
+siterender: $(VENV_NAME) ## Create static website from build information
+	source ./$</bin/activate ; set -u ;\
+	$(PYTHON) siterender.py $(COMMON_ARGS)
+	cp -r static/* site/
+
+serve: ## Serve the generated site locally with Python's http.server
+	cd site && $(PYTHON) -m http.server
+
+sync: ## Rsync the generated site to the production webserver
+	rsync --delete-after -avP site/ ubuntu@static.opennetworking.org:/srv/sites/certification.opennetworking.org/
+
+# testing targets
+test: yamllint tox ## run all tests
+
+# YAML files, excluding venv and cookiecutter directories
+YAML_FILES        ?= $(shell find . -type d \( -path "./$(VENV_NAME)" \) -prune -o -type f \( -name '*.yaml' -o -name '*.yml' \) -print )
+
+yamllint: $(VENV_NAME) ## lint YAML format using yamllint
+	source ./$</bin/activate ; set -u ;\
+  yamllint --version ;\
+  yamllint \
+    -d "{extends: default, rules: {line-length: {max: 139}}}" \
+    -s $(YAML_FILES)
+
+tox: ## test code with tox
+	tox
+
+license: ## Check code for licensing with REUSE
+	reuse --root . lint
+
+clean: ## Delete all temporary files
+	rm -rf products/* site/*
+
+clean-all: clean ## Delete virtualenv and all cached job information
+	rm -rf $(VENV_NAME) jobs/* .tox
+
+help: ## Print help for each target
+	@echo SJSG make targets
+	@echo
+	@grep '^[[:alnum:]_-]*:.* ##' $(MAKEFILE_LIST) \
+    | sort | awk 'BEGIN {FS=":.* ## "}; {printf "%-25s %s\n", $$1, $$2};'
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..0f74c72
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,148 @@
+..
+  SPDX-FileCopyrightText: © 2020 Open Networking Foundation <support@opennetworking.org>
+  SPDX-License-Identifier: Apache-2.0
+
+Static Jenkins Site Generator (SJSG)
+====================================
+
+To-Do
+-----
+
+scrape.yaml:
+
+- Add more metadata (links, specs, etc.)
+
+templates:
+
+- Organization of results by metadata (ex: list all products by a
+  Vendor, all products by Type, etc)
+
+static files:
+
+- Add images of products
+
+buildcollector.py:
+
+- Regex support in filters
+- Use correct functions to build file paths, not just string concat
+
+siterender.py:
+
+- ?
+
+Theory of Operation
+-------------------
+
+This tool has two parts:
+
+1. ``buildcollector.py``, which reads a configuration file describing Jenkins
+   jobs and associated metadata, then retrieves information about those jobs
+   from the Jenkins API and stores that metadata, extracted data, and job
+   artifacts in a directory structure (sketched below).
+
+2. ``siterender.py``, which reads the data from the directory structure and
+   builds a static HTML site using Jinja2 templates.
+
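+Based on the paths built by these scripts, the on-disk layout after a run
+looks roughly like this (job and product names vary with the Scrape File)::
+
+  jobs/<jenkins_host>/<job_name>/0_list.json      # raw list of builds for a job
+  jobs/<jenkins_host>/<job_name>/<id>_build.json  # raw JSON for a single build
+  products/<Product_Name>.json                    # extracted per-product data
+  site/<ONF_Project>/<Product_Name>/index.html    # rendered product page
+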
+Running the scripts
+-------------------
+
+Make sure you have a working Python 3.6 or later instance, and ``virtualenv``
+installed.
+
+Run ``make build``. Scrape and render will both be run, and the site will be
+generated in the ``site/`` subdirectory.
+
+Run ``make help`` for information on other Makefile targets.
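+
+For example, a typical local workflow is::
+
+  make build   # scrape Jenkins and render the site into site/
+  make serve   # preview the result at http://localhost:8000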
+
+Changing the look and feel of the Site
+--------------------------------------
+
+Modify the templates in ``templates/``.  These are `Jinja2
+<https://jinja.palletsprojects.com/en/2.11.x/templates/>`_ format files.
+
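+``siterender.py`` registers three custom filters (``tsdatetime``, ``tsdate``,
+and ``timebox``) that templates can apply to build data; a hypothetical
+fragment using them might look like::
+
+  {% for build in builds %}
+    {{ build.result }} at {{ build.timestamp | tsdatetime }}
+  {% endfor %}
+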
+Static content is kept in ``static/``, and is copied into ``site/`` as a part
+of the site render.
+
+To view the site locally, run ``make serve``, then use a web browser to go to
+`http://localhost:8000 <http://localhost:8000>`_.
+
+Scrape Files
+------------
+
+For each Jenkins job you want to scrape, you need to create a Scrape File. The
+default scrape file is at ``scrape.yaml``.  All data is required unless
+specified as optional.
+
+This file is in YAML format, and contains information about the job(s) to
+scrape. You can put multiple YAML documents within one file, separated with the
+``---`` document start line.
+
+These keys are used in the Scrape File:
+
+- ``product_name`` (string): Human readable product name
+
+- ``onf_project`` (string): ONF Project name
+
+- ``jenkins_jobs`` (list): list of groups of jobs
+
+  - ``group`` (string): Name of group of jobs. Used mainly for maintaining
+    version separation
+
+  - ``jenkins_url`` (string): Bare URL to the Jenkins instance for this group -
+    ex: ``https://jenkins.opencord.org``
+
+  - ``credentials`` (string, optional): Path to a credentials YAML file with
+    Jenkins API Tokens, if trying to access private jobs on the server.  See
+    ``credentials.yaml.example`` for examples.
+
+  - ``jobs`` (list of dicts): List of jobs to be pulled for this group
+
+    - ``name`` (string): Job name in Jenkins
+
+    - ``name_override`` (string, optional): Override for name shown in the
+      output, used to keep job names private.
+
+    - ``extract`` (dictionary): Name keys and JSONPath values, which are extracted
+      from the individual Job output JSON files and put in the output.
+
+    - ``filter`` (dictionary, optional): Set of name keys and literal values
+      used to filter which builds are included for this product. After the
+      ``extract`` step is run, each extracted value named in the filter is
+      compared with its literal value, and the build is retained only if they
+      all match (see the example below).
+
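+A condensed document from the default ``scrape.yaml``, showing most of these
+keys (including an ``extract``/``filter`` pair), looks like::
+
+  onf_project: "Stratum"
+  product_name: "Stordis BF6064X"
+
+  jenkins_jobs:
+    - group: "master"
+      jenkins_url: "https://jenkins.stratumproject.org"
+      jobs:
+        - name: "stratum-bf-test-master"
+          extract:
+            id: "id"
+            result: "result"
+            timestamp: "timestamp"
+            switchname: "actions[?(@._class=='hudson.model.ParametersAction')].parameters[?(@.name=='SWITCH_NAME')].value"
+          filter:
+            switchname: "x86-64-stordis-bf6064x-t-r0"
+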
+The JSONpath library used is `jsonpath-ng
+<https://github.com/h2non/jsonpath-ng>`_, which seems to be the most regularly
+maintained Python implementation.
+
+Arbitrary variables can also be included in the Scrape File, and will be merged
+into the output of every JSON file generated by the collector.  This can and
+should include "marketing friendly" information used to modify the Jinja2
+output - for example, links to product pages, names for static images, etc.
+
+The Scrape File can also be used to set default values that are only replaced when
+extracted data isn't available - when a JSONPath query returns no results, it
+will contain an empty list, which is ignored in the merge.  If an extracted
+value is found, that value will replace the value given in the Scrape File.
+
+Design Considerations
+---------------------
+
+Metadata is added via the Scrape File to avoid having to embed that metadata
+in the Jenkins jobs themselves.
+
+Filesystem storage is used because artifacts can be arbitrary files, and to
+reduce the number of API calls, especially when filtering the same list of
+builds for multiple products.  Raw job output is kept in ``jobs/`` by default.
+Processed job output is kept in ``products/`` on a per-product basis.
+
+Jenkins infrastructure is always changing:
+
+- Jobs are added, renamed, or removed
+- Naming schemes may not match up with marketing names
+- Data should be retained even if the job is deleted
+- Fields will differ between products and projects
+
+Tests
+-----
+
+``make test`` will check the YAML files, statically check the Python code, and
+run the tox tests.  Currently there are no unit tests, but everything is in
+place to add them.
diff --git a/VERSION b/VERSION
new file mode 100644
index 0000000..c0ab82c
--- /dev/null
+++ b/VERSION
@@ -0,0 +1 @@
+0.0.1-dev
diff --git a/buildcollector.py b/buildcollector.py
new file mode 100644
index 0000000..7091197
--- /dev/null
+++ b/buildcollector.py
@@ -0,0 +1,443 @@
+#!/usr/bin/env python3
+
+# SPDX-FileCopyrightText: © 2020 Open Networking Foundation <support@opennetworking.org>
+# SPDX-License-Identifier: Apache-2.0
+
+from __future__ import absolute_import
+
+import argparse
+import base64
+import json
+import logging
+import os
+import re
+import sys
+import urllib.request
+import yaml
+
+from jsonpath_ng.ext import parse as jpparse
+
+# create shared logger
+logging.basicConfig()
+logger = logging.getLogger("sjsgc")
+
+# global dict of jsonpath expressions -> compiled jsonpath parsers, as
+# reparsing expressions in each loop results in 100x longer execution time
+gjpaths = {}
+
+# credentials are loaded in __main__ from the file given by --credentials
+
+
+def parse_collector_args():
+    """
+    parse CLI arguments
+    """
+
+    parser = argparse.ArgumentParser(description="Jenkins job results collector")
+
+    # Positional args
+    parser.add_argument(
+        "scrape_file",
+        default="scrape.yaml",
+        type=argparse.FileType("r"),
+        help="YAML file describing Jenkins job and data to scrape",
+    )
+
+    # Flags
+    parser.add_argument(
+        "--credentials",
+        type=argparse.FileType("r"),
+        help="Credentials to use for private jenkins jobs",
+    )
+
+    parser.add_argument(
+        "--local", action="store_true", help="Prefer local copies of build lists"
+    )
+
+    parser.add_argument(
+        "--product_dir", default="products", help="Directory to save per-product output"
+    )
+
+    parser.add_argument(
+        "--jobs_dir", default="jobs", help="Directory to save raw Jenkins job output"
+    )
+
+    parser.add_argument(
+        "--debug", action="store_true", help="Print additional debugging information"
+    )
+
+    return parser.parse_args()
+
+
+def jenkins_job_list_url(server_url, job_name):
+    """
+    create a Jenkins JSON API URL for a job (list of builds)
+    """
+
+    url = "%s/job/%s/api/json" % (server_url, job_name)
+    return url
+
+
+def jenkins_job_build_url(server_url, job_name, build_number):
+    """
+    create a Jenkins JSON API URL for a specific build of a job
+    """
+
+    url = "%s/job/%s/%d/api/json" % (server_url, job_name, build_number)
+    return url
+
+
+def basic_auth_header(username, password):
+    """
+    returns a tuple containing a http basic auth header
+    """
+    creds_str = "%s:%s" % (username, password)
+    creds_b64 = base64.standard_b64encode(creds_str.encode("utf-8"))
+
+    return ("Authorization", "Basic %s" % creds_b64.decode("utf-8"))
+
+
+def jenkins_api_get(url, headers=[]):
+    """
+    Get data from Jenkins JSON API endpoint, return data as a dict
+    """
+
+    request = urllib.request.Request(url)
+
+    # add headers tuples
+    for header in headers:
+        request.add_header(*header)
+
+    try:
+        response = urllib.request.urlopen(request)
+    except urllib.error.HTTPError:
+        logger.exception("Server encountered an HTTPError at URL: '%s'", url)
+        raise
+    except urllib.error.URLError:
+        logger.exception("An URLError occurred at URL: '%s'", url)
+        raise
+
+    # docs: https://docs.python.org/3/library/json.html
+    jsondata = response.read()
+    logger.debug("API response: %s", jsondata)
+
+    try:
+        data = json.loads(jsondata)
+    except json.decoder.JSONDecodeError:
+        logger.exception("Unable to decode JSON")
+        raise
+
+    logger.debug("JSON decoded: %s", data)
+
+    return data
+
+
+def json_file_load(path):
+    """
+    Get data from local file, return data as a dict
+    """
+
+    with open(path) as jf:
+        try:
+            data = json.loads(jf.read())
+        except json.decoder.JSONDecodeError:
+            logger.exception("Unable to decode JSON from file: '%s'", path)
+            raise
+
+    return data
+
+
+def json_file_dump(path, data):
+    """
+    Write JSON file out to a path, creating directories in path as needed
+    """
+
+    # create directory if it doesn't already exist
+    parent_dir = os.path.dirname(path)
+    os.makedirs(parent_dir, exist_ok=True)
+
+    # write file, pretty printed
+    with open(path, "w") as jf:
+        json.dump(data, jf, indent=2)
+
+
+def parse_scrape_file(scrape_file):
+    """
+    Load and check the YAML scrape file, returning a list of one or more documents
+    """
+
+    yout = list(yaml.safe_load_all(scrape_file))  # safe_load_all returns a generator
+    logger.debug("YAML decoded: %s", yout)
+
+    def check_required_keys(to_check, req_keys):
+        """
+        check that all required keys are found in the dict to check
+        """
+        for rk in req_keys:
+            if rk not in to_check:
+                logger.error("Required key '%s' not found in: '%s'", rk, to_check)
+                sys.exit(1)
+
+    # check that required keys exist in each YAML document
+    for ydoc in yout:
+        check_required_keys(ydoc, ["jenkins_jobs", "product_name", "onf_project"])
+
+        for group in ydoc["jenkins_jobs"]:
+            check_required_keys(group, ["group", "jenkins_url", "jobs"])
+
+            for job in group["jobs"]:
+                check_required_keys(job, ["name", "extract"])
+
+    return yout
+
+
+def jsonpath_extract(json_in, extract_list):
+    """
+    Extract data from json using list of jsonpath expressions
+    """
+
+    ret = {}
+
+    for name, jpath in extract_list.items():
+
+        # parsing jsonpath is expensive, so store compiled expressions in a
+        # global cache and reuse them
+        if jpath not in gjpaths:
+            gjpaths[jpath] = jpparse(jpath)
+
+        jexpr = gjpaths[jpath]
+
+        matches = [match.value for match in jexpr.find(json_in)]
+
+        # If only a single match, unwrap from list
+        if len(matches) == 1:
+            ret[name] = matches[0]
+        else:
+            ret[name] = matches
+
+    logger.debug("extracted data: %s", ret)
+
+    return ret
+
+
+def get_builds_for_job(jobs_dir, local, jenkins_url, job_name, headers=[]):
+    """
+    Download list of builds from a Jenkins job, return list of build ids
+    """
+
+    # where to store jenkins JSON output with builds list
+    jbuildlist = "%s/%s/%s/0_list.json" % (jobs_dir, clean_url(jenkins_url), job_name)
+
+    if os.path.isfile(jbuildlist) and local:
+        # if already downloaded and we want to use the local copy, load it
+        jl = json_file_load(jbuildlist)
+    else:
+        # if not, query jenkins for the list of job builds
+        jlu = jenkins_job_list_url(jenkins_url, job_name)
+        jl = jenkins_api_get(jlu, headers)
+
+        # save to disk
+        json_file_dump(jbuildlist, jl)
+
+    # JSONPath for list of builds in the job
+    jexpr = jpparse("builds[*].number")
+
+    # get a list of builds
+    buildlist = [build.value for build in jexpr.find(jl)]
+
+    return buildlist
+
+
+def get_jenkins_build(jobs_dir, jenkins_url, job_name, build_id, headers=[]):
+    """
+    Download a single build and store it on disk, if job has completed
+    """
+
+    # path to store a copy of the JSON received from Jenkins
+    jjson = "%s/%s/%s/%d_build.json" % (
+        jobs_dir,
+        clean_url(jenkins_url),
+        job_name,
+        build_id,
+    )
+
+    if os.path.isfile(jjson):
+        # if this has already run and a local copy exists, read/return it
+        braw = json_file_load(jjson)
+    else:
+        # make an API call to get the JSON, store locally
+        burl = jenkins_job_build_url(jenkins_url, job_name, build_id)
+        braw = jenkins_api_get(burl, headers)
+
+        # if the build is still in progress, the result field is null, so
+        # don't return the build or save a copy, as the status is not final
+        if not braw["result"]:
+            return None
+
+        # save to disk
+        json_file_dump(jjson, braw)
+
+    return braw
+
+
+def get_all_jenkins_builds(jobs_dir, jenkins_url, job_name, build_ids, headers=[]):
+    """
+    Get a list of all jenkins build data, for completed builds
+    """
+
+    builds_list = []
+
+    # download build data for all builds
+    for build_id in build_ids:
+
+        build = get_jenkins_build(
+            jobs_dir, jenkins_url, job_name, build_id, headers,
+        )
+
+        # may return None if build is in progress
+        if build:
+            builds_list.append(build)
+
+    return builds_list
+
+
+def clean_name(name):
+    """
+    Clean up a name string. Currently only replaces spaces with underscores
+    """
+    return name.replace(" ", "_")
+
+
+def clean_url(url):
+    """
+    remove prefix and any non-path friendly characters from URL
+    """
+    return re.sub(r"\W", "_", re.sub(r"\w+://", "", url))
+
+
+def save_product_builds(product_doc, product_dir, builds):
+    """
+    save the product-specific build information, if it's applicable to this
+    product based on the filters
+    """
+
+    # duplicate the scrape doc into final product data
+    product_data = dict(product_doc)
+
+    # used to hold groups of jobs
+    groups = {}
+
+    # each doc can have multiple job groups (usually version-specific)
+    for jobgroup in product_doc["jenkins_jobs"]:
+
+        groups[jobgroup["group"]] = {}
+
+        # each job group can have multiple jobs
+        for job in jobgroup["jobs"]:
+
+            pbuilds = []
+
+            # get the build data for the job
+            for build in builds[job["name"]]:
+
+                jpedata = jsonpath_extract(build, job["extract"])
+
+                # filter builds
+                save = True
+                if "filter" in job:
+                    for k, v in job["filter"].items():
+                        # if data doesn't match the filter value given, don't save it
+                        if jpedata[k] != v:
+                            save = False
+
+                if save:
+                    pbuilds.append(jpedata)
+
+            # allow job name to be overridden, for private jobs
+            if "name_override" in job:
+                groups[jobgroup["group"]][job["name_override"]] = pbuilds
+            else:
+                groups[jobgroup["group"]][job["name"]] = pbuilds
+
+    product_data["groups"] = groups
+
+    product_filename = "%s/%s.json" % (
+        product_dir,
+        clean_name(product_doc["product_name"]),
+    )
+
+    json_file_dump(product_filename, product_data)
+
+
+# main function that calls other functions
+if __name__ == "__main__":
+
+    args = parse_collector_args()
+
+    if not os.path.isdir(args.product_dir):
+        logger.error("Output directory is not a directory: '%s'", args.product_dir)
+        sys.exit(1)
+
+    # only print log messages if debugging
+    if args.debug:
+        logger.setLevel(logging.DEBUG)
+    else:
+        logger.setLevel(logging.CRITICAL)
+
+    # read in the credentials file, if the argument was passed
+    credentials = {}
+    if args.credentials:
+        cred_file = yaml.safe_load(args.credentials)
+        credentials = cred_file["credentials"]
+
+    # read in the Scrape File
+    sfile = parse_scrape_file(args.scrape_file)
+
+    # dict of job name -> build data
+    builds = {}
+
+    # Scrape File YAML may contain multiple documents
+    for sdoc in sfile:
+
+        # phase 1 - identify all the Jenkins jobs
+        # each doc can have multiple job groups (usually version-specific)
+        for jobgroup in sdoc["jenkins_jobs"]:
+
+            api_headers = []
+
+            if "credentials" in jobgroup:
+                if jobgroup["credentials"] in credentials:
+                    api_headers = [
+                        basic_auth_header(
+                            credentials[jobgroup["credentials"]]["jenkins_api_user"],
+                            credentials[jobgroup["credentials"]]["jenkins_api_token"],
+                        )
+                    ]
+                else:
+                    logger.error(
+                        "Credentials for '%s' not supplied", jobgroup["credentials"]
+                    )
+                    sys.exit(1)
+
+            # each job group can have multiple jobs
+            for job in jobgroup["jobs"]:
+
+                # only download jobs that haven't been downloaded before
+                if job["name"] not in builds:
+
+                    # get list of all build IDs for this job
+                    build_ids = get_builds_for_job(
+                        args.jobs_dir,
+                        args.local,
+                        jobgroup["jenkins_url"],
+                        job["name"],
+                        api_headers,
+                    )
+
+                    # get build info - either download or load from disk
+                    builds[job["name"]] = get_all_jenkins_builds(
+                        args.jobs_dir,
+                        jobgroup["jenkins_url"],
+                        job["name"],
+                        build_ids,
+                        api_headers,
+                    )
+
+        # phase 2 - create per-product (document) lists of build extracted data
+        save_product_builds(sdoc, args.product_dir, builds)
diff --git a/credentials.yaml.example b/credentials.yaml.example
new file mode 100644
index 0000000..ba59886
--- /dev/null
+++ b/credentials.yaml.example
@@ -0,0 +1,14 @@
+---
+# SPDX-FileCopyrightText: © 2020 Open Networking Foundation <support@opennetworking.org>
+# SPDX-License-Identifier: Apache-2.0
+
+# Jenkins API Tokens can be generated at the "me" page for a user after login:
+#   https://jenkins.<domain>/me/configure
+
+credentials:
+  aether:
+    jenkins_api_user: "jenkinsusername"
+    jenkins_api_token: "jenkinsapitoken"
+  anothercred:
+    jenkins_api_user: "jenkinsusername"
+    jenkins_api_token: "jenkinsapitoken"
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..d6a5fc2
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,5 @@
+# SPDX-FileCopyrightText: © 2020 Open Networking Foundation <support@opennetworking.org>
+# SPDX-License-Identifier: Apache-2.0
+PyYAML~=5.3.1
+jsonpath-ng~=1.5.1
+Jinja2~=2.11.2
diff --git a/scrape.yaml b/scrape.yaml
new file mode 100644
index 0000000..857dd07
--- /dev/null
+++ b/scrape.yaml
@@ -0,0 +1,365 @@
+---
+# SPDX-FileCopyrightText: © 2020 Open Networking Foundation <support@opennetworking.org>
+# SPDX-License-Identifier: Apache-2.0
+
+product_name: "Accelleran Small Cell E1000 Series"
+product_link: "https://opennetworking.org/products/accelleran-e1000-series/"
+onf_project: "Aether"
+
+jenkins_jobs:
+  - group: "master"
+    jenkins_url: "https://jenkins.opencord.org"
+    credentials: "aether"
+    jobs:
+      - name: "aether-member-only-jobs/job/aether_accelleran_func_production"
+        name_override: "aether-production"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          artifacts: "artifacts[*].fileName"
+
+---
+
+product_name: "Accelleran Software RAN Controller (dRAX)"
+product_link: "https://opennetworking.org/products/accelleran-drax/"
+onf_project: "Aether"
+
+jenkins_jobs:
+  - group: "master"
+    jenkins_url: "https://jenkins.opencord.org"
+    credentials: "aether"
+    jobs:
+      - name: "aether-member-only-jobs/job/aether_accelleran_func_production"
+        name_override: "aether-production"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          artifacts: "artifacts[*].fileName"
+
+---
+# Aether - Wiwynn EP100
+
+product_name: "Wiwynn EP100 Sled for OpenEdge"
+product_link: ""
+onf_project: "Aether"
+
+
+jenkins_jobs: []
+
+---
+# VOLTHA - Edgecore XGSPon
+product_name: "Edgecore ASXvOLT16"
+product_link: "https://opennetworking.org/products/edgecore-asxvolt16/"
+onf_project: "SEBA_VOLTHA"
+
+# jenkins config
+jenkins_jobs:
+  - group: "master - DT workflow"
+    jenkins_url: "https://jenkins.opencord.org"
+    jobs:
+      - name: "build_onf-demo-pod_1T8GEM_voltha_DT_master_test"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          artifacts: "artifacts[*].fileName"
+  - group: "2.5 - DT workflow"
+    jenkins_url: "https://jenkins.opencord.org"
+    jobs:
+      - name: "build_onf-demo-pod_1T8GEM_voltha_DT_2.5_test"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          artifacts: "artifacts[*].fileName"
+
+  - group: "master - AT&T workflow"
+    jenkins_url: "https://jenkins.opencord.org"
+    jobs:
+      - name: "build_flex-ocp-cord_Default_voltha_master_test"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          artifacts: "artifacts[*].fileName"
+
+  - group: "2.5 - AT&T workflow"
+    jenkins_url: "https://jenkins.opencord.org"
+    jobs:
+      - name: "build_flex-ocp-cord_1T4GEM_voltha_2.5_test"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          artifacts: "artifacts[*].fileName"
+---
+# VOLTHA - Edgecore Switch
+product_name: "Edgecore AS6712-32X"
+product_link: "https://opennetworking.org/products/edgecore-6712-54x/"
+onf_project: "SEBA_VOLTHA"
+
+# jenkins config
+jenkins_jobs:
+  - group: "master - DT workflow"
+    jenkins_url: "https://jenkins.opencord.org"
+    jobs:
+      - name: "build_onf-demo-pod_1T8GEM_voltha_DT_master_test"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          artifacts: "artifacts[*].fileName"
+  - group: "2.5 - DT workflow"
+    jenkins_url: "https://jenkins.opencord.org"
+    jobs:
+      - name: "build_onf-demo-pod_1T8GEM_voltha_DT_2.5_test"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          artifacts: "artifacts[*].fileName"
+
+  - group: "master - AT&T workflow"
+    jenkins_url: "https://jenkins.opencord.org"
+    jobs:
+      - name: "build_flex-ocp-cord_Default_voltha_master_test"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          artifacts: "artifacts[*].fileName"
+
+  - group: "2.5 - AT&T workflow"
+    jenkins_url: "https://jenkins.opencord.org"
+    jobs:
+      - name: "build_flex-ocp-cord_1T4GEM_voltha_2.5_test"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          artifacts: "artifacts[*].fileName"
+
+
+---
+# VOLTHA - Edgecore GPON
+product_name: "Edgecore ASGvOLT64"
+product_link: "https://opennetworking.org/products/edgecore-asgvolt64/"
+onf_project: "SEBA_VOLTHA"
+
+# jenkins config
+jenkins_jobs:
+  - group: "master"
+    jenkins_url: "https://jenkins.opencord.org"
+    jobs:
+      - name: "build_dt-berlin-pod-gpon_1T8GEM_voltha_DT_master_test"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          artifacts: "artifacts[*].fileName"
+  - group: "2.5"
+    jenkins_url: "https://jenkins.opencord.org"
+    jobs:
+      - name: "build_dt-berlin-pod-gpon_1T8GEM_voltha_DT_2.5_test"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          artifacts: "artifacts[*].fileName"
+
+
+---
+# VOLTHA - Edgecore Switch
+product_name: "Edgecore AS7712-32X"
+product_link: "https://opennetworking.org/products/edgecore-as7712-32x/"
+onf_project: "SEBA_VOLTHA"
+
+# jenkins config
+jenkins_jobs:
+  - group: "master"
+    jenkins_url: "https://jenkins.opencord.org"
+    jobs:
+      - name: "build_dt-berlin-pod-gpon_1T8GEM_voltha_DT_master_test"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          artifacts: "artifacts[*].fileName"
+  - group: "2.5"
+    jenkins_url: "https://jenkins.opencord.org"
+    jobs:
+      - name: "build_dt-berlin-pod-gpon_1T8GEM_voltha_DT_2.5_test"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          artifacts: "artifacts[*].fileName"
+
+---
+# VOLTHA - GPON ONU
+product_name: "Sercomm FG1000"
+product_link: "https://opennetworking.org/products/sercomm-fg1000/"
+onf_project: "SEBA_VOLTHA"
+
+# jenkins config
+jenkins_jobs:
+  - group: "master"
+    jenkins_url: "https://jenkins.opencord.org"
+    jobs:
+      - name: "build_dt-berlin-pod-gpon_1T8GEM_voltha_DT_master_test"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          artifacts: "artifacts[*].fileName"
+  - group: "2.5"
+    jenkins_url: "https://jenkins.opencord.org"
+    jobs:
+      - name: "build_dt-berlin-pod-gpon_1T8GEM_voltha_DT_2.5_test"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          artifacts: "artifacts[*].fileName"
+
+---
+# Stratum - Stordis BF6064X
+
+onf_project: "Stratum"
+product_name: "Stordis BF6064X"
+product_link: ""
+
+jenkins_jobs:
+  - group: "master"
+    jenkins_url: "https://jenkins.stratumproject.org"
+    jobs:
+      - name: "stratum-bf-test-master"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          switchname: "actions[?(@._class=='hudson.model.ParametersAction')].parameters[?(@.name=='SWITCH_NAME')].value"
+        filter:
+          switchname: "x86-64-stordis-bf6064x-t-r0"
+
+---
+# stratum - Stordis BF2556X
+
+onf_project: "Stratum"
+product_name: "Stordis BF2556X"
+product_link: ""
+
+jenkins_jobs:
+  - group: "master"
+    jenkins_url: "https://jenkins.stratumproject.org"
+    jobs:
+      - name: "stratum-bf-test-master"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          switchname: "actions[?(@._class=='hudson.model.ParametersAction')].parameters[?(@.name=='SWITCH_NAME')].value"
+        filter:
+          switchname: "x86-64-stordis-bf2556x-1t-r0"
+
+---
+# stratum - Inventec D5254
+
+onf_project: "Stratum"
+product_name: "Inventec D5254"
+product_link: "https://opennetworking.org/products/inventec-d5254/"
+
+jenkins_jobs:
+  - group: "master"
+    jenkins_url: "https://jenkins.stratumproject.org"
+    jobs:
+      - name: "stratum-bf-test-master"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          switchname: "actions[?(@._class=='hudson.model.ParametersAction')].parameters[?(@.name=='SWITCH_NAME')].value"
+        filter:
+          switchname: "x86-64-inventec-d5254-r0"
+
+---
+# stratum - Edgecore Wedge100BF-32X
+
+onf_project: "Stratum"
+product_name: "Edgecore Wedge100BF-32X"
+product_link: "https://opennetworking.org/products/edgecore-wedge-100bf-32x/"
+
+jenkins_jobs:
+  - group: "master"
+    jenkins_url: "https://jenkins.stratumproject.org"
+    jobs:
+      - name: "stratum-bf-test-master"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          switchname: "actions[?(@._class=='hudson.model.ParametersAction')].parameters[?(@.name=='SWITCH_NAME')].value"
+        filter:
+          switchname: "x86-64-accton-wedge100bf-32x-r0"
+
+---
+# stratum - Edgecore AS7712-32X
+
+onf_project: "Stratum"
+product_name: "Edgecore AS7712-32X"
+product_link: "https://opennetworking.org/products/edgecore-as7712-32x/"
+
+jenkins_jobs:
+  - group: "master"
+    jenkins_url: "https://jenkins.stratumproject.org"
+    jobs:
+      - name: "stratum-bcm-test-master"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          switchname: "actions[?(@._class=='hudson.model.ParametersAction')].parameters[?(@.name=='SWITCH_NAME')].value"
+        filter:
+          switchname: "x86-64-accton-as7712-32x-r0"
+
+---
+# stratum - Inventec D7032Q28B
+
+onf_project: "Stratum"
+product_name: "Inventec D7032Q28B"
+product_link: ""
+
+jenkins_jobs:
+  - group: "master"
+    jenkins_url: "https://jenkins.stratumproject.org"
+    jobs:
+      - name: "stratum-bcm-test-master"
+        extract:
+          id: "id"
+          result: "result"
+          timestamp: "timestamp"
+          build_url: "url"
+          switchname: "actions[?(@._class=='hudson.model.ParametersAction')].parameters[?(@.name=='SWITCH_NAME')].value"
+        filter:
+          switchname: "x86-64-inventec-d7032q28b-r0"
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..a21a65b
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+
+# SPDX-FileCopyrightText: © 2020 Open Networking Foundation <support@opennetworking.org>
+# SPDX-License-Identifier: Apache-2.0
+
+from __future__ import absolute_import
+from setuptools import setup
+
+
+def version():
+    with open("VERSION") as f:
+        return f.read()
+
+
+def parse_requirements(filename):
+    # parse a requirements.txt file, allowing for blank lines and comments
+    requirements = []
+    for line in open(filename):
+        # strip whitespace so blank lines are actually skipped
+        line = line.strip()
+        if line and not line.startswith("#"):
+            requirements.append(line)
+    return requirements
+
+
+setup(
+    name="sjsg",
+    version=version(),
+    description="Static Jenkins Site Generator",
+    author="Open Networking Foundation",
+    author_email="support@opennetworking.org",
+    license="Apache v2",
+    install_requires=parse_requirements("requirements.txt"),
+    classifiers=[
+        "License :: OSI Approved :: Apache Software License",
+        "Programming Language :: Python :: 3.7",
+    ],
+)
diff --git a/siterender.py b/siterender.py
new file mode 100644
index 0000000..315f8df
--- /dev/null
+++ b/siterender.py
@@ -0,0 +1,257 @@
+#!/usr/bin/env python3
+
+# SPDX-FileCopyrightText: © 2020 Open Networking Foundation <support@opennetworking.org>
+# SPDX-License-Identifier: Apache-2.0
+
+from __future__ import absolute_import
+
+import argparse
+import datetime
+import jinja2
+import json
+import logging
+import os
+
+# create shared logger
+logging.basicConfig()
+logger = logging.getLogger("sjsgsr")
+
+# script starttime, used for timeboxing
+starttime = datetime.datetime.now(datetime.timezone.utc)
+startdate = datetime.datetime(
+    starttime.year, starttime.month, starttime.day, tzinfo=datetime.timezone.utc
+)
+
+
+def parse_sitebuilder_args():
+    """
+    parse CLI arguments
+    """
+
+    parser = argparse.ArgumentParser(description="Jenkins job results site renderer")
+
+    def readable_dir(path):
+        if os.path.isdir(path) and os.access(path, os.R_OK):
+            return path
+        raise argparse.ArgumentTypeError("%s is not a directory or unreadable" % path)
+
+    # Flags
+    parser.add_argument(
+        "--product_dir",
+        default="products",
+        type=readable_dir,
+        help="Directory containing product JSON created by buildcollector.py",
+    )
+    parser.add_argument(
+        "--template_dir",
+        default="templates",
+        type=readable_dir,
+        help="Directory with Jinja2 templates",
+    )
+    parser.add_argument(
+        "--site_dir",
+        default="site",
+        type=readable_dir,
+        help="Directory to write the static site into",
+    )
+    parser.add_argument(
+        "--debug", action="store_true", help="Print additional debugging information"
+    )
+
+    return parser.parse_args()
+
+
+def jinja_env(template_dir):
+    """
+    Returns a Jinja2 environment loading files from template_dir
+    """
+
+    env = jinja2.Environment(
+        loader=jinja2.FileSystemLoader(template_dir),
+        autoescape=jinja2.select_autoescape(["html"]),
+        undefined=jinja2.StrictUndefined,
+        trim_blocks=True,
+        lstrip_blocks=True,
+    )
+
+    # Jinja2 filters
+    def tsdatetime(value, fmt="%Y-%m-%d %H:%M %Z"):
+        # Jenkins timestamps have ms precision, divide by 1000 for seconds
+        dateval = datetime.datetime.fromtimestamp(
+            value // 1000, tz=datetime.timezone.utc
+        )
+        return dateval.strftime(fmt)
+
+    def tsdate(value, fmt="%Y-%m-%d"):
+        # Jenkins timestamps have ms precision, divide by 1000 for seconds
+        dateval = datetime.datetime.fromtimestamp(
+            value // 1000, tz=datetime.timezone.utc
+        )
+        return dateval.strftime(fmt)
+
+    def timebox(value, boxsize=24, count=7):
+        """
+        Given a list of builds, group them into sequential time ranges (boxes)
+        Boxsize is given in hours
+        Count is the number of boxes to return
+        Time starts from now()
+        """
+        retbox = []
+
+        nowms = startdate.timestamp() * 1000  # ms precision datetime
+
+        hourms = 60 * 60 * 1000  # ms in an hour
+
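+        # box -1 covers the current day: startdate is midnight UTC today, so
+        # builds stamped later today fall between nowms and nowms + boxsize
+        # hours; each following box steps one boxsize further into the past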
+        for box in range(-1, count - 1):
+
+            startms = nowms - (box * hourms * boxsize)
+            endms = nowms - ((box + 1) * hourms * boxsize)
+            timebox_builds = 0
+            success_count = 0
+
+            builds = []
+
+            for bdata in value:
+                # loops multiple times over entire list of builds, could be
+                # optimized
+
+                bt = int(bdata["timestamp"])
+
+                if startms > bt > endms:
+                    timebox_builds += 1
+
+                    builds.append(bdata)
+
+                    if bdata["result"] == "SUCCESS":
+                        success_count += 1
+
+            # determine overall status for the timebox
+            if timebox_builds == 0:
+                status = "NONE"
+            elif timebox_builds == success_count:
+                status = "SUCCESS"
+            elif success_count == 0:
+                status = "FAILURE"
+            else:  # timebox_builds > success_count
+                status = "UNSTABLE"
+
+            retbox.append(
+                {
+                    "result": status,
+                    "box_start": int(endms),
+                    "outcome": "%d of %d" % (success_count, timebox_builds),
+                    "builds": builds,
+                }
+            )
+
+        return retbox
+
+    env.filters["tsdatetime"] = tsdatetime
+    env.filters["tsdate"] = tsdate
+    env.filters["timebox"] = timebox
+
+    return env
+
+
+def clean_name(name):
+    """
+    Clean up a name string. Currently only replaces spaces with underscores
+    """
+    return name.replace(" ", "_")
+
+
+def render_to_file(j2env, context, template_name, path):
+    """
+    Render out a template to file
+    """
+    parent_dir = os.path.dirname(path)
+    os.makedirs(parent_dir, exist_ok=True)
+
+    template = j2env.get_template(template_name)
+
+    with open(path, "w") as outfile:
+        outfile.write(template.render(context))
+
+
+def json_file_load(path):
+    """
+    Get data from local file, return data as a dict
+    """
+
+    with open(path) as jf:
+        try:
+            data = json.loads(jf.read())
+        except json.decoder.JSONDecodeError:
+            logger.exception("Unable to decode JSON from file: '%s'", path)
+
+    return data
+
+
+# main function that calls other functions
+if __name__ == "__main__":
+
+    args = parse_sitebuilder_args()
+
+    # only print log messages if debugging
+    if args.debug:
+        logger.setLevel(logging.DEBUG)
+    else:
+        logger.setLevel(logging.CRITICAL)
+
+    j2env = jinja_env(args.template_dir)
+
+    buildtime = starttime.strftime("%Y-%m-%d %H:%M %Z")
+
+    # init list of projects
+    projects = {}
+
+    # list of products
+    prodfiles = os.listdir(args.product_dir)
+
+    for prodfile in prodfiles:
+
+        # load product, and set buildtime
+        logger.debug("loading file file: '%s'", prodfile)
+        product = json_file_load("%s/%s" % (args.product_dir, prodfile))
+        product.update({"buildtime": buildtime})
+
+        projname = product["onf_project"]
+
+        # build product filename, write out template
+        site_prod_filename = "%s/%s/%s/index.html" % (
+            args.site_dir,
+            projname,
+            clean_name(product["product_name"]),
+        )
+        render_to_file(j2env, product, "product.html", site_prod_filename)
+
+        # product to project list
+        if projname not in projects:
+            projects[projname] = [product]
+        else:
+            projects[projname].append(product)
+
+    # list of projects
+    for projname in sorted(projects.keys()):
+
+        proj_filename = "%s/%s/index.html" % (args.site_dir, projname)
+
+        products = projects[projname]
+
+        project_context = {
+            "buildtime": buildtime,
+            "project_name": projname,
+            "products": products,
+        }
+
+        render_to_file(j2env, project_context, "project.html", proj_filename)
+
+    # render index file
+    index_filename = "%s/index.html" % args.site_dir
+
+    index_context = {
+        "buildtime": buildtime,
+        "projects": projects,
+    }
+
+    render_to_file(j2env, index_context, "index.html", index_filename)
diff --git a/static/fonts/gilroy-regular-webfont.woff b/static/fonts/gilroy-regular-webfont.woff
new file mode 100644
index 0000000..efefa8a
--- /dev/null
+++ b/static/fonts/gilroy-regular-webfont.woff
Binary files differ
diff --git a/static/fonts/gilroy-regular-webfont.woff2 b/static/fonts/gilroy-regular-webfont.woff2
new file mode 100644
index 0000000..2363d81
--- /dev/null
+++ b/static/fonts/gilroy-regular-webfont.woff2
Binary files differ
diff --git a/static/fonts/gilroy-regularitalic-webfont.woff b/static/fonts/gilroy-regularitalic-webfont.woff
new file mode 100644
index 0000000..ede965c
--- /dev/null
+++ b/static/fonts/gilroy-regularitalic-webfont.woff
Binary files differ
diff --git a/static/fonts/gilroy-regularitalic-webfont.woff2 b/static/fonts/gilroy-regularitalic-webfont.woff2
new file mode 100644
index 0000000..28a6220
--- /dev/null
+++ b/static/fonts/gilroy-regularitalic-webfont.woff2
Binary files differ
diff --git a/static/fonts/gilroy-semibold-webfont.woff b/static/fonts/gilroy-semibold-webfont.woff
new file mode 100644
index 0000000..8d0e0ed
--- /dev/null
+++ b/static/fonts/gilroy-semibold-webfont.woff
Binary files differ
diff --git a/static/fonts/gilroy-semibold-webfont.woff2 b/static/fonts/gilroy-semibold-webfont.woff2
new file mode 100644
index 0000000..d60a16f
--- /dev/null
+++ b/static/fonts/gilroy-semibold-webfont.woff2
Binary files differ
diff --git a/static/images/onf_logo.png b/static/images/onf_logo.png
new file mode 100644
index 0000000..c489dfd
--- /dev/null
+++ b/static/images/onf_logo.png
Binary files differ
diff --git a/static/scripts/list.min.js b/static/scripts/list.min.js
new file mode 100644
index 0000000..3cb2737
--- /dev/null
+++ b/static/scripts/list.min.js
@@ -0,0 +1,2 @@
+/*! List.js v1.5.0 (http://listjs.com) by Jonny Strömberg (http://javve.com) */
+var List=function(t){function e(n){if(r[n])return r[n].exports;var i=r[n]={i:n,l:!1,exports:{}};return t[n].call(i.exports,i,i.exports,e),i.l=!0,i.exports}var r={};return e.m=t,e.c=r,e.i=function(t){return t},e.d=function(t,r,n){e.o(t,r)||Object.defineProperty(t,r,{configurable:!1,enumerable:!0,get:n})},e.n=function(t){var r=t&&t.__esModule?function(){return t.default}:function(){return t};return e.d(r,"a",r),r},e.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},e.p="",e(e.s=11)}([function(t,e,r){function n(t){if(!t||!t.nodeType)throw new Error("A DOM element reference is required");this.el=t,this.list=t.classList}var i=r(4),s=/\s+/;Object.prototype.toString;t.exports=function(t){return new n(t)},n.prototype.add=function(t){if(this.list)return this.list.add(t),this;var e=this.array(),r=i(e,t);return~r||e.push(t),this.el.className=e.join(" "),this},n.prototype.remove=function(t){if(this.list)return this.list.remove(t),this;var e=this.array(),r=i(e,t);return~r&&e.splice(r,1),this.el.className=e.join(" "),this},n.prototype.toggle=function(t,e){return this.list?("undefined"!=typeof e?e!==this.list.toggle(t,e)&&this.list.toggle(t):this.list.toggle(t),this):("undefined"!=typeof e?e?this.add(t):this.remove(t):this.has(t)?this.remove(t):this.add(t),this)},n.prototype.array=function(){var t=this.el.getAttribute("class")||"",e=t.replace(/^\s+|\s+$/g,""),r=e.split(s);return""===r[0]&&r.shift(),r},n.prototype.has=n.prototype.contains=function(t){return this.list?this.list.contains(t):!!~i(this.array(),t)}},function(t,e,r){var n=window.addEventListener?"addEventListener":"attachEvent",i=window.removeEventListener?"removeEventListener":"detachEvent",s="addEventListener"!==n?"on":"",a=r(5);e.bind=function(t,e,r,i){t=a(t);for(var o=0;o<t.length;o++)t[o][n](s+e,r,i||!1)},e.unbind=function(t,e,r,n){t=a(t);for(var o=0;o<t.length;o++)t[o][i](s+e,r,n||!1)}},function(t,e){t.exports=function(t){return function(e,r,n){var i=this;this._values={},this.found=!1,this.filtered=!1;var s=function(e,r,n){if(void 0===r)n?i.values(e,n):i.values(e);else{i.elm=r;var s=t.templater.get(i,e);i.values(s)}};this.values=function(e,r){if(void 0===e)return i._values;for(var n in e)i._values[n]=e[n];r!==!0&&t.templater.set(i,i.values())},this.show=function(){t.templater.show(i)},this.hide=function(){t.templater.hide(i)},this.matching=function(){return t.filtered&&t.searched&&i.found&&i.filtered||t.filtered&&!t.searched&&i.filtered||!t.filtered&&t.searched&&i.found||!t.filtered&&!t.searched},this.visible=function(){return!(!i.elm||i.elm.parentNode!=t.list)},s(e,r,n)}}},function(t,e){var r=function(t,e,r){return r?t.getElementsByClassName(e)[0]:t.getElementsByClassName(e)},n=function(t,e,r){return e="."+e,r?t.querySelector(e):t.querySelectorAll(e)},i=function(t,e,r){for(var n=[],i="*",s=t.getElementsByTagName(i),a=s.length,o=new RegExp("(^|\\s)"+e+"(\\s|$)"),l=0,u=0;l<a;l++)if(o.test(s[l].className)){if(r)return s[l];n[u]=s[l],u++}return n};t.exports=function(){return function(t,e,s,a){return a=a||{},a.test&&a.getElementsByClassName||!a.test&&document.getElementsByClassName?r(t,e,s):a.test&&a.querySelector||!a.test&&document.querySelector?n(t,e,s):i(t,e,s)}}()},function(t,e){var r=[].indexOf;t.exports=function(t,e){if(r)return t.indexOf(e);for(var n=0;n<t.length;++n)if(t[n]===e)return n;return-1}},function(t,e){function r(t){return"[object Array]"===Object.prototype.toString.call(t)}t.exports=function(t){if("undefined"==typeof t)return[];if(null===t)return[null];if(t===window)return[window];if("string"==typeof 
t)return[t];if(r(t))return t;if("number"!=typeof t.length)return[t];if("function"==typeof t&&t instanceof Function)return[t];for(var e=[],n=0;n<t.length;n++)(Object.prototype.hasOwnProperty.call(t,n)||n in t)&&e.push(t[n]);return e.length?e:[]}},function(t,e){t.exports=function(t){return t=void 0===t?"":t,t=null===t?"":t,t=t.toString()}},function(t,e){t.exports=function(t){for(var e,r=Array.prototype.slice.call(arguments,1),n=0;e=r[n];n++)if(e)for(var i in e)t[i]=e[i];return t}},function(t,e){t.exports=function(t){var e=function(r,n,i){var s=r.splice(0,50);i=i||[],i=i.concat(t.add(s)),r.length>0?setTimeout(function(){e(r,n,i)},1):(t.update(),n(i))};return e}},function(t,e){t.exports=function(t){return t.handlers.filterStart=t.handlers.filterStart||[],t.handlers.filterComplete=t.handlers.filterComplete||[],function(e){if(t.trigger("filterStart"),t.i=1,t.reset.filter(),void 0===e)t.filtered=!1;else{t.filtered=!0;for(var r=t.items,n=0,i=r.length;n<i;n++){var s=r[n];e(s)?s.filtered=!0:s.filtered=!1}}return t.update(),t.trigger("filterComplete"),t.visibleItems}}},function(t,e,r){var n=(r(0),r(1)),i=r(7),s=r(6),a=r(3),o=r(19);t.exports=function(t,e){e=e||{},e=i({location:0,distance:100,threshold:.4,multiSearch:!0,searchClass:"fuzzy-search"},e);var r={search:function(n,i){for(var s=e.multiSearch?n.replace(/ +$/,"").split(/ +/):[n],a=0,o=t.items.length;a<o;a++)r.item(t.items[a],i,s)},item:function(t,e,n){for(var i=!0,s=0;s<n.length;s++){for(var a=!1,o=0,l=e.length;o<l;o++)r.values(t.values(),e[o],n[s])&&(a=!0);a||(i=!1)}t.found=i},values:function(t,r,n){if(t.hasOwnProperty(r)){var i=s(t[r]).toLowerCase();if(o(i,n,e))return!0}return!1}};return n.bind(a(t.listContainer,e.searchClass),"keyup",function(e){var n=e.target||e.srcElement;t.search(n.value,r.search)}),function(e,n){t.search(e,n,r.search)}}},function(t,e,r){var n=r(18),i=r(3),s=r(7),a=r(4),o=r(1),l=r(6),u=r(0),c=r(17),f=r(5);t.exports=function(t,e,h){var d,v=this,m=r(2)(v),g=r(8)(v),p=r(12)(v);d={start:function(){v.listClass="list",v.searchClass="search",v.sortClass="sort",v.page=1e4,v.i=1,v.items=[],v.visibleItems=[],v.matchingItems=[],v.searched=!1,v.filtered=!1,v.searchColumns=void 0,v.handlers={updated:[]},v.valueNames=[],v.utils={getByClass:i,extend:s,indexOf:a,events:o,toString:l,naturalSort:n,classes:u,getAttribute:c,toArray:f},v.utils.extend(v,e),v.listContainer="string"==typeof t?document.getElementById(t):t,v.listContainer&&(v.list=i(v.listContainer,v.listClass,!0),v.parse=r(13)(v),v.templater=r(16)(v),v.search=r(14)(v),v.filter=r(9)(v),v.sort=r(15)(v),v.fuzzySearch=r(10)(v,e.fuzzySearch),this.handlers(),this.items(),this.pagination(),v.update())},handlers:function(){for(var t in v.handlers)v[t]&&v.on(t,v[t])},items:function(){v.parse(v.list),void 0!==h&&v.add(h)},pagination:function(){if(void 0!==e.pagination){e.pagination===!0&&(e.pagination=[{}]),void 0===e.pagination[0]&&(e.pagination=[e.pagination]);for(var t=0,r=e.pagination.length;t<r;t++)p(e.pagination[t])}}},this.reIndex=function(){v.items=[],v.visibleItems=[],v.matchingItems=[],v.searched=!1,v.filtered=!1,v.parse(v.list)},this.toJSON=function(){for(var t=[],e=0,r=v.items.length;e<r;e++)t.push(v.items[e].values());return t},this.add=function(t,e){if(0!==t.length){if(e)return void g(t,e);var r=[],n=!1;void 0===t[0]&&(t=[t]);for(var i=0,s=t.length;i<s;i++){var a=null;n=v.items.length>v.page,a=new m(t[i],void 0,n),v.items.push(a),r.push(a)}return v.update(),r}},this.show=function(t,e){return this.i=t,this.page=e,v.update(),v},this.remove=function(t,e,r){for(var 
n=0,i=0,s=v.items.length;i<s;i++)v.items[i].values()[t]==e&&(v.templater.remove(v.items[i],r),v.items.splice(i,1),s--,i--,n++);return v.update(),n},this.get=function(t,e){for(var r=[],n=0,i=v.items.length;n<i;n++){var s=v.items[n];s.values()[t]==e&&r.push(s)}return r},this.size=function(){return v.items.length},this.clear=function(){return v.templater.clear(),v.items=[],v},this.on=function(t,e){return v.handlers[t].push(e),v},this.off=function(t,e){var r=v.handlers[t],n=a(r,e);return n>-1&&r.splice(n,1),v},this.trigger=function(t){for(var e=v.handlers[t].length;e--;)v.handlers[t][e](v);return v},this.reset={filter:function(){for(var t=v.items,e=t.length;e--;)t[e].filtered=!1;return v},search:function(){for(var t=v.items,e=t.length;e--;)t[e].found=!1;return v}},this.update=function(){var t=v.items,e=t.length;v.visibleItems=[],v.matchingItems=[],v.templater.clear();for(var r=0;r<e;r++)t[r].matching()&&v.matchingItems.length+1>=v.i&&v.visibleItems.length<v.page?(t[r].show(),v.visibleItems.push(t[r]),v.matchingItems.push(t[r])):t[r].matching()?(v.matchingItems.push(t[r]),t[r].hide()):t[r].hide();return v.trigger("updated"),v},d.start()}},function(t,e,r){var n=r(0),i=r(1),s=r(11);t.exports=function(t){var e=function(e,i){var s,o=t.matchingItems.length,l=t.i,u=t.page,c=Math.ceil(o/u),f=Math.ceil(l/u),h=i.innerWindow||2,d=i.left||i.outerWindow||0,v=i.right||i.outerWindow||0;v=c-v,e.clear();for(var m=1;m<=c;m++){var g=f===m?"active":"";r.number(m,d,v,f,h)?(s=e.add({page:m,dotted:!1})[0],g&&n(s.elm).add(g),a(s.elm,m,u)):r.dotted(e,m,d,v,f,h,e.size())&&(s=e.add({page:"...",dotted:!0})[0],n(s.elm).add("disabled"))}},r={number:function(t,e,r,n,i){return this.left(t,e)||this.right(t,r)||this.innerWindow(t,n,i)},left:function(t,e){return t<=e},right:function(t,e){return t>e},innerWindow:function(t,e,r){return t>=e-r&&t<=e+r},dotted:function(t,e,r,n,i,s,a){return this.dottedLeft(t,e,r,n,i,s)||this.dottedRight(t,e,r,n,i,s,a)},dottedLeft:function(t,e,r,n,i,s){return e==r+1&&!this.innerWindow(e,i,s)&&!this.right(e,n)},dottedRight:function(t,e,r,n,i,s,a){return!t.items[a-1].values().dotted&&(e==n&&!this.innerWindow(e,i,s)&&!this.right(e,n))}},a=function(e,r,n){i.bind(e,"click",function(){t.show((r-1)*n+1,n)})};return function(r){var n=new s(t.listContainer.id,{listClass:r.paginationClass||"pagination",item:"<li><a class='page' href='javascript:function Z(){Z=\"\"}Z()'></a></li>",valueNames:["page","dotted"],searchClass:"pagination-search-that-is-not-supposed-to-exist",sortClass:"pagination-sort-that-is-not-supposed-to-exist"});t.on("updated",function(){e(n,r)}),e(n,r)}}},function(t,e,r){t.exports=function(t){var e=r(2)(t),n=function(t){for(var e=t.childNodes,r=[],n=0,i=e.length;n<i;n++)void 0===e[n].data&&r.push(e[n]);return r},i=function(r,n){for(var i=0,s=r.length;i<s;i++)t.items.push(new e(n,r[i]))},s=function(e,r){var n=e.splice(0,50);i(n,r),e.length>0?setTimeout(function(){s(e,r)},1):(t.update(),t.trigger("parseComplete"))};return t.handlers.parseComplete=t.handlers.parseComplete||[],function(){var e=n(t.list),r=t.valueNames;t.indexAsync?s(e,r):i(e,r)}}},function(t,e){t.exports=function(t){var e,r,n,i,s={resetList:function(){t.i=1,t.templater.clear(),i=void 0},setOptions:function(t){2==t.length&&t[1]instanceof Array?r=t[1]:2==t.length&&"function"==typeof t[1]?(r=void 0,i=t[1]):3==t.length?(r=t[1],i=t[2]):r=void 0},setColumns:function(){0!==t.items.length&&void 0===r&&(r=void 
0===t.searchColumns?s.toArray(t.items[0].values()):t.searchColumns)},setSearchString:function(e){e=t.utils.toString(e).toLowerCase(),e=e.replace(/[-[\]{}()*+?.,\\^$|#]/g,"\\$&"),n=e},toArray:function(t){var e=[];for(var r in t)e.push(r);return e}},a={list:function(){for(var e=0,r=t.items.length;e<r;e++)a.item(t.items[e])},item:function(t){t.found=!1;for(var e=0,n=r.length;e<n;e++)if(a.values(t.values(),r[e]))return void(t.found=!0)},values:function(r,i){return!!(r.hasOwnProperty(i)&&(e=t.utils.toString(r[i]).toLowerCase(),""!==n&&e.search(n)>-1))},reset:function(){t.reset.search(),t.searched=!1}},o=function(e){return t.trigger("searchStart"),s.resetList(),s.setSearchString(e),s.setOptions(arguments),s.setColumns(),""===n?a.reset():(t.searched=!0,i?i(n,r):a.list()),t.update(),t.trigger("searchComplete"),t.visibleItems};return t.handlers.searchStart=t.handlers.searchStart||[],t.handlers.searchComplete=t.handlers.searchComplete||[],t.utils.events.bind(t.utils.getByClass(t.listContainer,t.searchClass),"keyup",function(e){var r=e.target||e.srcElement,n=""===r.value&&!t.searched;n||o(r.value)}),t.utils.events.bind(t.utils.getByClass(t.listContainer,t.searchClass),"input",function(t){var e=t.target||t.srcElement;""===e.value&&o("")}),o}},function(t,e){t.exports=function(t){var e={els:void 0,clear:function(){for(var r=0,n=e.els.length;r<n;r++)t.utils.classes(e.els[r]).remove("asc"),t.utils.classes(e.els[r]).remove("desc")},getOrder:function(e){var r=t.utils.getAttribute(e,"data-order");return"asc"==r||"desc"==r?r:t.utils.classes(e).has("desc")?"asc":t.utils.classes(e).has("asc")?"desc":"asc"},getInSensitive:function(e,r){var n=t.utils.getAttribute(e,"data-insensitive");"false"===n?r.insensitive=!1:r.insensitive=!0},setOrder:function(r){for(var n=0,i=e.els.length;n<i;n++){var s=e.els[n];if(t.utils.getAttribute(s,"data-sort")===r.valueName){var a=t.utils.getAttribute(s,"data-order");"asc"==a||"desc"==a?a==r.order&&t.utils.classes(s).add(r.order):t.utils.classes(s).add(r.order)}}}},r=function(){t.trigger("sortStart");var r={},n=arguments[0].currentTarget||arguments[0].srcElement||void 0;n?(r.valueName=t.utils.getAttribute(n,"data-sort"),e.getInSensitive(n,r),r.order=e.getOrder(n)):(r=arguments[1]||r,r.valueName=arguments[0],r.order=r.order||"asc",r.insensitive="undefined"==typeof r.insensitive||r.insensitive),e.clear(),e.setOrder(r);var i,s=r.sortFunction||t.sortFunction||null,a="desc"===r.order?-1:1;i=s?function(t,e){return s(t,e,r)*a}:function(e,n){var i=t.utils.naturalSort;return i.alphabet=t.alphabet||r.alphabet||void 0,!i.alphabet&&r.insensitive&&(i=t.utils.naturalSort.caseInsensitive),i(e.values()[r.valueName],n.values()[r.valueName])*a},t.items.sort(i),t.update(),t.trigger("sortComplete")};return t.handlers.sortStart=t.handlers.sortStart||[],t.handlers.sortComplete=t.handlers.sortComplete||[],e.els=t.utils.getByClass(t.listContainer,t.sortClass),t.utils.events.bind(e.els,"click",r),t.on("searchStart",e.clear),t.on("filterStart",e.clear),r}},function(t,e){var r=function(t){var e,r=this,n=function(){e=r.getItemSource(t.item),e&&(e=r.clearSourceItem(e,t.valueNames))};this.clearSourceItem=function(e,r){for(var n=0,i=r.length;n<i;n++){var s;if(r[n].data)for(var a=0,o=r[n].data.length;a<o;a++)e.setAttribute("data-"+r[n].data[a],"");else r[n].attr&&r[n].name?(s=t.utils.getByClass(e,r[n].name,!0),s&&s.setAttribute(r[n].attr,"")):(s=t.utils.getByClass(e,r[n],!0),s&&(s.innerHTML=""));s=void 0}return e},this.getItemSource=function(e){if(void 0===e){for(var 
r=t.list.childNodes,n=0,i=r.length;n<i;n++)if(void 0===r[n].data)return r[n].cloneNode(!0)}else{if(/<tr[\s>]/g.exec(e)){var s=document.createElement("tbody");return s.innerHTML=e,s.firstChild}if(e.indexOf("<")!==-1){var a=document.createElement("div");return a.innerHTML=e,a.firstChild}var o=document.getElementById(t.item);if(o)return o}},this.get=function(e,n){r.create(e);for(var i={},s=0,a=n.length;s<a;s++){var o;if(n[s].data)for(var l=0,u=n[s].data.length;l<u;l++)i[n[s].data[l]]=t.utils.getAttribute(e.elm,"data-"+n[s].data[l]);else n[s].attr&&n[s].name?(o=t.utils.getByClass(e.elm,n[s].name,!0),i[n[s].name]=o?t.utils.getAttribute(o,n[s].attr):""):(o=t.utils.getByClass(e.elm,n[s],!0),i[n[s]]=o?o.innerHTML:"");o=void 0}return i},this.set=function(e,n){var i=function(e){for(var r=0,n=t.valueNames.length;r<n;r++)if(t.valueNames[r].data){for(var i=t.valueNames[r].data,s=0,a=i.length;s<a;s++)if(i[s]===e)return{data:e}}else{if(t.valueNames[r].attr&&t.valueNames[r].name&&t.valueNames[r].name==e)return t.valueNames[r];if(t.valueNames[r]===e)return e}},s=function(r,n){var s,a=i(r);a&&(a.data?e.elm.setAttribute("data-"+a.data,n):a.attr&&a.name?(s=t.utils.getByClass(e.elm,a.name,!0),s&&s.setAttribute(a.attr,n)):(s=t.utils.getByClass(e.elm,a,!0),s&&(s.innerHTML=n)),s=void 0)};if(!r.create(e))for(var a in n)n.hasOwnProperty(a)&&s(a,n[a])},this.create=function(t){if(void 0!==t.elm)return!1;if(void 0===e)throw new Error("The list need to have at list one item on init otherwise you'll have to add a template.");var n=e.cloneNode(!0);return n.removeAttribute("id"),t.elm=n,r.set(t,t.values()),!0},this.remove=function(e){e.elm.parentNode===t.list&&t.list.removeChild(e.elm)},this.show=function(e){r.create(e),t.list.appendChild(e.elm)},this.hide=function(e){void 0!==e.elm&&e.elm.parentNode===t.list&&t.list.removeChild(e.elm)},this.clear=function(){if(t.list.hasChildNodes())for(;t.list.childNodes.length>=1;)t.list.removeChild(t.list.firstChild)},n()};t.exports=function(t){return new r(t)}},function(t,e){t.exports=function(t,e){var r=t.getAttribute&&t.getAttribute(e)||null;if(!r)for(var n=t.attributes,i=n.length,s=0;s<i;s++)void 0!==e[s]&&e[s].nodeName===e&&(r=e[s].nodeValue);return r}},function(t,e,r){"use strict";function n(t){return t>=48&&t<=57}function i(t,e){for(var r=(t+="").length,i=(e+="").length,s=0,l=0;s<r&&l<i;){var u=t.charCodeAt(s),c=e.charCodeAt(l);if(n(u)){if(!n(c))return u-c;for(var f=s,h=l;48===u&&++f<r;)u=t.charCodeAt(f);for(;48===c&&++h<i;)c=e.charCodeAt(h);for(var d=f,v=h;d<r&&n(t.charCodeAt(d));)++d;for(;v<i&&n(e.charCodeAt(v));)++v;var m=d-f-v+h;if(m)return m;for(;f<d;)if(m=t.charCodeAt(f++)-e.charCodeAt(h++))return m;s=d,l=v}else{if(u!==c)return u<o&&c<o&&a[u]!==-1&&a[c]!==-1?a[u]-a[c]:u-c;++s,++l}}return r-i}var s,a,o=0;i.caseInsensitive=i.i=function(t,e){return i((""+t).toLowerCase(),(""+e).toLowerCase())},Object.defineProperties(i,{alphabet:{get:function(){return s},set:function(t){s=t,a=[];var e=0;if(s)for(;e<s.length;e++)a[s.charCodeAt(e)]=e;for(o=a.length,e=0;e<o;e++)void 0===a[e]&&(a[e]=-1)}}}),t.exports=i},function(t,e){t.exports=function(t,e,r){function n(t,r){var n=t/e.length,i=Math.abs(o-r);return s?n+i/s:i?1:n}var i=r.location||0,s=r.distance||100,a=r.threshold||.4;if(e===t)return!0;if(e.length>32)return!1;var o=i,l=function(){var t,r={};for(t=0;t<e.length;t++)r[e.charAt(t)]=0;for(t=0;t<e.length;t++)r[e.charAt(t)]|=1<<e.length-t-1;return r}(),u=a,c=t.indexOf(e,o);c!=-1&&(u=Math.min(n(0,c),u),c=t.lastIndexOf(e,o+e.length),c!=-1&&(u=Math.min(n(0,c),u)));var 
f=1<<e.length-1;c=-1;for(var h,d,v,m=e.length+t.length,g=0;g<e.length;g++){for(h=0,d=m;h<d;)n(g,o+d)<=u?h=d:m=d,d=Math.floor((m-h)/2+h);m=d;var p=Math.max(1,o-d+1),C=Math.min(o+d,t.length)+e.length,y=Array(C+2);y[C+1]=(1<<g)-1;for(var b=C;b>=p;b--){var w=l[t.charAt(b-1)];if(0===g?y[b]=(y[b+1]<<1|1)&w:y[b]=(y[b+1]<<1|1)&w|((v[b+1]|v[b])<<1|1)|v[b+1],y[b]&f){var x=n(g,b-1);if(x<=u){if(u=x,c=b-1,!(c>o))break;p=Math.max(1,2*o-c)}}}if(n(g+1,o)>u)break;v=y}return!(c<0)}}]);
\ No newline at end of file
diff --git a/static/style.css b/static/style.css
new file mode 100644
index 0000000..85e56aa
--- /dev/null
+++ b/static/style.css
@@ -0,0 +1,119 @@
+/*
+ * SPDX-FileCopyrightText: © 2020 Open Networking Foundation <support@opennetworking.org>
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+@font-face {
+    font-family: 'Gilroy';
+    src: url('/fonts/gilroy-regular-webfont.woff2') format('woff2'),
+         url('/fonts/gilroy-regular-webfont.woff') format('woff');
+    font-weight: normal;
+    font-style: normal;
+}
+
+@font-face {
+    font-family: 'Gilroy';
+    src: url('/fonts/gilroy-regularitalic-webfont.woff2') format('woff2'),
+         url('/fonts/gilroy-regularitalic-webfont.woff') format('woff');
+    font-weight: normal;
+    font-style: italic;
+}
+
+@font-face {
+    font-family: 'Gilroy';
+    src: url('/fonts/gilroy-semibold-webfont.woff2') format('woff2'),
+         url('/fonts/gilroy-semibold-webfont.woff') format('woff');
+    font-weight: bold;
+    font-style: normal;
+}
+
+
+html, body {
+  height: 100%;
+}
+
+body {
+  font-family: 'Gilroy', 'Open Sans', sans-serif;
+  font-size: 16px;
+  line-height: 140%;
+
+  margin: 0;
+  display: flex;
+  min-height: 100vh;
+  flex-direction: column;
+}
+
+.header {
+  border-top: 2px solid #C4161C;
+  background-color: #cccccc;
+  padding: 1em;
+}
+
+.header.logo {
+  float: left;
+  vertical-align: middle;
+}
+
+.header h1 {
+  display: inline;
+  padding-left: 1em;
+}
+
+.content {
+  padding: 1em;
+  flex: 1;
+}
+
+.footer {
+  padding: 1em;
+  color: #eeeeee;
+  background-color: #1a1a1a;
+}
+
+.footer a {
+  color: #eeeeee;
+}
+
+.buildtime{
+  font-size: 0.8em;
+}
+
+.ccresults ul {
+  padding: 0;
+  margin: 0;
+}
+
+.ccresults li {
+  border: 1px solid #eeeeee;
+  padding: 0.5em;
+  color: #eeeeee;
+  font-size: 0.8em;
+  display: inline-block;
+  background-color: #444444;
+}
+
+.ccresults li a {
+  color: #eeeeee;
+}
+
+/* colors specific to build status */
+li.SUCCESS, a.SUCCESS, span.SUCCESS {
+  background-color: #4E8F00;
+}
+
+li.UNSTABLE, a.UNSTABLE, span.UNSTABLE {
+  background-color: #C28610;
+}
+
+li.FAILURE, a.FAILURE, span.FAILURE {
+  background-color: #C4161C;
+}
+
+li.ABORTED, a.ABORTED, span.ABORTED {
+  background-color: #666666;
+}
+
+li.NONE, a.NONE, span.NONE, li a.NONE {
+  color: #333333;
+  background-color: #eeeeee;
+}
diff --git a/templates/base.html b/templates/base.html
new file mode 100644
index 0000000..ca9bf0f
--- /dev/null
+++ b/templates/base.html
@@ -0,0 +1,31 @@
+{#
+SPDX-FileCopyrightText: &copy; 2020 Open Networking Foundation <support@opennetworking.org>
+SPDX-License-Identifier: Apache-2.0
+#}
+<html>
+<head>
+  {% block head %}
+  <link href="https://fonts.googleapis.com/css2?family=Open+Sans:ital,wght@0,300;0,400;0,700;1,400;1,700&display=swap" rel="stylesheet">
+  <link rel="stylesheet" href="/style.css" />
+  <script src="/scripts/list.min.js"></script>
+  <title>{% block title %}{{ title }}{% endblock %}</title>
+  {% endblock %}
+</head>
+<body>
+  <div class="header">
+    <a href="/"><img class="logo" src="/images/onf_logo.png" height="30px" width="30px"/></a>
+    <h1>{% block header %}{{ title }}{% endblock %}</h1>
+  </div>
+  <div class="content">
+    {% block content %}
+    <p>Base content, replace this block</p>
+    {% endblock %}
+  </div>
+  <footer class="footer">
+    {% block footer %}
+    &copy; 2020 <a href="https://opennetworking.org/">Open Networking Foundation</a><br/>
+    <div class="buildtime">Page generated at {{ buildtime }}.</right>
+    {% endblock %}
+  </footer>
+</body>
+</html>
diff --git a/templates/index.html b/templates/index.html
new file mode 100644
index 0000000..d01eff6
--- /dev/null
+++ b/templates/index.html
@@ -0,0 +1,22 @@
+{#
+SPDX-FileCopyrightText: &copy; 2020 Open Networking Foundation <support@opennetworking.org>
+SPDX-License-Identifier: Apache-2.0
+#}
+{% extends "base.html" %}
+{% block title %}ONF Continuous Certification Program{% endblock %}
+{% block header %}ONF Continuous Certification Program{% endblock %}
+{% block content %}
+
+<p>
+ONF's Continuous Certification Program is continually re-verifying supply chain products on a daily basis against the most recent versions of ONF open source software platforms, ensuring operators have a vibrant ecosystem of options for deploying ONF open source platforms.
+</p>
+
+<h2>ONF's CC Enabled Projects</h2>
+
+<ul>
+{% for project in projects | sort %}
+<li><a href="{{ project | replace(" ", "_") }}">{{ project }}</a></li>
+{% endfor %}
+</ul>
+
+{% endblock %}
diff --git a/templates/product.html b/templates/product.html
new file mode 100644
index 0000000..27fecbd
--- /dev/null
+++ b/templates/product.html
@@ -0,0 +1,42 @@
+{#
+SPDX-FileCopyrightText: &copy; 2020 Open Networking Foundation <support@opennetworking.org>
+SPDX-License-Identifier: Apache-2.0
+#}
+{% extends "base.html" %}
+{% block title %}ONF Continuous Certification - {{ product_name }}{% endblock %}
+{% block header %}ONF Continuous Certification - {{ product_name }}{% endblock %}
+{% block content %}
+
+<p>
+<em>Certification Program:</em> {{ onf_project }}<br/>
+<em>Product Name:</em> <a href="{{ product_link }}">{{ product_name }}</a>
+</p>
+
+<div class="ccresults">
+{% for group_name, jobs in groups.items() %}
+<strong>Version: {{ group_name }}</strong><br/>
+{% for job_name, builds in jobs.items() %}
+<em>Job: {{ job_name }}</em><br/>
+{% set timebox = builds | timebox(24,10) %}
+<ul>
+{% for tb in timebox %}
+<li class="{{ tb.result }}">
+  {{ tb.box_start | tsdate }} - {{ tb.outcome }}<br/>
+  {% for build in tb.builds %}
+  {% if "build_url" in build %}
+  <a href="{{ build.build_url }}" class="{{ build.result }}">{{ build.id }}</a>
+  {% else %}
+  <span class="{{ build.result }}">{{ build.id }}</span>
+  {% endif %}
+  {% else %}
+  No Builds
+  {% endfor %}
+</li>
+{% endfor %}
+</ul>
+{% endfor %}
+{% else %}
+No Tests Currently Available
+{% endfor %}
+</div>
+{% endblock %}
diff --git a/templates/project.html b/templates/project.html
new file mode 100644
index 0000000..febda68
--- /dev/null
+++ b/templates/project.html
@@ -0,0 +1,46 @@
+{#
+SPDX-FileCopyrightText: &copy; 2020 Open Networking Foundation <support@opennetworking.org>
+SPDX-License-Identifier: Apache-2.0
+#}
+{% extends "base.html" %}
+{% block title %}ONF Continuous Certification - {{ project_name }}{% endblock %}
+{% block header %}ONF Continuous Certification - {{ project_name }}{% endblock %}
+{% block content %}
+
+<h2>Test Results</h2>
+
+{% for product in products | sort(attribute="product_name")%}
+<strong><a href="{{ product.product_name | replace(" ", "_") }}">{{ product.product_name }}</a></strong>
+{% set groups = product['groups'] %}
+
+<div class="ccresults">
+{% for group_name, jobs in groups.items() %}
+<strong>Version: {{ group_name }}</strong><br/>
+{% for job_name, builds in jobs.items() %}
+<em>Job: {{ job_name }}</em><br/>
+
+{% set timebox = builds | timebox(24,10) %}
+<ul>
+{% for tb in timebox %}
+<li class="{{ tb.result }}">
+  {{ tb.box_start | tsdate }} - {{ tb.outcome }}<br/>
+  {% for build in tb.builds %}
+  {% if "build_url" in build %}
+  <a href="{{ build.build_url }}" class="{{ build.result }}">{{ build.id }}</a>
+  {% else %}
+  <span class="{{ build.result }}">{{ build.id }}</span>
+  {% endif %}
+  {% else %}
+  No Builds
+  {% endfor %}
+</li>
+{% endfor %}
+</ul>
+{% endfor %}
+{% else %}
+No Tests Currently Available
+{% endfor %}
+</div>
+<br/>
+{% endfor %}
+{% endblock %}
diff --git a/tests/test_collector.py b/tests/test_collector.py
new file mode 100644
index 0000000..9bebb77
--- /dev/null
+++ b/tests/test_collector.py
@@ -0,0 +1,15 @@
+# SPDX-FileCopyrightText: © 2020 Open Networking Foundation <support@opennetworking.org>
+# SPDX-License-Identifier: Apache-2.0
+
+from __future__ import absolute_import
+
+import unittest
+
+
+class collector_test(unittest.TestCase):
+
+    def test_1(self):
+        '''
+        Pass
+        '''
+        self.assertEqual("1", "1")
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..7f6dab8
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,25 @@
+; SPDX-FileCopyrightText: © 2020 Open Networking Foundation <support@opennetworking.org>
+; SPDX-License-Identifier: Apache-2.0
+
+[tox]
+envlist = py37
+skip_missing_interpreters = true
+
+[testenv]
+deps =
+  -r requirements.txt
+  reuse
+  flake8
+  pylint
+  coverage
+
+commands=
+#  reuse
+  flake8 buildcollector.py siterender.py
+  pylint --py3k buildcollector.py siterender.py
+  coverage erase
+  coverage run -m unittest discover -s tests
+  coverage report --omit=.tox*
+
+[flake8]
+max-line-length = 119