From 8283d87f6a1a7ea2e92e9adfb7ac42ce94a6e4d5 Mon Sep 17 00:00:00 2001
From: Scott Wittenburg <scott.wittenburg@kitware.com>
Date: Tue, 21 Jan 2020 23:35:18 -0700
Subject: [PATCH] pipelines: `spack ci` command with env-based workflow
 (#12854)

Rework Spack's continuous integration workflow to be environment-based.

- Add the `spack ci` command, which replaces the many scripts in `bin/`

- `spack ci` decouples the CI workflow from the spack instance:
  - CI is defined in a spack environment
  - the environment lives in its own (single) git repository, separate from Spack
  - the spack instance used to run the pipeline is up to the user
  - A new `gitlab-ci` section in environments allows users to configure how
    specs in the environment should be mapped to runners
  - Compilers can be bootstrapped in the new pipeline workflow

- Add extensive documentation on pipelines (see `pipelines.rst`)
- Add extensive tests for pipeline code
---
 .gitlab-ci.yml                                |  14 -
 bin/generate-gitlab-ci-yml.sh                 |  91 ---
 bin/rebuild-index.sh                          |  13 -
 bin/rebuild-package.sh                        | 399 ------------
 lib/spack/docs/index.rst                      |   1 +
 lib/spack/docs/pipelines.rst                  | 439 +++++++++++++
 lib/spack/spack/binary_distribution.py        |  15 +-
 .../spack/{cmd/release_jobs.py => ci.py}      | 452 ++++++++++++--
 lib/spack/spack/cmd/buildcache.py             |  99 +--
 lib/spack/spack/cmd/ci.py                     | 482 +++++++++++++++
 lib/spack/spack/package.py                    |   3 +-
 lib/spack/spack/schema/gitlab_ci.py           |  58 +-
 lib/spack/spack/test/ci.py                    | 167 +++++
 lib/spack/spack/test/cmd/ci.py                | 582 ++++++++++++++++++
 lib/spack/spack/test/cmd/release_jobs.py      | 128 ----
 var/spack/gpg.mock/keys/package-signing-key   |   1 +
 16 files changed, 2166 insertions(+), 778 deletions(-)
 delete mode 100644 .gitlab-ci.yml
 delete mode 100755 bin/generate-gitlab-ci-yml.sh
 delete mode 100755 bin/rebuild-index.sh
 delete mode 100755 bin/rebuild-package.sh
 create mode 100644 lib/spack/docs/pipelines.rst
 rename lib/spack/spack/{cmd/release_jobs.py => ci.py} (57%)
 create mode 100644 lib/spack/spack/cmd/ci.py
 create mode 100644 lib/spack/spack/test/ci.py
 create mode 100644 lib/spack/spack/test/cmd/ci.py
 delete mode 100644 lib/spack/spack/test/cmd/release_jobs.py
 create mode 100644 var/spack/gpg.mock/keys/package-signing-key

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
deleted file mode 100644
index 3fc9597a7c..0000000000
--- a/.gitlab-ci.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-generate ci jobs:
-  script:
-    - "./bin/generate-gitlab-ci-yml.sh"
-  tags:
-    - "spack-pre-ci"
-  artifacts:
-    paths:
-      - ci-generation
-    when: always
diff --git a/bin/generate-gitlab-ci-yml.sh b/bin/generate-gitlab-ci-yml.sh
deleted file mode 100755
index bb59199cf2..0000000000
--- a/bin/generate-gitlab-ci-yml.sh
+++ /dev/null
@@ -1,91 +0,0 @@
-#! /usr/bin/env bash
-
-# Remember where we are initially, it's the repo cloned by gitlab-ci
-original_directory=$(pwd)
-. "${original_directory}/share/spack/setup-env.sh"
-
-# Create a temporary working directory
-temp_dir=$(mktemp -d)
-trap 'rm -rf "$temp_dir"' INT TERM QUIT EXIT
-
-if [ -z "${DOWNSTREAM_CI_REPO}" ] ; then
-    echo "ERROR: missing variable: DOWNSTREAM_CI_REPO" >&2
-    exit 1
-fi
-
-if [ -z "${SPACK_RELEASE_ENVIRONMENT_PATH}" ] ; then
-    echo "ERROR: missing variable: SPACK_RELEASE_ENVIRONMENT_PATH" >&2
-    exit 1
-fi
-
-if [ -z "${CDASH_AUTH_TOKEN}" ] ; then
-    echo "WARNING: missing variable: CDASH_AUTH_TOKEN" >&2
-else
-    token_file="${temp_dir}/cdash_auth_token"
-    echo ${CDASH_AUTH_TOKEN} > ${token_file}
-fi
-
-if [ -z "${SPACK_RELEASE_ENVIRONMENT_REPO}" ] ; then
-    echo "Assuming spack repo contains environment" >&2
-    env_repo_dir=${original_directory}
-else
-    echo "Cloning ${SPACK_RELEASE_ENVIRONMENT_REPO} into ${temp_dir}/envrepo" >&2
-    cd ${temp_dir}
-    git clone ${SPACK_RELEASE_ENVIRONMENT_REPO} envrepo
-    cd envrepo
-    env_repo_dir=$(pwd)
-fi
-
-current_branch="$CI_COMMIT_REF_NAME"
-
-# Because want to see generated gitlab-ci file as an artifact,
-# we need to write it within the spack repo cloned by gitlab-ci.
-gen_ci_dir="${original_directory}/ci-generation"
-gen_ci_file="${gen_ci_dir}/.gitlab-ci.yml"
-mkdir -p ${gen_ci_dir}
-
-env_dir="${env_repo_dir}/${SPACK_RELEASE_ENVIRONMENT_PATH}"
-
-if [ ! -f "${env_dir}/spack.yaml" ] ; then
-    echo "ERROR: Cannot find spack environment file in ${env_dir}"
-    exit 1
-fi
-
-cd $env_dir
-
-# The next commands generates the .gitlab-ci.yml (and optionally creates a
-# buildgroup in cdash)
-RELEASE_JOBS_ARGS=("--output-file" "${gen_ci_file}")
-if [ ! -z "${token_file}" ]; then
-    RELEASE_JOBS_ARGS+=("--cdash-credentials" "${token_file}")
-fi
-
-spack release-jobs "${RELEASE_JOBS_ARGS[@]}"
-
-if [[ $? -ne 0 ]]; then
-    echo "spack release-jobs command failed"
-    exit 1
-fi
-
-cd "$original_directory"
-mv .git "$temp_dir/original-git-dir"
-git init .
-
-git config user.email "robot@spack.io"
-git config user.name "Spack Build Bot"
-
-cp ${gen_ci_file} "${original_directory}/.gitlab-ci.yml"
-git add .
-
-echo "git commit"
-commit_message="Auto-generated commit testing"
-commit_message="${commit_message} ${current_branch} (${CI_COMMIT_SHA})"
-git commit --message="${commit_message}"
-
-echo "git push"
-git remote add origin "$DOWNSTREAM_CI_REPO"
-git push --force origin "master:multi-ci-${current_branch}"
-
-rm -rf .git
-mv "$temp_dir/original-git-dir" .git
-git reset --hard HEAD
diff --git a/bin/rebuild-index.sh b/bin/rebuild-index.sh
deleted file mode 100755
index c4811e1f29..0000000000
--- a/bin/rebuild-index.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-
-# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-set -x
-
-SPACK_BIN_DIR="${CI_PROJECT_DIR}/bin"
-export PATH="${SPACK_BIN_DIR}:${PATH}"
-
-spack buildcache update-index -d "$MIRROR_URL"
diff --git a/bin/rebuild-package.sh b/bin/rebuild-package.sh
deleted file mode 100755
index 747cbd9875..0000000000
--- a/bin/rebuild-package.sh
+++ /dev/null
@@ -1,399 +0,0 @@
-#!/bin/bash
-
-# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-###
-### This script represents a gitlab-ci job, corresponding to a single release
-### spec.  As such this script must first decide whether or not the spec it
-### has been assigned is up to date on the remote binary mirror.  If it is
-### not (i.e. the source code has changed in a way that caused a change in the
-### full_hash of the spec), this script will build the package, create a
-### binary cache for it, and then push all related files to the remote binary
-### mirror.  This script also optionally communicates with a remote CDash
-### instance to share status on the package build process.
-###
-### The following environment variables are (possibly) used within this script
-### in order for the various elements function properly.
-###
-### First are two defaults we rely on from gitlab:
-###
-### CI_PROJECT_DIR
-### CI_JOB_NAME
-###
-### The following must be set up in the variables section of gitlab:
-###
-### AWS_ACCESS_KEY_ID
-### AWS_SECRET_ACCESS_KEY
-### SPACK_SIGNING_KEY
-###
-### SPACK_S3_UPLOAD_MIRROR_URL         // only required in the short term for the cloud case
-###
-### The following variabes are defined by the ci generation process and are
-### required:
-###
-### SPACK_ENABLE_CDASH
-### SPACK_ROOT_SPEC
-### SPACK_MIRROR_URL
-### SPACK_JOB_SPEC_PKG_NAME
-### SPACK_COMPILER_ACTION
-###
-### Finally, these variables are optionally defined by the ci generation
-### process, and may or may not be present:
-###
-### SPACK_CDASH_BASE_URL
-### SPACK_CDASH_PROJECT
-### SPACK_CDASH_PROJECT_ENC
-### SPACK_CDASH_BUILD_NAME
-### SPACK_CDASH_SITE
-### SPACK_RELATED_BUILDS
-### SPACK_JOB_SPEC_BUILDGROUP
-###
-
-shopt -s expand_aliases
-
-export FORCE_UNSAFE_CONFIGURE=1
-
-TEMP_DIR="${CI_PROJECT_DIR}/jobs_scratch_dir"
-
-JOB_LOG_DIR="${TEMP_DIR}/logs"
-SPEC_DIR="${TEMP_DIR}/specs"
-LOCAL_MIRROR="${CI_PROJECT_DIR}/local_mirror"
-BUILD_CACHE_DIR="${LOCAL_MIRROR}/build_cache"
-SPACK_BIN_DIR="${CI_PROJECT_DIR}/bin"
-
-if [ "${SPACK_ENABLE_CDASH}" == "True" ] ; then
-    CDASH_UPLOAD_URL="${SPACK_CDASH_BASE_URL}/submit.php?project=${SPACK_CDASH_PROJECT_ENC}"
-    DEP_JOB_RELATEBUILDS_URL="${SPACK_CDASH_BASE_URL}/api/v1/relateBuilds.php"
-    declare -a JOB_DEPS_PKG_NAMES
-fi
-
-export SPACK_ROOT=${CI_PROJECT_DIR}
-# export PATH="${SPACK_BIN_DIR}:${PATH}"
-export GNUPGHOME="${CI_PROJECT_DIR}/opt/spack/gpg"
-
-. "${CI_PROJECT_DIR}/share/spack/setup-env.sh"
-
-mkdir -p ${JOB_LOG_DIR}
-mkdir -p ${SPEC_DIR}
-
-cleanup() {
-    set +x
-
-    if [ -z "$exit_code" ] ; then
-
-        exit_code=$1
-        if [ -z "$exit_code" ] ; then
-            exit_code=0
-        fi
-
-        restore_io
-
-        if [ "$( type -t finalize )" '=' 'function' ] ; then
-            finalize "$JOB_LOG_DIR/cdash_log.txt"
-        fi
-
-        # We can clean these out later on, once we have a good sense for
-        # how the logging infrastructure is working
-        # rm -rf "$JOB_LOG_DIR"
-    fi
-
-    \exit $exit_code
-}
-
-alias exit='cleanup'
-
-begin_logging() {
-    trap "cleanup 1; \\exit \$exit_code" INT TERM QUIT
-    trap "cleanup 0; \\exit \$exit_code" EXIT
-
-    rm -rf "$JOB_LOG_DIR/cdash_log.txt"
-
-    # NOTE: Here, some redirects are set up
-    exec 3>&1 # fd 3 is now a dup of stdout
-    exec 4>&2 # fd 4 is now a dup of stderr
-
-    # stdout and stderr are joined and redirected to the log
-    exec &> "$JOB_LOG_DIR/cdash_log.txt"
-
-    set -x
-}
-
-restore_io() {
-    exec  >&-
-    exec 2>&-
-
-    exec  >&3
-    exec 2>&4
-
-    exec 3>&-
-    exec 4>&-
-}
-
-finalize() {
-    # If you define a finalize function:
-    #  - it will always be called at the very end of the script
-    #  - the log file will be passed in as the first argument, and
-    #  - the code in this function will not be logged.
-    echo "The full log file is located at $1"
-    # TODO: send this log data to cdash!
-}
-
-check_error()
-{
-    local last_exit_code=$1
-    local last_cmd=$2
-    if [[ ${last_exit_code} -ne 0 ]]; then
-        echo "${last_cmd} exited with code ${last_exit_code}"
-        echo "TERMINATING JOB"
-        exit 1
-    else
-        echo "${last_cmd} completed successfully"
-    fi
-}
-
-extract_build_id()
-{
-    LINES_TO_SEARCH=$1
-    regex="buildSummary\.php\?buildid=([[:digit:]]+)"
-    SINGLE_LINE_OUTPUT=$(echo ${LINES_TO_SEARCH} | tr -d '\n')
-
-    if [[ ${SINGLE_LINE_OUTPUT} =~ ${regex} ]]; then
-        echo "${BASH_REMATCH[1]}"
-    else
-        echo "NONE"
-    fi
-}
-
-get_relate_builds_post_data()
-{
-  cat <<EOF
-{
-  "project": "${1}",
-  "buildid": ${2},
-  "relatedid": ${3},
-  "relationship": "depends on"
-}
-EOF
-}
-
-gen_full_specs_for_job_and_deps() {
-    SPEC_YAML_PATH="${SPEC_DIR}/${SPACK_JOB_SPEC_PKG_NAME}.yaml"
-    local spec_names_to_save="${SPACK_JOB_SPEC_PKG_NAME}"
-
-    if [ "${SPACK_ENABLE_CDASH}" == "True" ] ; then
-        IFS=';' read -ra DEPS <<< "${SPACK_RELATED_BUILDS}"
-        for i in "${DEPS[@]}"; do
-            depPkgName="${i}"
-            spec_names_to_save="${spec_names_to_save} ${depPkgName}"
-            JOB_DEPS_PKG_NAMES+=("${depPkgName}")
-        done
-    fi
-
-    if [ "${SPACK_COMPILER_ACTION}" == "FIND_ANY" ]; then
-        # This corresponds to a bootstrapping phase where we need to
-        # rely on any available compiler to build the package (i.e. the
-        # compiler needed to be stripped from the spec), and thus we need
-        # to concretize the root spec again.
-        spack -d buildcache save-yaml --specs "${spec_names_to_save}" --root-spec "${SPACK_ROOT_SPEC}" --yaml-dir "${SPEC_DIR}"
-    else
-        # in this case, either we're relying on Spack to install missing compiler
-        # bootstrapped in a previous phase, or else we only had one phase (like a
-        # site which already knows what compilers are available on it's runners),
-        # so we don't want to concretize that root spec again.  The reason we need
-        # this in the first case (bootstrapped compiler), is that we can't concretize
-        # a spec at this point if we're going to ask spack to "install_missing_compilers".
-        tmp_dir=$(mktemp -d)
-        TMP_YAML_PATH="${tmp_dir}/root.yaml"
-        ROOT_SPEC_YAML=$(spack python -c "import base64 ; import zlib ; print(str(zlib.decompress(base64.b64decode('${SPACK_ROOT_SPEC}')).decode('utf-8')))")
-        echo "${ROOT_SPEC_YAML}" > "${TMP_YAML_PATH}"
-        spack -d buildcache save-yaml --specs "${spec_names_to_save}" --root-spec-yaml "${TMP_YAML_PATH}" --yaml-dir "${SPEC_DIR}"
-        rm -rf ${tmp_dir}
-    fi
-}
-
-begin_logging
-
-echo "Running job for spec: ${CI_JOB_NAME}"
-
-# This should create the directory we referred to as GNUPGHOME earlier
-spack gpg list
-
-# Importing the secret key using gpg2 directly should allow to
-# sign and verify both
-set +x
-KEY_IMPORT_RESULT=`echo ${SPACK_SIGNING_KEY} | base64 --decode | gpg2 --import`
-check_error $? "gpg2 --import"
-set -x
-
-spack gpg list --trusted
-spack gpg list --signing
-
-# To have spack install missing compilers, we need to add a custom
-# configuration scope, then we pass that to the package installation
-# command
-CUSTOM_CONFIG_SCOPE_DIR="${TEMP_DIR}/config_scope"
-mkdir -p "${CUSTOM_CONFIG_SCOPE_DIR}"
-CUSTOM_CONFIG_SCOPE_ARG=""
-
-if [ "${SPACK_COMPILER_ACTION}" == "INSTALL_MISSING" ]; then
-    echo "Make sure bootstrapped compiler will be installed"
-    custom_config_file_path="${CUSTOM_CONFIG_SCOPE_DIR}/config.yaml"
-      cat <<CONFIG_STUFF > "${custom_config_file_path}"
-config:
-  install_missing_compilers: true
-CONFIG_STUFF
-    CUSTOM_CONFIG_SCOPE_ARG="-C ${CUSTOM_CONFIG_SCOPE_DIR}"
-    # Configure the binary mirror where, if needed, this jobs compiler
-    # was installed in binary pacakge form, then tell spack to
-    # install_missing_compilers.
-elif [ "${SPACK_COMPILER_ACTION}" == "FIND_ANY" ]; then
-    echo "Just find any available compiler"
-    spack compiler find
-else
-    echo "No compiler action to be taken"
-fi
-
-# Finally, list the compilers spack knows about
-echo "Compiler Configurations:"
-spack config get compilers
-
-# Write full-deps yamls for this job spec and its dependencies
-gen_full_specs_for_job_and_deps
-
-# Make the build_cache directory if it doesn't exist
-mkdir -p "${BUILD_CACHE_DIR}"
-
-# Get buildcache name so we can write a CDash build id file in the right place.
-# If we're unable to get the buildcache name, we may have encountered a problem
-# concretizing the spec, or some other issue that will eventually cause the job
-# to fail.
-JOB_BUILD_CACHE_ENTRY_NAME=`spack -d buildcache get-buildcache-name --spec-yaml "${SPEC_YAML_PATH}"`
-if [[ $? -ne 0 ]]; then
-    echo "ERROR, unable to get buildcache entry name for job ${CI_JOB_NAME}"
-    exit 1
-fi
-
-if [ "${SPACK_ENABLE_CDASH}" == "True" ] ; then
-    # Whether we have to build the spec or download it pre-built, we expect to find
-    # the cdash build id file sitting in this location afterwards.
-    JOB_CDASH_ID_FILE="${BUILD_CACHE_DIR}/${JOB_BUILD_CACHE_ENTRY_NAME}.cdashid"
-fi
-
-# Finally, we can check the spec we have been tasked with build against
-# the built binary on the remote mirror to see if it needs to be rebuilt
-spack -d buildcache check --spec-yaml "${SPEC_YAML_PATH}" --mirror-url "${SPACK_MIRROR_URL}" --rebuild-on-error
-
-if [[ $? -ne 0 ]]; then
-    # Configure mirror
-    spack mirror add local_artifact_mirror "file://${LOCAL_MIRROR}"
-
-    if [ "${SPACK_ENABLE_CDASH}" == "True" ] ; then
-        JOB_CDASH_ID="NONE"
-
-        # Install package, using the buildcache from the local mirror to
-        # satisfy dependencies.
-        BUILD_ID_LINE=`spack -d -k -v "${CUSTOM_CONFIG_SCOPE_ARG}" install --keep-stage --cdash-upload-url "${CDASH_UPLOAD_URL}" --cdash-build "${SPACK_CDASH_BUILD_NAME}" --cdash-site "${SPACK_CDASH_SITE}" --cdash-track "${SPACK_JOB_SPEC_BUILDGROUP}" -f "${SPEC_YAML_PATH}" | grep "buildSummary\\.php"`
-        check_error $? "spack install"
-
-        # By parsing the output of the "spack install" command, we can get the
-        # buildid generated for us by CDash
-        JOB_CDASH_ID=$(extract_build_id "${BUILD_ID_LINE}")
-
-        # Write the .cdashid file to the buildcache as well
-        echo "${JOB_CDASH_ID}" >> ${JOB_CDASH_ID_FILE}
-    else
-        spack -d -k -v "${CUSTOM_CONFIG_SCOPE_ARG}" install --keep-stage -f "${SPEC_YAML_PATH}"
-    fi
-
-    # Copy some log files into an artifact location, once we have a way
-    # to provide a spec.yaml file to more spack commands (e.g. "location")
-    # stage_dir=$(spack location --stage-dir -f "${SPEC_YAML_PATH}")
-    # build_log_file=$(find -L "${stage_dir}" | grep "spack-build\\.out")
-    # config_log_file=$(find -L "${stage_dir}" | grep "config\\.log")
-    # cp "${build_log_file}" "${JOB_LOG_DIR}/"
-    # cp "${config_log_file}" "${JOB_LOG_DIR}/"
-
-    # Create buildcache entry for this package, reading the spec from the yaml
-    # file.
-    spack -d buildcache create --spec-yaml "${SPEC_YAML_PATH}" -a -f -d "${LOCAL_MIRROR}" --no-rebuild-index
-    check_error $? "spack buildcache create"
-
-    # TODO: The upload-s3 command should eventually be replaced with something
-    # like: "spack buildcache put <mirror> <spec>", when that subcommand is
-    # properly implemented.
-    if [ ! -z "${SPACK_S3_UPLOAD_MIRROR_URL}" ] ; then
-        spack -d upload-s3 spec --base-dir "${LOCAL_MIRROR}" --spec-yaml "${SPEC_YAML_PATH}" --endpoint-url "${SPACK_S3_UPLOAD_MIRROR_URL}"
-        check_error $? "spack upload-s3 spec"
-    else
-        spack -d buildcache copy --base-dir "${LOCAL_MIRROR}" --spec-yaml "${SPEC_YAML_PATH}" --destination-url "${SPACK_MIRROR_URL}"
-    fi
-else
-    echo "spec ${CI_JOB_NAME} is already up to date on remote mirror, downloading it"
-
-    # Configure remote mirror so we can download buildcache entry
-    spack mirror add remote_binary_mirror ${SPACK_MIRROR_URL}
-
-    # Now download it
-    BUILDCACHE_DL_ARGS=("--spec-yaml" "${SPEC_YAML_PATH}" "--path" "${BUILD_CACHE_DIR}/" )
-    if [ "${SPACK_ENABLE_CDASH}" == "True" ] ; then
-        BUILDCACHE_DL_ARGS+=( "--require-cdashid" )
-    fi
-    spack -d buildcache download "${BUILDCACHE_DL_ARGS[@]}"
-    check_error $? "spack buildcache download"
-fi
-
-# The next step is to relate this job to the jobs it depends on
-if [ "${SPACK_ENABLE_CDASH}" == "True" ] ; then
-    if [ -f "${JOB_CDASH_ID_FILE}" ]; then
-        JOB_CDASH_BUILD_ID=$(<${JOB_CDASH_ID_FILE})
-
-        if [ "${JOB_CDASH_BUILD_ID}" == "NONE" ]; then
-            echo "ERROR: unable to read this jobs id from ${JOB_CDASH_ID_FILE}"
-            exit 1
-        fi
-
-        # Now get CDash ids for dependencies and "relate" each dependency build
-        # with this jobs build
-        for DEP_PKG_NAME in "${JOB_DEPS_PKG_NAMES[@]}"; do
-            echo "Getting cdash id for dependency --> ${DEP_PKG_NAME} <--"
-            DEP_SPEC_YAML_PATH="${SPEC_DIR}/${DEP_PKG_NAME}.yaml"
-            DEP_JOB_BUILDCACHE_NAME=`spack -d buildcache get-buildcache-name --spec-yaml "${DEP_SPEC_YAML_PATH}"`
-
-            if [[ $? -eq 0 ]]; then
-                DEP_JOB_ID_FILE="${BUILD_CACHE_DIR}/${DEP_JOB_BUILDCACHE_NAME}.cdashid"
-                echo "DEP_JOB_ID_FILE path = ${DEP_JOB_ID_FILE}"
-
-                if [ -f "${DEP_JOB_ID_FILE}" ]; then
-                    DEP_JOB_CDASH_BUILD_ID=$(<${DEP_JOB_ID_FILE})
-                    echo "File ${DEP_JOB_ID_FILE} contained value ${DEP_JOB_CDASH_BUILD_ID}"
-                    echo "Relating builds -> ${SPACK_CDASH_BUILD_NAME} (buildid=${JOB_CDASH_BUILD_ID}) depends on ${DEP_PKG_NAME} (buildid=${DEP_JOB_CDASH_BUILD_ID})"
-                    relateBuildsPostBody="$(get_relate_builds_post_data "${SPACK_CDASH_PROJECT}" ${JOB_CDASH_BUILD_ID} ${DEP_JOB_CDASH_BUILD_ID})"
-                    relateBuildsResult=`curl "${DEP_JOB_RELATEBUILDS_URL}" -H "Content-Type: application/json" -H "Accept: application/json" -d "${relateBuildsPostBody}"`
-                    echo "Result of curl request: ${relateBuildsResult}"
-                else
-                    echo "ERROR: Did not find expected .cdashid file for dependency: ${DEP_JOB_ID_FILE}"
-                    exit 1
-                fi
-            else
-                echo "ERROR: Unable to get buildcache entry name for ${DEP_SPEC_NAME}"
-                exit 1
-            fi
-        done
-    else
-        echo "ERROR: Did not find expected .cdashid file ${JOB_CDASH_ID_FILE}"
-        exit 1
-    fi
-fi
-
-# Show the size of the buildcache and a list of what's in it, directly
-# in the gitlab log output
-(
-    restore_io
-    du -sh ${BUILD_CACHE_DIR}
-    find ${BUILD_CACHE_DIR} -maxdepth 3 -type d -ls
-)
-
-echo "End of rebuild package script"
diff --git a/lib/spack/docs/index.rst b/lib/spack/docs/index.rst
index a8534ec1bf..489c15645a 100644
--- a/lib/spack/docs/index.rst
+++ b/lib/spack/docs/index.rst
@@ -74,6 +74,7 @@ or refer to the full manual below.
    package_list
    chain
    extensions
+   pipelines
 
 .. toctree::
    :maxdepth: 2
diff --git a/lib/spack/docs/pipelines.rst b/lib/spack/docs/pipelines.rst
new file mode 100644
index 0000000000..f70b39a16d
--- /dev/null
+++ b/lib/spack/docs/pipelines.rst
@@ -0,0 +1,439 @@
+.. Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+   Spack Project Developers. See the top-level COPYRIGHT file for details.
+
+   SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+.. _pipelines:
+
+=========
+Pipelines
+=========
+
+Spack provides commands that support generating and running automated build
+pipelines designed for Gitlab CI.  At the highest level it works like this:
+provide a spack environment describing the set of packages you care about,
+and include within that environment file a description of how those packages
+should be mapped to Gitlab runners.  Spack can then generate a ``.gitlab-ci.yml``
+file containing job descriptions for all your packages that can be run by a
+properly configured Gitlab CI instance.  When run, the generated pipeline will
+build and deploy binaries, and it can optionally report to a CDash instance
+regarding the health of the builds as they evolve over time.
+
+------------------------------
+Getting started with pipelines
+------------------------------
+
+It is fairly straightforward to get started with automated build pipelines.  At
+a minimum, you'll need to set up a Gitlab instance (more about Gitlab CI
+`here <https://about.gitlab.com/product/continuous-integration/>`_) and configure
+at least one `runner <https://docs.gitlab.com/runner/>`_.  Then the basic steps
+for setting up a build pipeline are as follows:
+
+#. Create a repository on your gitlab instance
+#. Add a ``spack.yaml`` at the root containing your pipeline environment (see
+   below for details)
+#. Add a ``.gitlab-ci.yml`` at the root containing a single job, similar to
+   this one:
+
+   .. code-block:: yaml
+
+      pipeline-job:
+        tags:
+          - <custom-tag>
+          ...
+        script:
+          - spack ci start
+
+#. Add any secrets required by the CI process to environment variables using the
+   CI web UI
+#. Push a commit containing the ``spack.yaml`` and ``.gitlab-ci.yml`` mentioned above
+   to the gitlab repository
+
+The ``<custom-tag>``, above, is used to pick one of your configured runners,
+while the use of the ``spack ci start`` command implies that the chosen runner
+has an appropriate version of spack installed and configured for use.  Of course, there
+are myriad ways to customize the process.  You can configure CDash reporting
+on the progress of your builds, set up S3 buckets to mirror binaries built by
+the pipeline, clone a custom spack repository/ref for use by the pipeline, and
+more.
+
+While it is possible to set up pipelines on gitlab.com, the builds there are
+limited to 60 minutes and generic hardware.  It is also possible to
+`hook up <https://about.gitlab.com/blog/2018/04/24/getting-started-gitlab-ci-gcp>`_
+Gitlab to Google Kubernetes Engine (`GKE <https://cloud.google.com/kubernetes-engine/>`_)
+or Amazon Elastic Kubernetes Service (`EKS <https://aws.amazon.com/eks>`_), though those
+topics are outside the scope of this document.
+
+-----------------------------------
+Spack commands supporting pipelines
+-----------------------------------
+
+Spack provides a ``ci`` command with sub-commands for doing various things related
+to automated build pipelines.  All of the ``spack ci ...`` commands must be run
+from within an environment, as each one makes use of the environment for different
+purposes.  Additionally, some options to the commands (or conditions present in
+the spack environment file) may require particular environment variables to be
+set in order to function properly.  These are typically secrets
+needed for pipeline operation that should not be visible in a spack environment
+file.  The environment variables are described in more detail in
+:ref:`ci_environment_variables`.
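+
+For example, generating a pipeline from an existing environment might look
+like this (the directory path is illustrative):
+
+.. code-block:: console
+
+   $ cd <path-to-environment>
+   $ spack env activate .
+   $ spack ci generate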
+
+.. _cmd_spack_ci:
+
+^^^^^^^^^^^^^^^^^^
+``spack ci``
+^^^^^^^^^^^^^^^^^^
+
+Super-command for functionality related to generating pipelines and executing
+pipeline jobs.
+
+.. _cmd_spack_ci_start:
+
+^^^^^^^^^^^^^^^^^^
+``spack ci start``
+^^^^^^^^^^^^^^^^^^
+
+Currently this command is a shortcut that runs ``spack ci generate``, followed
+by ``spack ci pushyaml``.
+
+.. _cmd_spack_ci_generate:
+
+^^^^^^^^^^^^^^^^^^^^^
+``spack ci generate``
+^^^^^^^^^^^^^^^^^^^^^
+
+Concretizes the specs in the active environment, stages them (as described in
+:ref:`staging_algorithm`), and writes the resulting ``.gitlab-ci.yml`` to disk.
+
+.. _cmd_spack_ci_pushyaml:
+
+^^^^^^^^^^^^^^^^^^^^^
+``spack ci pushyaml``
+^^^^^^^^^^^^^^^^^^^^^
+
+Commits the generated ``.gitlab-ci.yml`` and pushes it to the
+``DOWNSTREAM_CI_REPO``, which is frequently the same repository.  The branch
+created has the same name as the current branch being tested, but with ``multi-ci-``
+prepended.  Once Gitlab CI has full support for dynamically
+defined workloads, this command will be deprecated.
+
+.. _cmd_spack_ci_rebuild:
+
+^^^^^^^^^^^^^^^^^^^^
+``spack ci rebuild``
+^^^^^^^^^^^^^^^^^^^^
+
+This sub-command is responsible for ensuring that a single spec from the release
+environment is up to date on the remote mirror configured in the environment,
+and, as such, corresponds to a single job in the ``.gitlab-ci.yml`` file.
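+
+A minimal sketch of how a generated job invokes this command (with
+``enable-debug-messages`` set, it becomes ``spack -d ci rebuild``):
+
+.. code-block:: yaml
+
+   script:
+     - spack ci rebuild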
+
+------------------------------------
+A pipeline-enabled spack environment
+------------------------------------
+
+Here's an example of a spack environment file that has been enhanced with
+sections describing a build pipeline:
+
+.. code-block:: yaml
+
+   spack:
+     definitions:
+     - pkgs:
+       - readline@7.0
+     - compilers:
+       - '%gcc@5.5.0'
+     - oses:
+       - os=ubuntu18.04
+       - os=centos7
+     specs:
+     - matrix:
+       - [$pkgs]
+       - [$compilers]
+       - [$oses]
+     mirrors:
+       cloud_gitlab: https://mirror.spack.io
+     gitlab-ci:
+       mappings:
+         - match:
+             - os=ubuntu18.04
+           runner-attributes:
+             tags:
+               - spack-k8s
+             image: spack/spack_builder_ubuntu_18.04
+         - match:
+             - os=centos7
+           runner-attributes:
+             tags:
+               - spack-k8s
+             image: spack/spack_builder_centos_7
+     cdash:
+       build-group: Release Testing
+       url: https://cdash.spack.io
+       project: Spack
+       site: Spack AWS Gitlab Instance
+
+Hopefully, the ``definitions``, ``specs``, ``mirrors``, etc. sections are already
+familiar, as they are part of spack :ref:`environments`.  So let's take a more
+in-depth look at some of the pipeline-related sections in that environment file
+that might not be as familiar.
+
+The ``gitlab-ci`` section is used to configure how the pipeline workload should be
+generated, mainly how the jobs for building specs should be assigned to the
+configured runners on your instance.  Each entry within the list of ``mappings``
+corresponds to a known gitlab runner, where the ``match`` section is used
+in assigning a release spec to one of the runners, and the ``runner-attributes``
+section is used to configure the spec/job for that particular runner.
+
+There are other pipeline options you can configure within the ``gitlab-ci`` section
+as well.  The ``bootstrap`` section allows you to specify lists of specs from
+your ``definitions`` that should be staged ahead of the environment's ``specs`` (this
+section is described in more detail below).
+
+The ``enable-artifacts-buildcache`` key
+takes a boolean and determines whether the pipeline uses artifacts to store and
+pass along the buildcaches from one stage to the next (the default if you don't
+provide this option is ``False``).  The ``enable-debug-messages`` key takes a boolean
+and allows you to choose whether the pipeline build jobs are run as ``spack -d ci rebuild``
+or just ``spack ci rebuild`` (the default is not to enable debug messages).
+
+The ``final-stage-rebuild-index`` section controls whether an extra job is added to the
+end of your pipeline (in a stage by itself) which will regenerate the mirror's
+buildcache index.  Under normal operation, each pipeline job that rebuilds a package
+will regenerate the mirror's buildcache index after the buildcache entry for that
+job has been created and pushed to the mirror.  Since jobs in the same stage can run in
+parallel, there is the possibility that at the end of some stage, the index may not
+reflect all the binaries in the buildcache.  Adding the ``final-stage-rebuild-index``
+section ensures that at the end of the pipeline, the index will be in sync with the
+binaries on the mirror.  If the mirror lives in an S3 bucket, this job will need to
+run on a machine with the Python ``boto3`` module installed, and consequently the
+``final-stage-rebuild-index`` needs to specify a list of ``tags`` to pick a runner
+satisfying that condition.  It can also take an ``image`` key so Docker executor type
+runners can pick the right image for the index regeneration job.
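+
+As a sketch, these options (with illustrative values) might appear in the
+``gitlab-ci`` section like this (the ``mappings`` section is elided; see below):
+
+.. code-block:: yaml
+
+   gitlab-ci:
+     enable-artifacts-buildcache: true
+     enable-debug-messages: false
+     final-stage-rebuild-index:
+       tags:
+         - <runner-tag-with-boto3>
+       image: spack/spack_builder_ubuntu_18.04
+     mappings:
+       ...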
+
+The optional ``cdash`` section provides information that will be used by the
+``spack ci generate`` command (invoked by ``spack ci start``) for reporting
+to CDash.  All the jobs generated from this environment will belong to a
+"build group" within CDash that can be tracked over time.  As the release
+progresses, this build group may have jobs added or removed.  The ``url``, ``project``,
+and ``site`` keys are used to specify the CDash instance to which build results should
+be reported.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Assignment of specs to runners
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The ``mappings`` section corresponds to a list of runners, and during assignment
+of specs to runners, the list is traversed in order looking for matches; the
+first runner that matches a release spec is assigned to build that spec.  The
+``match`` section within each runner mapping section is a list of specs, and
+if any of those specs match the release spec (the ``spec.satisfies()`` method
+is used), then that runner is considered a match.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Configuration of specs/jobs for a runner
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Once a runner has been chosen to build a release spec, the ``runner-attributes``
+section provides information determining details of the job in the context of
+the runner.  The ``runner-attributes`` section must have a ``tags`` key, which
+is a list containing at least one tag used to select the runner from among the
+runners known to the gitlab instance.  For Docker executor type runners, the
+``image`` key is used to specify the Docker image used to build the release spec
+(and could also appear as a dictionary with a ``name`` specifying the image name,
+as well as an ``entrypoint`` to override whatever the default for that image is).
+For other types of runners the ``variables`` key will be useful to pass any
+information on to the runner that it needs to do its work (e.g. scheduler
+parameters).
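+
+As a sketch, ``runner-attributes`` using the dictionary form of ``image`` and a
+``variables`` key might look like this (the variable name and values are
+hypothetical):
+
+.. code-block:: yaml
+
+   runner-attributes:
+     tags:
+       - spack-k8s
+     image:
+       name: spack/spack_builder_centos_7
+       entrypoint: ['']
+     variables:
+       SCHEDULER_PARAMETERS: '-N 1 -n 4'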
+
+.. _staging_algorithm:
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Summary of ``.gitlab-ci.yml`` generation algorithm
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+All specs yielded by the matrix (or all the specs in the environment) have their
+dependencies computed, and the entire resulting set of specs is staged together
+before being run through the ``gitlab-ci/mappings`` entries, where each staged
+spec is assigned a runner.  "Staging" is the name we have given to the process
+of figuring out in what order the specs should be built, taking into consideration
+Gitlab CI rules about jobs/stages.  In the staging process the goal is to maximize
+the number of jobs in any stage of the pipeline, while ensuring that the jobs in
+any stage only depend on jobs in previous stages (since those jobs are guaranteed
+to have completed already).  As a runner is determined for a job, the information
+in the ``runner-attributes`` is used to populate various parts of the job
+description that will be used by Gitlab CI. Once all the jobs have been assigned
+a runner, the ``.gitlab-ci.yml`` is written to disk.
+
+The short example provided above would result in the ``readline``, ``ncurses``,
+and ``pkgconf`` packages getting staged and built on runners chosen by the
+``spack-k8s`` tag.  In this example, we assume the runners are of the Docker
+executor type, and thus certain jobs will be run in the ``centos7`` container,
+and others in the ``ubuntu-18.04`` container.  The resulting ``.gitlab-ci.yml``
+will contain six jobs in three stages.  Once the jobs have been generated, the
+presence of a ``SPACK_CDASH_AUTH_TOKEN`` environment variable during the
+``spack ci generate`` command would result in all of the jobs being put in a
+build group on CDash called "Release Testing" (that group will be created if
+it doesn't already exist).
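+
+Putting this all together, a single entry in the generated ``.gitlab-ci.yml``
+might look like the following sketch (the job name and variable values are
+illustrative; real job names also encode the hash, version, compiler, and
+architecture of the spec):
+
+.. code-block:: yaml
+
+   readline-build-job:
+     stage: stage-2
+     script:
+       - spack ci rebuild
+     tags:
+       - spack-k8s
+     image: spack/spack_builder_ubuntu_18.04
+     variables:
+       SPACK_JOB_SPEC_PKG_NAME: readline
+       SPACK_COMPILER_ACTION: NONE
+     artifacts:
+       paths:
+         - jobs_scratch_dir
+         - cdash_report
+       when: always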
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Optional compiler bootstrapping
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Spack pipelines also have support for bootstrapping compilers on systems that
+may not already have the desired compilers installed. The idea here is that
+you can specify a list of things to bootstrap in your ``definitions``, and
+spack will guarantee those will be installed in a phase of the pipeline before
+your release specs, so that you can rely on those packages being available in
+the binary mirror when you need them later on in the pipeline.  At the moment
+the only viable use-case for bootstrapping is to install compilers.
+
+Here's an example of what bootstrapping some compilers might look like:
+
+.. code-block:: yaml
+
+   spack:
+     definitions:
+     - compiler-pkgs:
+       - 'llvm+clang@6.0.1 os=centos7'
+       - 'gcc@6.5.0 os=centos7'
+       - 'llvm+clang@6.0.1 os=ubuntu18.04'
+       - 'gcc@6.5.0 os=ubuntu18.04'
+     - pkgs:
+       - readline@7.0
+     - compilers:
+       - '%gcc@5.5.0'
+       - '%gcc@6.5.0'
+       - '%gcc@7.3.0'
+       - '%clang@6.0.0'
+       - '%clang@6.0.1'
+     - oses:
+       - os=ubuntu18.04
+       - os=centos7
+     specs:
+     - matrix:
+       - [$pkgs]
+       - [$compilers]
+       - [$oses]
+       exclude:
+         - '%gcc@7.3.0 os=centos7'
+         - '%gcc@5.5.0 os=ubuntu18.04'
+     gitlab-ci:
+       bootstrap:
+         - name: compiler-pkgs
+           compiler-agnostic: true
+       mappings:
+         # mappings similar to the example higher up in this description
+         ...
+
+In the example above, we have added a list to the ``definitions`` called
+``compiler-pkgs`` (you can add any number of these), which lists compiler packages
+we want to be staged ahead of the full matrix of release specs (which consists
+only of readline in our example).  Then within the ``gitlab-ci`` section, we
+have added a ``bootstrap`` section, which can contain a list of items, each
+referring to a list in the ``definitions`` section.  These items can either
+be a dictionary or a string.  If you supply a dictionary, it must have a ``name``
+key whose value matches one of the lists in ``definitions``, and it can have a
+``compiler-agnostic`` key whose value is a boolean.  If you supply a string,
+then it needs to match one of the lists provided in ``definitions``.  You can
+think of the bootstrap list as an ordered list of pipeline "phases" that will
+be staged before your actual release specs.  While this introduces another
+bottleneck in the pipeline (all jobs in all stages of one phase must
+complete before any jobs in the next phase can begin), it also means you are
+guaranteed your bootstrapped compilers will be available when you need them.
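+
+For instance, since a bootstrap item may be a plain string, a sketch of the
+string form (which does not support per-list options) is:
+
+.. code-block:: yaml
+
+   gitlab-ci:
+     bootstrap:
+       - compiler-pkgs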
+
+The ``compiler-agnostic`` key can be provided with each item in the
+bootstrap list. It tells the ``spack ci generate`` command that any jobs staged
+from that particular list should have the compiler removed from the spec, so
+that any compiler available on the runner where the job is run can be used to
+build the package.
+
+When including a bootstrapping phase as in the example above, the result is that
+the bootstrapped compiler packages will be pushed to the binary mirror (and the
+local artifacts mirror) before the actual release specs are built. In this case,
+the jobs corresponding to subsequent release specs are configured with
+``install_missing_compilers``, so that if spack is asked to install a package
+with a compiler it doesn't know about, that compiler can first be quickly
+installed from the binary mirror.
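+
+Concretely, this is equivalent to those jobs running with the following
+configuration in place (this mirrors what the ``configure_compilers`` helper in
+``lib/spack/spack/ci.py`` does for the ``INSTALL_MISSING`` case):
+
+.. code-block:: yaml
+
+   config:
+     install_missing_compilers: true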
+
+Since bootstrapping compilers is optional, those items can be left out of the
+environment/stack file, and in that case no bootstrapping will be done (only the
+specs will be staged for building) and the runners will be expected to already
+have all needed compilers installed and configured for spack to use.
+
+-------------------------------------
+Using a custom spack in your pipeline
+-------------------------------------
+
+If your runners will not have a version of spack ready to invoke, or if for some
+other reason you want to use a custom version of spack to run your pipelines,
+this can be accomplished fairly simply.  First, create CI environment variables
+containing the URL and branch/tag you want to clone (calling them, for example,
+``SPACK_REPO`` and ``SPACK_REF``), use them to clone spack in your pre-ci
+``before_script``, and finally pass those same values along to the workload
+generation process via the ``--spack-repo`` and ``--spack-ref`` command line
+arguments.  Here's an example:
+
+.. code-block:: yaml
+
+   pipeline-job:
+     tags:
+       - <some-other-tag>
+     before_script:
+       - git clone ${SPACK_REPO} --branch ${SPACK_REF}
+       - . ./spack/share/spack/setup-env.sh
+     script:
+       - spack ci start --spack-repo ${SPACK_REPO} --spack-ref ${SPACK_REF} <...args>
+     after_script:
+       - rm -rf ./spack
+
+If the ``spack ci start`` command receives those extra command line arguments,
+then it adds similar ``before_script`` and ``after_script`` sections for each of
+the ``spack ci rebuild`` jobs it generates (cloning and sourcing a custom
+spack in the ``before_script`` and removing it again in the ``after_script``).
+This gives you control over the version of spack used when the rebuild jobs
+are actually run on the gitlab runner.
+
+.. _ci_environment_variables:
+
+--------------------------------------------------
+Environment variables affecting pipeline operation
+--------------------------------------------------
+
+Certain secrets and some other information should be provided to the pipeline
+infrastructure via environment variables, usually for reasons of security, but
+sometimes to support additional workflows such as PR testing.  The
+environment variables used by the pipeline infrastructure are described here.
+
+^^^^^^^^^^^^^^^^^
+AWS_ACCESS_KEY_ID
+^^^^^^^^^^^^^^^^^
+
+Needed when the binary mirror is an S3 bucket.
+
+^^^^^^^^^^^^^^^^^^^^^
+AWS_SECRET_ACCESS_KEY
+^^^^^^^^^^^^^^^^^^^^^
+
+Needed when the binary mirror is an S3 bucket.
+
+^^^^^^^^^^^^^^^
+S3_ENDPOINT_URL
+^^^^^^^^^^^^^^^
+
+Needed when the binary mirror is an S3 bucket that is *not* on AWS.
+
+^^^^^^^^^^^^^^^^^^^^^^
+SPACK_CDASH_AUTH_TOKEN
+^^^^^^^^^^^^^^^^^^^^^^
+
+Needed in order to report build groups to CDash.
+
+^^^^^^^^^^^^^^^^^
+SPACK_SIGNING_KEY
+^^^^^^^^^^^^^^^^^
+
+Needed to sign/verify binary packages from the remote binary mirror.
+
+^^^^^^^^^^^^^^^^^^
+DOWNSTREAM_CI_REPO
+^^^^^^^^^^^^^^^^^^
+
+Needed until Gitlab CI supports dynamic job generation.  The value can contain
+connection credentials, and may point to the same repository or a different one.
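+
+For example, a hypothetical value embedding credentials directly in the URL
+(any scheme accepted by your git host should work) might look like:
+
+.. code-block:: console
+
+   DOWNSTREAM_CI_REPO=https://<user>:<token>@gitlab.example.com/group/repo.git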
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index 6f3efc2acf..37190a549c 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -803,10 +803,10 @@ def needs_rebuild(spec, mirror_url, rebuild_on_errors=False):
     try:
         _, _, yaml_file = web_util.read_from_url(file_path)
         yaml_contents = codecs.getreader('utf-8')(yaml_file).read()
-    except URLError as url_err:
+    except (URLError, web_util.SpackWebError) as url_err:
         err_msg = [
             'Unable to determine whether {0} needs rebuilding,',
-            ' caught URLError attempting to read from {1}.',
+            ' caught exception attempting to read from {1}.',
         ]
         tty.error(''.join(err_msg).format(spec.short_spec, file_path))
         tty.debug(url_err)
@@ -908,11 +908,16 @@ def _download_buildcache_entry(mirror_root, descriptions):
     return True
 
 
-def download_buildcache_entry(file_descriptions):
-    if not spack.mirror.MirrorCollection():
-        tty.die("Please add a spack mirror to allow " +
+def download_buildcache_entry(file_descriptions, mirror_url=None):
+    if not mirror_url and not spack.mirror.MirrorCollection():
+        tty.die("Please provide or add a spack mirror to allow " +
                 "download of buildcache entries.")
 
+    if mirror_url:
+        mirror_root = os.path.join(
+            mirror_url, _build_cache_relative_path)
+        return _download_buildcache_entry(mirror_root, file_descriptions)
+
     for mirror in spack.mirror.MirrorCollection().values():
         mirror_root = os.path.join(
             mirror.fetch_url,
diff --git a/lib/spack/spack/cmd/release_jobs.py b/lib/spack/spack/ci.py
similarity index 57%
rename from lib/spack/spack/cmd/release_jobs.py
rename to lib/spack/spack/ci.py
index cdd3b10dc3..ed06524073 100644
--- a/lib/spack/spack/cmd/release_jobs.py
+++ b/lib/spack/spack/ci.py
@@ -4,7 +4,11 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 import base64
+import datetime
 import json
+import os
+import shutil
+import tempfile
 import zlib
 
 from six import iteritems
@@ -14,31 +18,36 @@
 
 import llnl.util.tty as tty
 
-import spack.environment as ev
+import spack
+import spack.binary_distribution as bindist
+import spack.cmd.buildcache as buildcache
 import spack.compilers as compilers
+import spack.config as cfg
+import spack.environment as ev
 from spack.dependency import all_deptypes
 from spack.error import SpackError
 import spack.hash_types as ht
+from spack.main import SpackCommand
+import spack.repo
 from spack.spec import Spec
 import spack.util.spack_yaml as syaml
+import spack.util.web as web_util
 
-description = "generate release build set as .gitlab-ci.yml"
-section = "build"
-level = "long"
 
+spack_gpg = SpackCommand('gpg')
+spack_compiler = SpackCommand('compiler')
 
-def setup_parser(subparser):
-    subparser.add_argument(
-        '-o', '--output-file', default=".gitlab-ci.yml",
-        help="path to output file to write")
 
-    subparser.add_argument(
-        '-p', '--print-summary', action='store_true', default=False,
-        help="Print summary of staged jobs to standard output")
+class TemporaryDirectory(object):
+    def __init__(self):
+        self.temporary_directory = tempfile.mkdtemp()
 
-    subparser.add_argument(
-        '--cdash-credentials', default=None,
-        help="Path to file containing CDash authentication token")
+    def __enter__(self):
+        return self.temporary_directory
+
+    def __exit__(self, exc_type, exc_value, exc_traceback):
+        shutil.rmtree(self.temporary_directory)
+        return False
 
 
 def _create_buildgroup(opener, headers, url, project, group_name, group_type):
@@ -131,6 +140,10 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
     format_args.append(spec.name)
     item_idx += 1
 
+    format_str += '/{{{0}}}'.format(item_idx)
+    format_args.append(spec.dag_hash(7))
+    item_idx += 1
+
     format_str += ' {{{0}}}'.format(item_idx)
     format_args.append(spec.version)
     item_idx += 1
@@ -368,6 +381,10 @@ def append_dep(s, d):
         rkey, rlabel = spec_deps_key_label(spec)
 
         for s in spec.traverse(deptype=deptype):
+            if s.external:
+                tty.msg('Will not stage external pkg: {0}'.format(s))
+                continue
+
             skey, slabel = spec_deps_key_label(s)
             spec_labels[slabel] = {
                 'spec': get_spec_string(s),
@@ -377,6 +394,10 @@ def append_dep(s, d):
 
             for d in s.dependencies(deptype=deptype):
                 dkey, dlabel = spec_deps_key_label(d)
+                if d.external:
+                    tty.msg('Will not stage external dep: {0}'.format(d))
+                    continue
+
                 append_dep(slabel, dlabel)
 
     for l, d in spec_labels.items():
@@ -406,15 +427,28 @@ def find_matching_config(spec, ci_mappings):
     return None
 
 
-def release_jobs(parser, args):
-    env = ev.get_env(args, 'release-jobs', required=True)
+def pkg_name_from_spec_label(spec_label):
+    return spec_label[:spec_label.index('/')]
+
+
+def generate_gitlab_ci_yaml(env, print_summary, output_file,
+                            custom_spack_repo=None, custom_spack_ref=None):
+    # FIXME: What's the difference between one that opens with 'spack'
+    # and one that opens with 'env'?  This will only handle the former.
+    with spack.concretize.disable_compiler_existence_check():
+        env.concretize()
 
     yaml_root = ev.config_dict(env.yaml)
 
     if 'gitlab-ci' not in yaml_root:
         tty.die('Environment yaml does not have "gitlab-ci" section')
 
-    ci_mappings = yaml_root['gitlab-ci']['mappings']
+    gitlab_ci = yaml_root['gitlab-ci']
+    ci_mappings = gitlab_ci['mappings']
+
+    final_job_config = None
+    if 'final-stage-rebuild-index' in gitlab_ci:
+        final_job_config = gitlab_ci['final-stage-rebuild-index']
 
     build_group = None
     enable_cdash_reporting = False
@@ -426,23 +460,40 @@ def release_jobs(parser, args):
         build_group = ci_cdash['build-group']
         cdash_url = ci_cdash['url']
         cdash_project = ci_cdash['project']
-        proj_enc = urlencode({'project': cdash_project})
-        eq_idx = proj_enc.find('=') + 1
-        cdash_project_enc = proj_enc[eq_idx:]
         cdash_site = ci_cdash['site']
 
-        if args.cdash_credentials:
-            with open(args.cdash_credentials) as fd:
-                cdash_auth_token = fd.read()
-                cdash_auth_token = cdash_auth_token.strip()
+        if 'SPACK_CDASH_AUTH_TOKEN' in os.environ:
+            tty.verbose("Using CDash auth token from environment")
+            cdash_auth_token = os.environ.get('SPACK_CDASH_AUTH_TOKEN')
+
+    # Make sure we use a custom spack if necessary
+    before_script = None
+    after_script = None
+    if custom_spack_repo:
+        if not custom_spack_ref:
+            custom_spack_ref = 'master'
+        before_script = [
+            ('git clone "{0}" --branch "{1}" --depth 1 '
+             '--single-branch'.format(custom_spack_repo, custom_spack_ref)),
+            # Next line just shows spack version in pipeline output
+            'pushd ./spack && git rev-parse HEAD && popd',
+            '. "./spack/share/spack/setup-env.sh"',
+        ]
+        after_script = [
+            'rm -rf "./spack"'
+        ]
 
     ci_mirrors = yaml_root['mirrors']
     mirror_urls = [url for url in ci_mirrors.values()]
 
+    enable_artifacts_buildcache = False
+    if 'enable-artifacts-buildcache' in gitlab_ci:
+        enable_artifacts_buildcache = gitlab_ci['enable-artifacts-buildcache']
+
     bootstrap_specs = []
     phases = []
-    if 'bootstrap' in yaml_root['gitlab-ci']:
-        for phase in yaml_root['gitlab-ci']['bootstrap']:
+    if 'bootstrap' in gitlab_ci:
+        for phase in gitlab_ci['bootstrap']:
             try:
                 phase_name = phase.get('name')
                 strip_compilers = phase.get('compiler-agnostic')
@@ -469,9 +520,11 @@ def release_jobs(parser, args):
     staged_phases = {}
     for phase in phases:
         phase_name = phase['name']
-        staged_phases[phase_name] = stage_spec_jobs(env.spec_lists[phase_name])
+        with spack.concretize.disable_compiler_existence_check():
+            staged_phases[phase_name] = stage_spec_jobs(
+                env.spec_lists[phase_name])
 
-    if args.print_summary:
+    if print_summary:
         for phase in phases:
             phase_name = phase['name']
             tty.msg('Stages for phase "{0}"'.format(phase_name))
@@ -498,10 +551,12 @@ def release_jobs(parser, args):
             stage_id += 1
 
             for spec_label in stage_jobs:
-                release_spec = spec_labels[spec_label]['spec']
                 root_spec = spec_labels[spec_label]['rootSpec']
+                pkg_name = pkg_name_from_spec_label(spec_label)
+                release_spec = root_spec[pkg_name]
 
-                runner_attribs = find_matching_config(root_spec, ci_mappings)
+                runner_attribs = find_matching_config(
+                    release_spec, ci_mappings)
 
                 if not runner_attribs:
                     tty.warn('No match found for {0}, skipping it'.format(
@@ -529,7 +584,11 @@ def release_jobs(parser, args):
                 job_name = get_job_name(phase_name, strip_compilers,
                                         release_spec, osname, build_group)
 
-                job_scripts = ['./bin/rebuild-package.sh']
+                debug_flag = ''
+                if 'enable-debug-messages' in gitlab_ci:
+                    debug_flag = '-d '
+
+                job_scripts = ['spack {0}ci rebuild'.format(debug_flag)]
 
                 compiler_action = 'NONE'
                 if len(phases) > 1:
@@ -538,7 +597,6 @@ def release_jobs(parser, args):
                         compiler_action = 'INSTALL_MISSING'
 
                 job_vars = {
-                    'SPACK_MIRROR_URL': mirror_urls[0],
                     'SPACK_ROOT_SPEC': format_root_spec(
                         root_spec, main_phase, strip_compilers),
                     'SPACK_JOB_SPEC_PKG_NAME': release_spec.name,
@@ -547,11 +605,13 @@ def release_jobs(parser, args):
 
                 job_dependencies = []
                 if spec_label in dependencies:
-                    job_dependencies = (
-                        [get_job_name(phase_name, strip_compilers,
-                                      spec_labels[dep_label]['spec'],
-                                      osname, build_group)
-                            for dep_label in dependencies[spec_label]])
+                    for dep_label in dependencies[spec_label]:
+                        dep_pkg = pkg_name_from_spec_label(dep_label)
+                        dep_spec = spec_labels[dep_label]['rootSpec'][dep_pkg]
+                        dep_job_name = get_job_name(
+                            phase_name, strip_compilers, dep_spec, osname,
+                            build_group)
+                        job_dependencies.append(dep_job_name)
 
                 # This next section helps gitlab make sure the right
                 # bootstrapped compiler exists in the artifacts buildcache by
@@ -585,34 +645,38 @@ def release_jobs(parser, args):
                             [spec_labels[d]['spec'].name
                                 for d in dependencies[spec_label]])
 
-                    job_vars['SPACK_CDASH_BASE_URL'] = cdash_url
-                    job_vars['SPACK_CDASH_PROJECT'] = cdash_project
-                    job_vars['SPACK_CDASH_PROJECT_ENC'] = cdash_project_enc
                     job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
-                    job_vars['SPACK_CDASH_SITE'] = cdash_site
-                    job_vars['SPACK_RELATED_BUILDS'] = ';'.join(related_builds)
-                    job_vars['SPACK_JOB_SPEC_BUILDGROUP'] = build_group
-
-                job_vars['SPACK_ENABLE_CDASH'] = str(enable_cdash_reporting)
+                    job_vars['SPACK_RELATED_BUILDS_CDASH'] = ';'.join(
+                        related_builds)
 
                 variables.update(job_vars)
 
+                artifact_paths = [
+                    'jobs_scratch_dir',
+                    'cdash_report',
+                ]
+
+                if enable_artifacts_buildcache:
+                    artifact_paths.append('local_mirror/build_cache')
+
                 job_object = {
                     'stage': stage_name,
                     'variables': variables,
                     'script': job_scripts,
                     'tags': tags,
                     'artifacts': {
-                        'paths': [
-                            'jobs_scratch_dir',
-                            'cdash_report',
-                            'local_mirror/build_cache',
-                        ],
+                        'paths': artifact_paths,
                         'when': 'always',
                     },
                     'dependencies': job_dependencies,
                 }
 
+                if before_script:
+                    job_object['before_script'] = before_script
+
+                if after_script:
+                    job_object['after_script'] = after_script
+
                 if image_name:
                     job_object['image'] = image_name
                     if image_entry is not None:
@@ -624,7 +688,7 @@ def release_jobs(parser, args):
                 output_object[job_name] = job_object
                 job_id += 1
 
-    tty.msg('{0} build jobs generated in {1} stages'.format(
+    tty.debug('{0} build jobs generated in {1} stages'.format(
         job_id, stage_id))
 
     # Use "all_job_names" to populate the build group for this set
@@ -637,20 +701,280 @@ def release_jobs(parser, args):
     else:
         tty.warn('Unable to populate buildgroup without CDash credentials')
 
-    # Add an extra, final job to regenerate the index
-    final_stage = 'stage-rebuild-index'
-    final_job = {
-        'stage': final_stage,
-        'variables': {
-            'MIRROR_URL': mirror_urls[0],
-        },
-        'script': './bin/rebuild-index.sh',
-        'tags': ['spack-post-ci']    # may want a runner to handle this
-    }
-    output_object['rebuild-index'] = final_job
-    stage_names.append(final_stage)
+    if final_job_config:
+        # Add an extra, final job to regenerate the index
+        final_stage = 'stage-rebuild-index'
+        final_job = {
+            'stage': final_stage,
+            'script': 'spack buildcache update-index -d {0}'.format(
+                mirror_urls[0]),
+            'tags': final_job_config['tags']
+        }
+        if 'image' in final_job_config:
+            final_job['image'] = final_job_config['image']
+        if before_script:
+            final_job['before_script'] = before_script
+        if after_script:
+            final_job['after_script'] = after_script
+        output_object['rebuild-index'] = final_job
+        stage_names.append(final_stage)
 
     output_object['stages'] = stage_names
 
-    with open(args.output_file, 'w') as outf:
+    with open(output_file, 'w') as outf:
         outf.write(syaml.dump_config(output_object, default_flow_style=True))
+
+
+def url_encode_string(input_string):
+    encoded_keyval = urlencode({'donotcare': input_string})
+    eq_idx = encoded_keyval.find('=') + 1
+    encoded_value = encoded_keyval[eq_idx:]
+    return encoded_value
+
+
+def import_signing_key(base64_signing_key):
+    if not base64_signing_key:
+        tty.warn('No key found for signing/verifying packages')
+        return
+
+    tty.debug('ci.import_signing_key() will attempt to import a key')
+
+    # This command has the side-effect of creating the directory referred
+    # to as GNUPGHOME in setup_environment()
+    list_output = spack_gpg('list', output=str)
+
+    tty.debug('spack gpg list:')
+    tty.debug(list_output)
+
+    decoded_key = base64.b64decode(base64_signing_key)
+    if isinstance(decoded_key, bytes):
+        decoded_key = decoded_key.decode('utf8')
+
+    with TemporaryDirectory() as tmpdir:
+        sign_key_path = os.path.join(tmpdir, 'signing_key')
+        with open(sign_key_path, 'w') as fd:
+            fd.write(decoded_key)
+
+        key_import_output = spack_gpg('trust', sign_key_path, output=str)
+        tty.debug('spack gpg trust {0}'.format(sign_key_path))
+        tty.debug(key_import_output)
+
+    # Now print the keys we have for verifying and signing
+    trusted_keys_output = spack_gpg('list', '--trusted', output=str)
+    signing_keys_output = spack_gpg('list', '--signing', output=str)
+
+    tty.debug('spack gpg list --trusted')
+    tty.debug(trusted_keys_output)
+    tty.debug('spack gpg list --signing')
+    tty.debug(signing_keys_output)
+
+
+def configure_compilers(compiler_action, scope=None):
+    if compiler_action == 'INSTALL_MISSING':
+        tty.debug('Make sure the bootstrapped compiler will be installed')
+        config = cfg.get('config')
+        config['install_missing_compilers'] = True
+        cfg.set('config', config)
+    elif compiler_action == 'FIND_ANY':
+        tty.debug('Just find any available compiler')
+        find_args = ['find']
+        if scope:
+            find_args.extend(['--scope', scope])
+        output = spack_compiler(*find_args)
+        tty.debug('spack compiler find')
+        tty.debug(output)
+        output = spack_compiler('list')
+        tty.debug('spack compiler list')
+        tty.debug(output)
+    else:
+        tty.debug('No compiler action to be taken')
+
+    return None
+
+
+def get_concrete_specs(root_spec, job_name, related_builds, compiler_action):
+    spec_map = {
+        'root': None,
+        'deps': {},
+    }
+
+    if compiler_action == 'FIND_ANY':
+        # This corresponds to a bootstrapping phase where we need to
+        # rely on any available compiler to build the package (i.e. the
+        # compiler needed to be stripped from the spec when we generated
+        # the job), and thus we need to concretize the root spec again.
+        tty.debug('About to concretize {0}'.format(root_spec))
+        concrete_root = Spec(root_spec).concretized()
+        tty.debug('Resulting concrete root: {0}'.format(concrete_root))
+    else:
+        # In this case, either we're relying on Spack to install a missing
+        # compiler bootstrapped in a previous phase, or else we only had one
+        # phase (like a site which already knows what compilers are available
+        # on its runners), so we don't want to concretize that root spec
+        # again.  The reason we take this path in the first case (bootstrapped
+        # compiler) is that we can't concretize a spec at this point if we're
+        # going to ask spack to "install_missing_compilers".
+        concrete_root = Spec.from_yaml(
+            str(zlib.decompress(base64.b64decode(root_spec)).decode('utf-8')))
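+        # Note: the encoded root_spec above is expected to round-trip roughly
+        # as follows (a sketch of the producing side only, assuming the same
+        # hash type that was used when the jobs were generated):
+        #
+        #   encoded = base64.b64encode(
+        #       zlib.compress(concrete_root.to_yaml().encode('utf-8')))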
+
+    spec_map['root'] = concrete_root
+    spec_map[job_name] = concrete_root[job_name]
+
+    if related_builds:
+        for dep_job_name in related_builds.split(';'):
+            spec_map['deps'][dep_job_name] = concrete_root[dep_job_name]
+
+    return spec_map
+
+
+def register_cdash_build(build_name, base_url, project, site, track):
+    url = base_url + '/api/v1/addBuild.php'
+    time_stamp = datetime.datetime.now().strftime('%Y%m%d-%H%M')
+    build_stamp = '{0}-{1}'.format(time_stamp, track)
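+    # e.g. build_stamp might be '20200121-2335-Experimental' when the
+    # track is 'Experimental'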
+    payload = {
+        "project": project,
+        "site": site,
+        "name": build_name,
+        "stamp": build_stamp,
+    }
+
+    tty.debug('Registering cdash build to {0}, payload:'.format(url))
+    tty.debug(payload)
+
+    enc_data = json.dumps(payload).encode('utf-8')
+
+    headers = {
+        'Content-Type': 'application/json',
+    }
+
+    opener = build_opener(HTTPHandler)
+
+    request = Request(url, data=enc_data, headers=headers)
+
+    response = opener.open(request)
+    response_code = response.getcode()
+
+    if response_code != 200 and response_code != 201:
+        msg = 'Adding build failed (response code = {0})'.format(
+            response_code)
+        raise SpackError(msg)
+
+    response_text = response.read()
+    response_json = json.loads(response_text)
+    build_id = response_json['buildid']
+
+    return (build_id, build_stamp)
+
+
+def relate_cdash_builds(spec_map, cdash_base_url, job_build_id, cdash_project,
+                        cdashids_mirror_url):
+    if not job_build_id:
+        return
+
+    dep_map = spec_map['deps']
+
+    headers = {
+        'Content-Type': 'application/json',
+        'Accept': 'application/json',
+    }
+
+    cdash_api_url = '{0}/api/v1/relateBuilds.php'.format(cdash_base_url)
+
+    for dep_pkg_name in dep_map:
+        tty.debug('Fetching cdashid file for {0}'.format(dep_pkg_name))
+        dep_spec = dep_map[dep_pkg_name]
+        dep_build_id = read_cdashid_from_mirror(dep_spec, cdashids_mirror_url)
+
+        payload = {
+            "project": cdash_project,
+            "buildid": job_build_id,
+            "relatedid": dep_build_id,
+            "relationship": "depends on"
+        }
+
+        enc_data = json.dumps(payload).encode('utf-8')
+
+        opener = build_opener(HTTPHandler)
+
+        request = Request(cdash_api_url, data=enc_data, headers=headers)
+
+        response = opener.open(request)
+        response_code = response.getcode()
+
+        if response_code != 200 and response_code != 201:
+            msg = 'Relate builds ({0} -> {1}) failed (resp code = {2})'.format(
+                job_build_id, dep_build_id, response_code)
+            raise SpackError(msg)
+
+        response_text = response.read()
+        tty.debug('Relate builds response: {0}'.format(response_text))
+
+
+def write_cdashid_to_mirror(cdashid, spec, mirror_url):
+    if not spec.concrete:
+        tty.die('Can only write cdashid for concrete spec to mirror')
+
+    with TemporaryDirectory() as tmpdir:
+        local_cdash_path = os.path.join(tmpdir, 'job.cdashid')
+        with open(local_cdash_path, 'w') as fd:
+            fd.write(cdashid)
+
+        buildcache_name = bindist.tarball_name(spec, '')
+        cdashid_file_name = '{0}.cdashid'.format(buildcache_name)
+        remote_url = os.path.join(
+            mirror_url, bindist.build_cache_relative_path(), cdashid_file_name)
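+        # The remote url should end up looking roughly like:
+        #   <mirror_url>/build_cache/<buildcache_name>.cdashid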
+
+        tty.debug('pushing cdashid to url')
+        tty.debug('  local file path: {0}'.format(local_cdash_path))
+        tty.debug('  remote url: {0}'.format(remote_url))
+        web_util.push_to_url(local_cdash_path, remote_url)
+
+
+def read_cdashid_from_mirror(spec, mirror_url):
+    if not spec.concrete:
+        tty.die('Can only read cdashid for concrete spec from mirror')
+
+    buildcache_name = bindist.tarball_name(spec, '')
+    cdashid_file_name = '{0}.cdashid'.format(buildcache_name)
+    url = os.path.join(
+        mirror_url, bindist.build_cache_relative_path(), cdashid_file_name)
+
+    resp_url, resp_headers, response = web_util.read_from_url(url)
+    contents = response.fp.read()
+
+    return int(contents)
+
+
+def push_mirror_contents(env, spec, yaml_path, mirror_url, build_id):
+    if mirror_url:
+        tty.debug('Creating buildcache')
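+        # The positional args below map to _createtarball's parameters:
+        #   (env, spec_yaml, packages, directory, key, no_deps, force,
+        #    rel, unsigned, allow_root, no_rebuild_index)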
+        buildcache._createtarball(env, yaml_path, None, mirror_url, None,
+                                  True, True, False, False, True, False)
+        if build_id:
+            tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
+                build_id, mirror_url))
+            write_cdashid_to_mirror(build_id, spec, mirror_url)
+
+
+def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
+    try:
+        job_pkg = spack.repo.get(job_spec)
+        tty.debug('job package: {0}'.format(job_pkg))
+        stage_dir = job_pkg.stage.path
+        tty.debug('stage dir: {0}'.format(stage_dir))
+        build_env_src = os.path.join(stage_dir, 'spack-build-env.txt')
+        build_out_src = os.path.join(stage_dir, 'spack-build-out.txt')
+        build_env_dst = os.path.join(
+            job_log_dir, 'spack-build-env.txt')
+        build_out_dst = os.path.join(
+            job_log_dir, 'spack-build-out.txt')
+        tty.debug('Copying logs to artifacts:')
+        tty.debug('  1: {0} -> {1}'.format(
+            build_env_src, build_env_dst))
+        shutil.copyfile(build_env_src, build_env_dst)
+        tty.debug('  2: {0} -> {1}'.format(
+            build_out_src, build_out_dst))
+        shutil.copyfile(build_out_src, build_out_dst)
+    except Exception as inst:
+        msg = ('Unable to copy build logs from stage to artifacts '
+               'due to exception: {0}').format(inst)
+        tty.error(msg)
diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py
index 46d1bf3628..5baa63af85 100644
--- a/lib/spack/spack/cmd/buildcache.py
+++ b/lib/spack/spack/cmd/buildcache.py
@@ -303,19 +303,18 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False):
     return specs_from_cli
 
 
-def createtarball(args):
-    """create a binary package from an existing install"""
-    if args.spec_yaml:
+def _createtarball(env, spec_yaml, packages, directory, key, no_deps, force,
+                   rel, unsigned, allow_root, no_rebuild_index):
+    if spec_yaml:
         packages = set()
-        tty.msg('createtarball, reading spec from {0}'.format(args.spec_yaml))
-        with open(args.spec_yaml, 'r') as fd:
+        with open(spec_yaml, 'r') as fd:
             yaml_text = fd.read()
             tty.debug('createtarball read spec yaml:')
             tty.debug(yaml_text)
             s = Spec.from_yaml(yaml_text)
             packages.add('/{0}'.format(s.dag_hash()))
-    elif args.packages:
-        packages = args.packages
+    elif packages:
+        pass  # use the packages list as given
     else:
         tty.die("build cache file creation requires at least one" +
                 " installed package argument or else path to a" +
@@ -324,18 +323,15 @@ def createtarball(args):
     specs = set()
 
     outdir = '.'
-    if args.directory:
-        outdir = args.directory
+    if directory:
+        outdir = directory
 
     mirror = spack.mirror.MirrorCollection().lookup(outdir)
     outdir = url_util.format(mirror.push_url)
 
     signkey = None
-    if args.key:
-        signkey = args.key
-
-    # restrict matching to current environment if one is active
-    env = ev.get_env(args, 'buildcache create')
+    if key:
+        signkey = key
 
     matches = find_matching_specs(pkgs, env=env)
 
@@ -350,7 +346,7 @@ def createtarball(args):
         else:
             tty.debug('adding matching spec %s' % match.format())
             specs.add(match)
-            if args.no_deps is True:
+            if no_deps is True:
                 continue
             tty.debug('recursing dependencies')
             for d, node in match.traverse(order='post',
@@ -368,14 +364,25 @@ def createtarball(args):
     for spec in specs:
         tty.msg('creating binary cache file for package %s ' % spec.format())
         try:
-            bindist.build_tarball(spec, outdir, args.force, args.rel,
-                                  args.unsigned, args.allow_root, signkey,
-                                  not args.no_rebuild_index)
+            bindist.build_tarball(spec, outdir, force, rel,
+                                  unsigned, allow_root, signkey,
+                                  not no_rebuild_index)
         except Exception as e:
             tty.warn('%s' % e)
             pass
 
 
+def createtarball(args):
+    """create a binary package from an existing install"""
+
+    # restrict matching to current environment if one is active
+    env = ev.get_env(args, 'buildcache create')
+
+    _createtarball(env, args.spec_yaml, args.packages, args.directory,
+                   args.key, args.no_deps, args.force, args.rel, args.unsigned,
+                   args.allow_root, args.no_rebuild_index)
+
+
 def installtarball(args):
     """install from a binary package"""
     if not args.packages:
@@ -477,6 +484,32 @@ def check_binaries(args):
         configured_mirrors, specs, args.output_file, args.rebuild_on_error))
 
 
+def download_buildcache_files(concrete_spec, local_dest, require_cdashid,
+                              mirror_url=None):
+    tarfile_name = bindist.tarball_name(concrete_spec, '.spack')
+    tarball_dir_name = bindist.tarball_directory_name(concrete_spec)
+    tarball_path_name = os.path.join(tarball_dir_name, tarfile_name)
+    local_tarball_path = os.path.join(local_dest, tarball_dir_name)
+
+    files_to_fetch = [
+        {
+            'url': tarball_path_name,
+            'path': local_tarball_path,
+            'required': True,
+        }, {
+            'url': bindist.tarball_name(concrete_spec, '.spec.yaml'),
+            'path': local_dest,
+            'required': True,
+        }, {
+            'url': bindist.tarball_name(concrete_spec, '.cdashid'),
+            'path': local_dest,
+            'required': require_cdashid,
+        },
+    ]
+
+    return bindist.download_buildcache_entry(files_to_fetch, mirror_url)
+
+
 def get_tarball(args):
     """Download buildcache entry from a remote mirror to local folder.  This
     command uses the process exit code to indicate its result, specifically,
@@ -493,34 +526,10 @@ def get_tarball(args):
         sys.exit(0)
 
     spec = get_concrete_spec(args)
+    result = download_buildcache_files(spec, args.path, args.require_cdashid)
 
-    tarfile_name = bindist.tarball_name(spec, '.spack')
-    tarball_dir_name = bindist.tarball_directory_name(spec)
-    tarball_path_name = os.path.join(tarball_dir_name, tarfile_name)
-    local_tarball_path = os.path.join(args.path, tarball_dir_name)
-
-    files_to_fetch = [
-        {
-            'url': tarball_path_name,
-            'path': local_tarball_path,
-            'required': True,
-        }, {
-            'url': bindist.tarball_name(spec, '.spec.yaml'),
-            'path': args.path,
-            'required': True,
-        }, {
-            'url': bindist.tarball_name(spec, '.cdashid'),
-            'path': args.path,
-            'required': args.require_cdashid,
-        },
-    ]
-
-    result = bindist.download_buildcache_entry(files_to_fetch)
-
-    if result:
-        sys.exit(0)
-
-    sys.exit(1)
+    if not result:
+        sys.exit(1)
 
 
 def get_concrete_spec(args):
diff --git a/lib/spack/spack/cmd/ci.py b/lib/spack/spack/cmd/ci.py
new file mode 100644
index 0000000000..22c555d85a
--- /dev/null
+++ b/lib/spack/spack/cmd/ci.py
@@ -0,0 +1,482 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import os
+import shutil
+import sys
+
+from six.moves.urllib.parse import urlencode
+
+import llnl.util.tty as tty
+
+import spack.binary_distribution as bindist
+import spack.ci as spack_ci
+import spack.cmd.buildcache as buildcache
+import spack.environment as ev
+import spack.hash_types as ht
+import spack.util.executable as exe
+
+
+description = "manage continuous integration pipelines"
+section = "build"
+level = "long"
+
+
+def get_env_var(variable_name):
+    return os.environ.get(variable_name)
+
+
+def setup_parser(subparser):
+    setup_parser.parser = subparser
+    subparsers = subparser.add_subparsers(help='CI sub-commands')
+
+    start = subparsers.add_parser('start', help=ci_start.__doc__)
+    start.add_argument(
+        '--output-file', default=None,
+        help="Absolute path to file where generated jobs file should be " +
+             "written.  The default is .gitlab-ci.yml in the root of the " +
+             "repository.")
+    start.add_argument(
+        '--copy-to', default=None,
+        help="Absolute path of additional location where generated jobs " +
+             "yaml file should be copied.  Default is not to copy.")
+    start.add_argument(
+        '--spack-repo', default=None,
+        help="Provide a url for this argument if a custom spack repo " +
+             "should be cloned as a step in each generated job.")
+    start.add_argument(
+        '--spack-ref', default=None,
+        help="Provide a git branch or tag if a custom spack branch " +
+             "should be checked out as a step in each generated job.  " +
+             "This argument is ignored if no --spack-repo is provided.")
+    start.add_argument(
+        '--downstream-repo', default=None,
+        help="Url to repository where commit containing jobs yaml file " +
+             "should be pushed.")
+    start.add_argument(
+        '--branch-name', default='default-branch',
+        help="Name of current branch, used in generation of pushed commit.")
+    start.add_argument(
+        '--commit-sha', default='none',
+        help="SHA of current commit, used in generation of pushed commit.")
+    start.set_defaults(func=ci_start)
+
+    # Dynamic generation of the jobs yaml from a spack environment
+    generate = subparsers.add_parser('generate', help=ci_generate.__doc__)
+    generate.add_argument(
+        '--output-file', default=None,
+        help="Absolute path to file where generated jobs file should be " +
+             "written.  The default is .gitlab-ci.yml in the root of the " +
+             "repository.")
+    generate.add_argument(
+        '--copy-to', default=None,
+        help="Absolute path of additional location where generated jobs " +
+             "yaml file should be copied.  Default is not to copy.")
+    generate.add_argument(
+        '--spack-repo', default=None,
+        help="Provide a url for this argument if a custom spack repo " +
+             "should be cloned as a step in each generated job.")
+    generate.add_argument(
+        '--spack-ref', default=None,
+        help="Provide a git branch or tag if a custom spack branch " +
+             "should be checked out as a step in each generated job.  " +
+             "This argument is ignored if no --spack-repo is provided.")
+    generate.set_defaults(func=ci_generate)
+
+    # Commit and push jobs yaml to a downstream CI repo
+    pushyaml = subparsers.add_parser('pushyaml', help=ci_pushyaml.__doc__)
+    pushyaml.add_argument(
+        '--downstream-repo', default=None,
+        help="Url to repository where commit containing jobs yaml file " +
+             "should be pushed.")
+    pushyaml.add_argument(
+        '--branch-name', default='default-branch',
+        help="Name of current branch, used in generation of pushed commit.")
+    pushyaml.add_argument(
+        '--commit-sha', default='none',
+        help="SHA of current commit, used in generation of pushed commit.")
+    pushyaml.set_defaults(func=ci_pushyaml)
+
+    # Check a spec against mirror. Rebuild, create buildcache and push to
+    # mirror (if necessary).
+    rebuild = subparsers.add_parser('rebuild', help=ci_rebuild.__doc__)
+    rebuild.set_defaults(func=ci_rebuild)
+
+
+def ci_generate(args):
+    """Generate jobs file from a spack environment file containing CI info.
+       Before invoking this command, you can set the environment variable
+       SPACK_CDASH_AUTH_TOKEN to contain the CDash authorization token
+       for creating a build group for the generated workload and registering
+       all generated jobs under that build group.  If this environment
+       variable is not set, no build group will be created on CDash."""
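+    # Example usage (hypothetical paths/token; requires an active
+    # environment):
+    #
+    #   $ spack env activate /path/to/env
+    #   $ export SPACK_CDASH_AUTH_TOKEN=<token>
+    #   $ spack ci generate --output-file /path/to/.gitlab-ci.yml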
+    env = ev.get_env(args, 'ci generate', required=True)
+
+    output_file = args.output_file
+    copy_yaml_to = args.copy_to
+    spack_repo = args.spack_repo
+    spack_ref = args.spack_ref
+
+    if not output_file:
+        gen_ci_dir = os.getcwd()
+        output_file = os.path.join(gen_ci_dir, '.gitlab-ci.yml')
+    else:
+        gen_ci_dir = os.path.dirname(output_file)
+        if not os.path.exists(gen_ci_dir):
+            os.makedirs(gen_ci_dir)
+
+    # Generate the jobs
+    spack_ci.generate_gitlab_ci_yaml(
+        env, True, output_file, spack_repo, spack_ref)
+
+    if copy_yaml_to:
+        copy_to_dir = os.path.dirname(copy_yaml_to)
+        if not os.path.exists(copy_to_dir):
+            os.makedirs(copy_to_dir)
+        shutil.copyfile(output_file, copy_yaml_to)
+
+
+def ci_pushyaml(args):
+    """Push the generated jobs yaml file to a remote repository.  The file
+       (.gitlab-ci.yaml) is expected to be in the current directory, which
+       should be the root of the repository."""
+    downstream_repo = args.downstream_repo
+    branch_name = args.branch_name
+    commit_sha = args.commit_sha
+
+    if not downstream_repo:
+        tty.die('No downstream repo to push to, exiting')
+
+    working_dir = os.getcwd()
+    jobs_yaml = os.path.join(working_dir, '.gitlab-ci.yml')
+    git_dir = os.path.join(working_dir, '.git')
+
+    if not os.path.exists(jobs_yaml):
+        tty.die('.gitlab-ci.yml must exist in current directory')
+
+    if not os.path.exists(git_dir):
+        tty.die('.git directory must exist in current directory')
+
+    # Create a temporary working directory
+    with spack_ci.TemporaryDirectory() as temp_dir:
+        git = exe.which('git', required=True)
+
+        # Push a commit with the generated file to the downstream ci repo
+        saved_git_dir = os.path.join(temp_dir, 'original-git-dir')
+
+        shutil.move('.git', saved_git_dir)
+
+        git('init', '.')
+
+        git('config', 'user.email', 'robot@spack.io')
+        git('config', 'user.name', 'Spack Build Bot')
+
+        git('add', '.')
+
+        # If the environment contains a spack directory, do not commit
+        # or push it with any other generated products
+        if os.path.exists('./spack') and os.path.isdir('./spack'):
+            git('rm', '-rf', '--cached', 'spack')
+
+        tty.msg('git commit')
+        commit_message = '{0} {1} ({2})'.format(
+            'Auto-generated commit testing', branch_name, commit_sha)
+
+        git('commit', '-m', '{0}'.format(commit_message))
+
+        tty.msg('git push')
+        git('remote', 'add', 'downstream', downstream_repo)
+        push_to_branch = 'master:multi-ci-{0}'.format(branch_name)
+        git('push', '--force', 'downstream', push_to_branch)
+
+        shutil.rmtree('.git')
+        shutil.move(saved_git_dir, '.git')
+        git('reset', '--hard', 'HEAD')
+
+
+def ci_rebuild(args):
+    """This command represents a gitlab-ci job, corresponding to a single
+       release spec.  As such it must first decide whether or not the spec it
+       has been assigned to build is up to date on the remote binary mirror.
+       If it is not (i.e. the full_hash of the spec as computed locally does
+       not match the one stored in the metadata on the mirror), this script
+       will build the package, create a binary cache for it, and then push all
+       related files to the remote binary mirror.  This script also
+       communicates with a remote CDash instance to share status on the package
+       build process.
+
+       The spec to be built by this job is represented by essentially two
+       pieces of information: 1) a root spec (possibly already concrete, but
+       maybe still needing to be concretized) and 2) a package name used to
+       index that root spec (once the root is, for certain, concrete)."""
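+    # For example (hypothetical values): SPACK_ROOT_SPEC could hold an
+    # encoded concrete 'hdf5' root while SPACK_JOB_SPEC_PKG_NAME is 'zlib',
+    # in which case this job builds the zlib node of that concrete hdf5 DAG.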
+    env = ev.get_env(args, 'ci rebuild', required=True)
+    yaml_root = ev.config_dict(env.yaml)
+
+    # The following environment variables should be defined in the CI
+    # infrastructure (or some other external source) in the case that the
+    # remote mirror is an S3 bucket.  The AWS keys are used to upload
+    # buildcache entries to S3 using the boto3 api.
+    #
+    # AWS_ACCESS_KEY_ID
+    # AWS_SECRET_ACCESS_KEY
+    # S3_ENDPOINT_URL (only needed for non-AWS S3 implementations)
+    #
+    # If present, we will import the SPACK_SIGNING_KEY using the
+    # "spack gpg trust" command, so it can be used both for verifying
+    # dependency buildcache entries and signing the buildcache entry we create
+    # for our target pkg.
+    #
+    # SPACK_SIGNING_KEY
+
+    ci_artifact_dir = get_env_var('CI_PROJECT_DIR')
+    signing_key = get_env_var('SPACK_SIGNING_KEY')
+    root_spec = get_env_var('SPACK_ROOT_SPEC')
+    job_spec_pkg_name = get_env_var('SPACK_JOB_SPEC_PKG_NAME')
+    compiler_action = get_env_var('SPACK_COMPILER_ACTION')
+    cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
+    related_builds = get_env_var('SPACK_RELATED_BUILDS_CDASH')
+
+    gitlab_ci = None
+    if 'gitlab-ci' in yaml_root:
+        gitlab_ci = yaml_root['gitlab-ci']
+
+    if not gitlab_ci:
+        tty.die('spack ci rebuild requires an env containing gitlab-ci cfg')
+
+    enable_cdash = False
+    if 'cdash' in yaml_root:
+        enable_cdash = True
+        ci_cdash = yaml_root['cdash']
+        job_spec_buildgroup = ci_cdash['build-group']
+        cdash_base_url = ci_cdash['url']
+        cdash_project = ci_cdash['project']
+        proj_enc = urlencode({'project': cdash_project})
+        eq_idx = proj_enc.find('=') + 1
+        cdash_project_enc = proj_enc[eq_idx:]
+        cdash_site = ci_cdash['site']
+        tty.debug('cdash_base_url = {0}'.format(cdash_base_url))
+        tty.debug('cdash_project = {0}'.format(cdash_project))
+        tty.debug('cdash_project_enc = {0}'.format(cdash_project_enc))
+        tty.debug('cdash_build_name = {0}'.format(cdash_build_name))
+        tty.debug('cdash_site = {0}'.format(cdash_site))
+        tty.debug('related_builds = {0}'.format(related_builds))
+        tty.debug('job_spec_buildgroup = {0}'.format(job_spec_buildgroup))
+
+    remote_mirror_url = None
+    if 'mirrors' in yaml_root:
+        ci_mirrors = yaml_root['mirrors']
+        mirror_urls = [url for url in ci_mirrors.values()]
+        remote_mirror_url = mirror_urls[0]
+
+    if not remote_mirror_url:
+        tty.die('spack ci rebuild requires an env containing a mirror')
+
+    tty.debug('ci_artifact_dir = {0}'.format(ci_artifact_dir))
+    tty.debug('root_spec = {0}'.format(root_spec))
+    tty.debug('remote_mirror_url = {0}'.format(remote_mirror_url))
+    tty.debug('job_spec_pkg_name = {0}'.format(job_spec_pkg_name))
+    tty.debug('compiler_action = {0}'.format(compiler_action))
+
+    spack_cmd = exe.which('spack')
+
+    os.environ['FORCE_UNSAFE_CONFIGURE'] = '1'
+
+    cdash_report_dir = os.path.join(ci_artifact_dir, 'cdash_report')
+    temp_dir = os.path.join(ci_artifact_dir, 'jobs_scratch_dir')
+    job_log_dir = os.path.join(temp_dir, 'logs')
+    spec_dir = os.path.join(temp_dir, 'specs')
+
+    local_mirror_dir = os.path.join(ci_artifact_dir, 'local_mirror')
+    build_cache_dir = os.path.join(local_mirror_dir, 'build_cache')
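+    # These paths mirror the artifact paths declared for each generated job
+    # (jobs_scratch_dir, cdash_report and, optionally,
+    # local_mirror/build_cache), all rooted under CI_PROJECT_DIR so gitlab
+    # can collect them.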
+
+    enable_artifacts_mirror = False
+    artifact_mirror_url = None
+    if 'enable-artifacts-buildcache' in gitlab_ci:
+        enable_artifacts_mirror = gitlab_ci['enable-artifacts-buildcache']
+        if enable_artifacts_mirror:
+            artifact_mirror_url = 'file://' + local_mirror_dir
+            mirror_msg = 'artifact buildcache enabled, mirror url: {0}'.format(
+                artifact_mirror_url)
+            tty.debug(mirror_msg)
+
+    # Clean out scratch directory from last stage
+    if os.path.exists(temp_dir):
+        shutil.rmtree(temp_dir)
+
+    if os.path.exists(cdash_report_dir):
+        shutil.rmtree(cdash_report_dir)
+
+    os.makedirs(job_log_dir)
+    os.makedirs(spec_dir)
+
+    job_spec_yaml_path = os.path.join(
+        spec_dir, '{0}.yaml'.format(job_spec_pkg_name))
+    job_log_file = os.path.join(job_log_dir, 'pipeline_log.txt')
+
+    cdash_build_id = None
+    cdash_build_stamp = None
+
+    with open(job_log_file, 'w') as log_fd:
+        os.dup2(log_fd.fileno(), sys.stdout.fileno())
+        os.dup2(log_fd.fileno(), sys.stderr.fileno())
+
+        current_directory = os.getcwd()
+        tty.debug('Current working directory: {0}, Contents:'.format(
+            current_directory))
+        directory_list = os.listdir(current_directory)
+        for next_entry in directory_list:
+            tty.debug('  {0}'.format(next_entry))
+
+        # Make a copy of the environment file, so we can overwrite the changed
+        # version in between the two invocations of "spack install"
+        env_src_path = os.path.join(current_directory, 'spack.yaml')
+        env_dst_path = os.path.join(current_directory, 'spack.yaml_BACKUP')
+        shutil.copyfile(env_src_path, env_dst_path)
+
+        tty.debug('job concrete spec path: {0}'.format(job_spec_yaml_path))
+
+        if signing_key:
+            spack_ci.import_signing_key(signing_key)
+
+        spack_ci.configure_compilers(compiler_action)
+
+        spec_map = spack_ci.get_concrete_specs(
+            root_spec, job_spec_pkg_name, related_builds, compiler_action)
+
+        job_spec = spec_map[job_spec_pkg_name]
+
+        tty.debug('Here is the concrete spec: {0}'.format(job_spec))
+
+        with open(job_spec_yaml_path, 'w') as fd:
+            fd.write(job_spec.to_yaml(hash=ht.build_hash))
+
+        tty.debug('Done writing concrete spec')
+
+        # DEBUG
+        with open(job_spec_yaml_path) as fd:
+            tty.debug('Wrote spec file, read it back.  Contents:')
+            tty.debug(fd.read())
+
+        # DEBUG the root spec
+        root_spec_yaml_path = os.path.join(spec_dir, 'root.yaml')
+        with open(root_spec_yaml_path, 'w') as fd:
+            fd.write(spec_map['root'].to_yaml(hash=ht.build_hash))
+
+        if bindist.needs_rebuild(job_spec, remote_mirror_url, True):
+            # Binary on remote mirror is not up to date, we need to rebuild
+            # it.
+            #
+            # FIXME: ensure mirror precedence causes this local mirror to
+            # be chosen ahead of the remote one when installing deps
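+            # 1) add the local artifact mirror (if enabled), so buildcache
+            #    entries produced by earlier stages can be found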
+            if enable_artifacts_mirror:
+                mirror_add_output = spack_cmd(
+                    'mirror', 'add', 'local_mirror', artifact_mirror_url)
+                tty.debug('spack mirror add:')
+                tty.debug(mirror_add_output)
+
+            mirror_list_output = spack_cmd('mirror', 'list')
+            tty.debug('listing spack mirrors:')
+            tty.debug(mirror_list_output)
+
+            # 2) build up install arguments
+            install_args = ['-d', '-v', '-k', 'install', '--keep-stage']
+
+            # 3) create/register a new build on CDash (if enabled)
+            cdash_args = []
+            if enable_cdash:
+                tty.debug('Registering build with CDash')
+                (cdash_build_id,
+                    cdash_build_stamp) = spack_ci.register_cdash_build(
+                    cdash_build_name, cdash_base_url, cdash_project,
+                    cdash_site, job_spec_buildgroup)
+
+                cdash_upload_url = '{0}/submit.php?project={1}'.format(
+                    cdash_base_url, cdash_project_enc)
+
+                cdash_args = [
+                    '--cdash-upload-url', cdash_upload_url,
+                    '--cdash-build', cdash_build_name,
+                    '--cdash-site', cdash_site,
+                    '--cdash-buildstamp', cdash_build_stamp,
+                ]
+
+            spec_cli_arg = [job_spec_yaml_path]
+
+            tty.debug('Installing package')
+
+            try:
+                # Two-pass install is intended to avoid spack trying to
+                # install from buildcache even though the locally computed
+                # full hash is different than the one stored in the spec.yaml
+                # file on the remote mirror.
+                first_pass_args = install_args + [
+                    '--cache-only',
+                    '--only',
+                    'dependencies',
+                ]
+                first_pass_args.extend(spec_cli_arg)
+                tty.debug('First pass install arguments: {0}'.format(
+                    first_pass_args))
+                spack_cmd(*first_pass_args)
+
+                # Overwrite the changed environment file so it doesn't
+                # affect the next install invocation.
+                shutil.copyfile(env_dst_path, env_src_path)
+
+                second_pass_args = install_args + [
+                    '--no-cache',
+                    '--only',
+                    'package',
+                ]
+                second_pass_args.extend(cdash_args)
+                second_pass_args.extend(spec_cli_arg)
+                tty.debug('Second pass install arguments: {0}'.format(
+                    second_pass_args))
+                spack_cmd(*second_pass_args)
+            except Exception as inst:
+                tty.error('Caught exception during install:')
+                tty.error(inst)
+
+            spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)
+
+            # 4) create buildcache on remote mirror
+            spack_ci.push_mirror_contents(env, job_spec, job_spec_yaml_path,
+                                          remote_mirror_url, cdash_build_id)
+
+            # 5) create another copy of that buildcache on the "local artifact
+            # mirror" (a no-op unless the artifacts buildcache is enabled)
+            spack_ci.push_mirror_contents(env, job_spec, job_spec_yaml_path,
+                                          artifact_mirror_url, cdash_build_id)
+
+            # 6) relate this build to its dependencies on CDash (if enabled)
+            if enable_cdash:
+                spack_ci.relate_cdash_builds(
+                    spec_map, cdash_base_url, cdash_build_id, cdash_project,
+                    artifact_mirror_url or remote_mirror_url)
+        else:
+            # There is nothing to do here unless "local artifact mirror" is
+            # enabled, in which case, we need to download the buildcache to
+            # the local artifacts directory to be used by dependent jobs in
+            # subsequent stages
+            tty.debug('No need to rebuild {0}'.format(job_spec_pkg_name))
+            if enable_artifacts_mirror:
+                tty.debug('Getting {0} buildcache'.format(job_spec_pkg_name))
+                tty.debug('Downloading to {0}'.format(build_cache_dir))
+                buildcache.download_buildcache_files(
+                    job_spec, build_cache_dir, True, remote_mirror_url)
+
+
+def ci_start(args):
+    """Kicks of the CI process (currently just calls ci_generate() then
+       ci_push())"""
+    ci_generate(args)
+    ci_pushyaml(args)
+
+
+def ci(parser, args):
+    if args.func:
+        args.func(args)
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index f7dd6601ea..ba3cf2961b 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -1669,7 +1669,8 @@ def do_install(self, **kwargs):
                 spack.hooks.post_install(self.spec)
                 return
             elif kwargs.get('cache_only', False):
-                tty.die('No binary for %s found and cache-only specified')
+                tty.die('No binary for %s found and cache-only specified'
+                        % self.name)
 
             tty.msg('No binary for %s found: installing from source'
                     % self.name)
diff --git a/lib/spack/spack/schema/gitlab_ci.py b/lib/spack/spack/schema/gitlab_ci.py
index f11c9f07d8..9d0ba8a77b 100644
--- a/lib/spack/spack/schema/gitlab_ci.py
+++ b/lib/spack/spack/schema/gitlab_ci.py
@@ -9,6 +9,24 @@
    :lines: 13-
 """
 
+image_schema = {
+    'oneOf': [
+        {
+            'type': 'string'
+        }, {
+            'type': 'object',
+            'properties': {
+                'name': {'type': 'string'},
+                'entrypoint': {
+                    'type': 'array',
+                    'items': {
+                        'type': 'string',
+                    },
+                },
+            },
+        },
+    ],
+}
 
 #: Properties for inclusion in other schemas
 properties = {
@@ -58,24 +76,7 @@
                             'additionalProperties': True,
                             'required': ['tags'],
                             'properties': {
-                                'image': {
-                                    'oneOf': [
-                                        {
-                                            'type': 'string'
-                                        }, {
-                                            'type': 'object',
-                                            'properties': {
-                                                'name': {'type': 'string'},
-                                                'entrypoint': {
-                                                    'type': 'array',
-                                                    'items': {
-                                                        'type': 'string',
-                                                    },
-                                                },
-                                            },
-                                        },
-                                    ],
-                                },
+                                'image': image_schema,
                                 'tags': {
                                     'type': 'array',
                                     'default': [],
@@ -95,6 +96,27 @@
                     },
                 },
             },
+            'enable-artifacts-buildcache': {
+                'type': 'boolean',
+                'default': False,
+            },
+            'enable-debug-messages': {
+                'type': 'boolean',
+                'default': False,
+            },
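+            # In an environment's gitlab-ci section, the flags above appear
+            # as, e.g.:
+            #
+            #   gitlab-ci:
+            #     enable-artifacts-buildcache: True
+            #     enable-debug-messages: True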
+            'final-stage-rebuild-index': {
+                'type': 'object',
+                'additionalProperties': False,
+                'required': ['tags'],
+                'properties': {
+                    'image': image_schema,
+                    'tags': {
+                        'type': 'array',
+                        'default': [],
+                        'items': {'type': 'string'}
+                    },
+                },
+            },
         },
     },
 }
diff --git a/lib/spack/spack/test/ci.py b/lib/spack/spack/test/ci.py
new file mode 100644
index 0000000000..fd8fd7d79d
--- /dev/null
+++ b/lib/spack/spack/test/ci.py
@@ -0,0 +1,167 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import os
+import pytest
+from six.moves.urllib.error import URLError
+
+import spack.ci as ci
+import spack.main as spack_main
+import spack.config as cfg
+import spack.paths as spack_paths
+import spack.spec as spec
+import spack.util.gpg as gpg_util
+import spack.util.web as web_util
+
+
+@pytest.fixture(scope='function')
+def testing_gpg_directory(tmpdir):
+    old_gpg_path = gpg_util.GNUPGHOME
+    gpg_util.GNUPGHOME = str(tmpdir.join('gpg'))
+    yield
+    gpg_util.GNUPGHOME = old_gpg_path
+
+
+@pytest.fixture
+def tmp_scope():
+    """Creates a temporary configuration scope"""
+    base_name = 'internal-testing-scope'
+    current_overrides = set(
+        x.name for x in
+        cfg.config.matching_scopes(r'^{0}'.format(base_name)))
+
+    num_overrides = 0
+    scope_name = base_name
+    while scope_name in current_overrides:
+        scope_name = '{0}{1}'.format(base_name, num_overrides)
+        num_overrides += 1
+
+    with cfg.override(cfg.InternalConfigScope(scope_name)):
+        yield scope_name
+
+
+def test_urlencode_string():
+    s = 'Spack Test Project'
+
+    s_enc = ci.url_encode_string(s)
+
+    assert(s_enc == 'Spack+Test+Project')
+
+
+def test_import_signing_key(testing_gpg_directory):
+    signing_key_dir = spack_paths.mock_gpg_keys_path
+    signing_key_path = os.path.join(signing_key_dir, 'package-signing-key')
+    with open(signing_key_path) as fd:
+        signing_key = fd.read()
+
+    # Just make sure this does not raise any exceptions
+    ci.import_signing_key(signing_key)
+
+
+def test_configure_compilers(mutable_config):
+
+    def assert_missing(config):
+        assert('install_missing_compilers' not in config or
+               config['install_missing_compilers'] is False)
+
+    def assert_present(config):
+        assert('install_missing_compilers' in config and
+               config['install_missing_compilers'] is True)
+
+    original_config = cfg.get('config')
+    assert_missing(original_config)
+
+    ci.configure_compilers('FIND_ANY', scope='site')
+
+    second_config = cfg.get('config')
+    assert_missing(second_config)
+
+    ci.configure_compilers('INSTALL_MISSING')
+    last_config = cfg.get('config')
+    assert_present(last_config)
+
+
+def test_get_concrete_specs(config, mock_packages):
+    root_spec = (
+        'eJztkk1uwyAQhfc5BbuuYjWObSKuUlURYP5aDBjjBPv0RU7iRI6qpKuqUtnxzZvRwHud'
+        'YxSt1oCMyuVoBdI5MN8paxDYZK/ZbkLYU3kqAuA0Dtz6BgGtTB8XdG87BCgzwXbwXArY'
+        'CxYQiLtqXxUTpLZxSjN/mWlwwxAQlJ7v8wpFtsvK1UXSOUyTjvRKB2Um7LBPhZD0l1md'
+        'xJ7VCATfszOiXGOR9np7vwDn7lCMS8SXQNf3RCtyBTVzzNTMUMXmfWrFeR+UngEAEncS'
+        'ASjKwZcid7ERNldthBxjX46mMD2PsJnlYXDs2rye3l+vroOkJJ54SXgZPklLRQmx61sm'
+        'cgKNVFRO0qlpf2pojq1Ro7OG56MY+Bgc1PkIo/WkaT8OVcrDYuvZkJdtBl/+XCZ+NQBJ'
+        'oKg1h6X/VdXRoyE2OWeH6lCXZdHGrauUZAWFw/YJ/0/39OefN3F4Kle3cXjYsF684ZqG'
+        'Tbap/uPwbRx+YPStIQ8bvgA7G6YE'
+    )
+
+    dep_builds = 'diffutils;libiconv'
+    spec_map = ci.get_concrete_specs(root_spec, 'bzip2', dep_builds, 'NONE')
+
+    assert('root' in spec_map and 'deps' in spec_map)
+
+    nonconc_root_spec = 'archive-files'
+    dep_builds = ''
+    spec_map = ci.get_concrete_specs(
+        nonconc_root_spec, 'archive-files', dep_builds, 'FIND_ANY')
+
+    assert('root' in spec_map and 'deps' in spec_map)
+    assert('archive-files' in spec_map)
+
+
+def test_register_cdash_build():
+    build_name = 'Some pkg'
+    base_url = 'http://cdash.fake.org'
+    project = 'spack'
+    site = 'spacktests'
+    track = 'Experimental'
+
+    with pytest.raises(URLError):
+        ci.register_cdash_build(build_name, base_url, project, site, track)
+
+
+def test_relate_cdash_builds(config, mock_packages):
+    root_spec = (
+        'eJztkk1uwyAQhfc5BbuuYjWObSKuUlURYP5aDBjjBPv0RU7iRI6qpKuqUtnxzZvRwHud'
+        'YxSt1oCMyuVoBdI5MN8paxDYZK/ZbkLYU3kqAuA0Dtz6BgGtTB8XdG87BCgzwXbwXArY'
+        'CxYQiLtqXxUTpLZxSjN/mWlwwxAQlJ7v8wpFtsvK1UXSOUyTjvRKB2Um7LBPhZD0l1md'
+        'xJ7VCATfszOiXGOR9np7vwDn7lCMS8SXQNf3RCtyBTVzzNTMUMXmfWrFeR+UngEAEncS'
+        'ASjKwZcid7ERNldthBxjX46mMD2PsJnlYXDs2rye3l+vroOkJJ54SXgZPklLRQmx61sm'
+        'cgKNVFRO0qlpf2pojq1Ro7OG56MY+Bgc1PkIo/WkaT8OVcrDYuvZkJdtBl/+XCZ+NQBJ'
+        'oKg1h6X/VdXRoyE2OWeH6lCXZdHGrauUZAWFw/YJ/0/39OefN3F4Kle3cXjYsF684ZqG'
+        'Tbap/uPwbRx+YPStIQ8bvgA7G6YE'
+    )
+
+    dep_builds = 'diffutils;libiconv'
+    spec_map = ci.get_concrete_specs(root_spec, 'bzip2', dep_builds, 'NONE')
+    cdash_api_url = 'http://cdash.fake.org'
+    job_build_id = '42'
+    cdash_project = 'spack'
+    cdashids_mirror_url = 'https://my.fake.mirror'
+
+    with pytest.raises(web_util.SpackWebError):
+        ci.relate_cdash_builds(spec_map, cdash_api_url, job_build_id,
+                               cdash_project, cdashids_mirror_url)
+
+    # Just make sure passing None for build id doesn't throw exceptions
+    ci.relate_cdash_builds(spec_map, cdash_api_url, None, cdash_project,
+                           cdashids_mirror_url)
+
+
+def test_read_write_cdash_ids(config, tmp_scope, tmpdir, mock_packages):
+    working_dir = tmpdir.join('working_dir')
+    mirror_dir = working_dir.join('mirror')
+    mirror_url = 'file://{0}'.format(mirror_dir.strpath)
+
+    mirror_cmd = spack_main.SpackCommand('mirror')
+    mirror_cmd('add', '--scope', tmp_scope, 'test_mirror', mirror_url)
+
+    mock_spec = spec.Spec('archive-files').concretized()
+    orig_cdashid = '42'
+
+    ci.write_cdashid_to_mirror(orig_cdashid, mock_spec, mirror_url)
+
+    # Now read it back
+    read_cdashid = ci.read_cdashid_from_mirror(mock_spec, mirror_url)
+
+    assert(str(read_cdashid) == orig_cdashid)
diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py
new file mode 100644
index 0000000000..c914f565fc
--- /dev/null
+++ b/lib/spack/spack/test/cmd/ci.py
@@ -0,0 +1,582 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import filecmp
+import os
+import pytest
+
+import llnl.util.filesystem as fs
+
+import spack
+import spack.ci as ci
+import spack.config
+import spack.environment as ev
+import spack.hash_types as ht
+import spack.util.gpg as gpg_util
+from spack.main import SpackCommand
+import spack.paths as spack_paths
+import spack.repo as repo
+from spack.spec import Spec
+from spack.test.conftest import MockPackage, MockPackageMultiRepo
+import spack.util.executable as exe
+import spack.util.spack_yaml as syaml
+
+
+ci_cmd = SpackCommand('ci')
+env_cmd = SpackCommand('env')
+mirror_cmd = SpackCommand('mirror')
+gpg_cmd = SpackCommand('gpg')
+install_cmd = SpackCommand('install')
+buildcache_cmd = SpackCommand('buildcache')
+git = exe.which('git', required=True)
+
+
+@pytest.fixture(scope='function')
+def testing_gpg_directory(tmpdir):
+    old_gpg_path = gpg_util.GNUPGHOME
+    gpg_util.GNUPGHOME = str(tmpdir.join('gpg'))
+    yield
+    gpg_util.GNUPGHOME = old_gpg_path
+
+
+@pytest.fixture()
+def env_deactivate():
+    yield
+    spack.environment._active_environment = None
+    os.environ.pop('SPACK_ENV', None)
+
+
+def initialize_new_repo(repo_path, initial_commit=False):
+    if not os.path.exists(repo_path):
+        os.makedirs(repo_path)
+
+    with fs.working_dir(repo_path):
+        init_args = ['init', '.']
+        # if not initial_commit:
+        #     init_args.append('--bare')
+
+        git(*init_args)
+
+        if initial_commit:
+            readme_contents = "This is the project README\n"
+            readme_path = os.path.join(repo_path, 'README.md')
+            with open(readme_path, 'w') as fd:
+                fd.write(readme_contents)
+            git('add', '.')
+            git('commit', '-m', 'Project initial commit')
+
+
+def get_repo_status(repo_path):
+    with fs.working_dir(repo_path):
+        output = git('rev-parse', '--abbrev-ref', 'HEAD', output=str)
+        current_branch = output.split()[0]
+
+        output = git('rev-parse', 'HEAD', output=str)
+        current_sha = output.split()[0]
+
+        return current_branch, current_sha
+
+
+def set_env_var(key, val):
+    os.environ[key] = val
+
+
+def test_specs_staging(config):
+    """Make sure we achieve the best possible staging for the following
+spec DAG::
+
+        a
+       /|
+      c b
+        |\
+        e d
+          |\
+          f g
+
+In this case, we would expect 'c', 'e', 'f', and 'g' to be in the first stage,
+and then 'd', 'b', and 'a' to be put in the next three stages, respectively.
+
+"""
+    default = ('build', 'link')
+
+    g = MockPackage('g', [], [])
+    f = MockPackage('f', [], [])
+    e = MockPackage('e', [], [])
+    d = MockPackage('d', [f, g], [default, default])
+    c = MockPackage('c', [], [])
+    b = MockPackage('b', [d, e], [default, default])
+    a = MockPackage('a', [b, c], [default, default])
+
+    mock_repo = MockPackageMultiRepo([a, b, c, d, e, f, g])
+
+    with repo.swap(mock_repo):
+        spec_a = Spec('a')
+        spec_a.concretize()
+
+        spec_a_label = ci.spec_deps_key_label(spec_a)[1]
+        spec_b_label = ci.spec_deps_key_label(spec_a['b'])[1]
+        spec_c_label = ci.spec_deps_key_label(spec_a['c'])[1]
+        spec_d_label = ci.spec_deps_key_label(spec_a['d'])[1]
+        spec_e_label = ci.spec_deps_key_label(spec_a['e'])[1]
+        spec_f_label = ci.spec_deps_key_label(spec_a['f'])[1]
+        spec_g_label = ci.spec_deps_key_label(spec_a['g'])[1]
+
+        spec_labels, dependencies, stages = ci.stage_spec_jobs([spec_a])
+
+        assert (len(stages) == 4)
+
+        assert (len(stages[0]) == 4)
+        assert (spec_c_label in stages[0])
+        assert (spec_e_label in stages[0])
+        assert (spec_f_label in stages[0])
+        assert (spec_g_label in stages[0])
+
+        assert (len(stages[1]) == 1)
+        assert (spec_d_label in stages[1])
+
+        assert (len(stages[2]) == 1)
+        assert (spec_b_label in stages[2])
+
+        assert (len(stages[3]) == 1)
+        assert (spec_a_label in stages[3])
+
+
+def test_ci_generate_with_env(tmpdir, mutable_mock_env_path, env_deactivate,
+                              install_mockery, mock_packages):
+    """Make sure we can get a .gitlab-ci.yml from an environment file
+       which has the gitlab-ci, cdash, and mirrors sections."""
+    filename = str(tmpdir.join('spack.yaml'))
+    with open(filename, 'w') as f:
+        f.write("""\
+spack:
+  definitions:
+    - bootstrap:
+      - cmake@3.4.3
+    - old-gcc-pkgs:
+      - archive-files
+      - callpath
+      - hypre@0.2.15
+  specs:
+    - matrix:
+      - [$old-gcc-pkgs]
+  mirrors:
+    some-mirror: https://my.fake.mirror
+  gitlab-ci:
+    bootstrap:
+      - name: bootstrap
+        compiler-agnostic: true
+    mappings:
+      - match:
+          - arch=test-debian6-x86_64
+        runner-attributes:
+          tags:
+            - donotcare
+          image: donotcare
+    final-stage-rebuild-index:
+      image: donotcare
+      tags: [donotcare]
+  cdash:
+    build-group: Not important
+    url: https://my.fake.cdash
+    project: Not used
+    site: Nothing
+""")
+    with tmpdir.as_cwd():
+        env_cmd('create', 'test', './spack.yaml')
+        outputfile = str(tmpdir.join('.gitlab-ci.yml'))
+
+        with ev.read('test'):
+            ci_cmd('generate', '--output-file', outputfile)
+
+        with open(outputfile) as f:
+            contents = f.read()
+            yaml_contents = syaml.load(contents)
+            found_spec = False
+            for ci_key in yaml_contents.keys():
+                if '(bootstrap)' in ci_key:
+                    found_spec = True
+                    assert('cmake' in ci_key)
+            assert(found_spec)
+            assert('stages' in yaml_contents)
+            assert(len(yaml_contents['stages']) == 6)
+            assert(yaml_contents['stages'][0] == 'stage-0')
+            assert(yaml_contents['stages'][5] == 'stage-rebuild-index')
+
+
+def test_ci_generate_with_env_missing_section(tmpdir, mutable_mock_env_path,
+                                              env_deactivate, install_mockery,
+                                              mock_packages):
+    """Make sure we get a reasonable message if we omit gitlab-ci section"""
+    filename = str(tmpdir.join('spack.yaml'))
+    with open(filename, 'w') as f:
+        f.write("""\
+spack:
+  specs:
+    - archive-files
+  mirrors:
+    some-mirror: https://my.fake.mirror
+""")
+
+    expect_out = 'Error: Environment yaml does not have "gitlab-ci" section'
+
+    with tmpdir.as_cwd():
+        env_cmd('create', 'test', './spack.yaml')
+
+        with ev.read('test'):
+            output = ci_cmd('generate', fail_on_error=False, output=str)
+            assert(expect_out in output)
+
+
+def test_ci_generate_with_cdash_token(tmpdir, mutable_mock_env_path,
+                                      env_deactivate, install_mockery,
+                                      mock_packages):
+    """Make sure we it doesn't break if we configure cdash"""
+    filename = str(tmpdir.join('spack.yaml'))
+    with open(filename, 'w') as f:
+        f.write("""\
+spack:
+  specs:
+    - archive-files
+  mirrors:
+    some-mirror: https://my.fake.mirror
+  gitlab-ci:
+    enable-artifacts-buildcache: True
+    enable-debug-messages: True
+    mappings:
+      - match:
+          - archive-files
+        runner-attributes:
+          tags:
+            - donotcare
+          image: donotcare
+  cdash:
+    build-group: Not important
+    url: https://my.fake.cdash
+    project: Not used
+    site: Nothing
+""")
+
+    with tmpdir.as_cwd():
+        env_cmd('create', 'test', './spack.yaml')
+
+        with ev.read('test'):
+            fake_token = 'notreallyatokenbutshouldnotmatter'
+            os.environ['SPACK_CDASH_AUTH_TOKEN'] = fake_token
+            copy_to_file = str(tmpdir.join('backup-ci.yml'))
+            output = ci_cmd('generate', '--copy-to', copy_to_file, output=str)
+            # That fake token should still have resulted in being unable to
+            # register build group with cdash, but the workload should
+            # still have been generated.
+            expect = 'Problem populating buildgroup'
+            assert(expect in output)
+
+            dir_contents = os.listdir(tmpdir.strpath)
+
+            print(dir_contents)
+
+            assert('backup-ci.yml' in dir_contents)
+
+            orig_file = str(tmpdir.join('.gitlab-ci.yml'))
+
+            assert(filecmp.cmp(orig_file, copy_to_file) is True)
+
+
+def test_ci_generate_with_external_pkg(tmpdir, mutable_mock_env_path,
+                                       env_deactivate, install_mockery,
+                                       mock_packages):
+    """Make sure we do not generate jobs for external pkgs"""
+    filename = str(tmpdir.join('spack.yaml'))
+    with open(filename, 'w') as f:
+        f.write("""\
+spack:
+  specs:
+    - archive-files
+    - externaltest
+  mirrors:
+    some-mirror: https://my.fake.mirror
+  gitlab-ci:
+    mappings:
+      - match:
+          - archive-files
+          - externaltest
+        runner-attributes:
+          tags:
+            - donotcare
+          image: donotcare
+""")
+
+    with tmpdir.as_cwd():
+        env_cmd('create', 'test', './spack.yaml')
+        outputfile = str(tmpdir.join('.gitlab-ci.yml'))
+
+        with ev.read('test'):
+            ci_cmd('generate', '--output-file', outputfile)
+
+        with open(outputfile) as f:
+            contents = f.read()
+            print('generated contents: ')
+            print(contents)
+            yaml_contents = syaml.load(contents)
+            for ci_key in yaml_contents.keys():
+                if 'externaltool' in ci_key:
+                    print('Erroneously staged "externaltool" pkg')
+                    assert(False)
+
+
+def test_ci_generate_debug_with_custom_spack(tmpdir, mutable_mock_env_path,
+                                             env_deactivate, install_mockery,
+                                             mock_packages):
+    """Make sure we generate cloning of spack in job script if needed"""
+    filename = str(tmpdir.join('spack.yaml'))
+    with open(filename, 'w') as f:
+        f.write("""\
+spack:
+  specs:
+    - archive-files
+  mirrors:
+    some-mirror: https://my.fake.mirror
+  gitlab-ci:
+    enable-artifacts-buildcache: True
+    enable-debug-messages: True
+    mappings:
+      - match:
+          - archive-files
+        runner-attributes:
+          tags:
+            - donotcare
+          image: donotcare
+""")
+
+    with tmpdir.as_cwd():
+        env_cmd('create', 'test', './spack.yaml')
+        outfile = str(tmpdir.join('.gitlab-ci.yml'))
+
+        with ev.read('test'):
+            spack_repo = 'https://github.com/usera/spack.git'
+            spack_ref = 'custom-branch'
+            expected_clone_str = 'git clone "{0}"'.format(spack_repo)
+
+            ci_cmd('generate', '--output-file', outfile, '--spack-repo',
+                   spack_repo, '--spack-ref', spack_ref)
+
+            with open(outfile) as f:
+                contents = f.read()
+                yaml_contents = syaml.load(contents)
+                for ci_key in yaml_contents.keys():
+                    if '(specs)' in ci_key:
+                        next_job = yaml_contents[ci_key]
+                        print(next_job)
+                        assert('before_script' in next_job)
+                        before_script = next_job['before_script']
+                        for step in before_script:
+                            if expected_clone_str in step:
+                                break
+                        else:
+                            msg = 'job "{0}" did not clone spack repo'.format(
+                                ci_key)
+                            print(msg)
+                            assert(False)
+
+                        assert('script' in next_job)
+                        script = next_job['script']
+                        for step in script:
+                            if 'spack -d ci rebuild' in step:
+                                break
+                        else:
+                            msg = 'job "{0}" missing rebuild command'.format(
+                                ci_key)
+                            print(msg)
+                            assert(False)
+
+
+def test_ci_rebuild_basic(tmpdir, mutable_mock_env_path, env_deactivate,
+                          install_mockery, mock_packages,
+                          testing_gpg_directory):
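+    """Create the environment variables a gitlab runner would set and make
+    sure 'spack ci rebuild' runs with them."""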
+    working_dir = tmpdir.join('working_dir')
+
+    mirror_dir = working_dir.join('mirror')
+    mirror_url = 'file://{0}'.format(mirror_dir.strpath)
+
+    signing_key_dir = spack_paths.mock_gpg_keys_path
+    signing_key_path = os.path.join(signing_key_dir, 'package-signing-key')
+    with open(signing_key_path) as fd:
+        signing_key = fd.read()
+
+    spack_yaml_contents = """
+spack:
+ definitions:
+   - packages: [archive-files]
+ specs:
+   - $packages
+ mirrors:
+   test-mirror: {0}
+ gitlab-ci:
+   enable-artifacts-buildcache: True
+   mappings:
+     - match:
+         - archive-files
+       runner-attributes:
+         tags:
+           - donotcare
+         image: donotcare
+ cdash:
+   build-group: Not important
+   url: https://my.fake.cdash
+   project: Not used
+   site: Nothing
+""".format(mirror_url)
+
+    print('spack.yaml:\n{0}\n'.format(spack_yaml_contents))
+
+    filename = str(tmpdir.join('spack.yaml'))
+    with open(filename, 'w') as f:
+        f.write(spack_yaml_contents)
+
+    with tmpdir.as_cwd():
+        env_cmd('create', 'test', './spack.yaml')
+        with ev.read('test'):
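+            # Concrete root spec, encoded as the generate step provides it
+            # to jobs via SPACK_ROOT_SPEC (base64-encoded, zlib-compressed
+            # spec yaml)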
+            root_spec = ('eJyNjsGOwyAMRO/5Ct96alRFFK34ldUqcohJ6BJAQFHUry9Nk66'
+                         'UXNY3v5mxJ3qSojoDBjnqTGelDUVRQZlMIWpnBZya+nJa0Mv1Fg'
+                         'G8waRcmAQkimkHWxcF9NRptHyVEoaBkoD5i7ecLVC6yZd/YTtpc'
+                         'SIBg5Tr/mnA6mt9qTZL9CiLr7trk7StJyd/F81jKGoqoe2gVAaH'
+                         '0uT7ZwPeH9A875HaA9MfidHdHxgxjgJuTGVtIrvfHGtynjkGyzi'
+                         'xRrkHy94t1lftvv1n4AkVK3kQ')
+
+            # Create the environment variables that gitlab would set for
+            # the job
+            set_env_var('CI_PROJECT_DIR', working_dir.strpath)
+            set_env_var('SPACK_SIGNING_KEY', signing_key)
+            set_env_var('SPACK_ROOT_SPEC', root_spec)
+            set_env_var('SPACK_JOB_SPEC_PKG_NAME', 'archive-files')
+            set_env_var('SPACK_COMPILER_ACTION', 'NONE')
+            set_env_var('SPACK_CDASH_BUILD_NAME', '(specs) archive-files')
+            set_env_var('SPACK_RELATED_BUILDS_CDASH', '')
+
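+            # Failure is tolerated here (fail_on_error=False); the goal is
+            # just to exercise the command and dump its output for debugging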
+            rebuild_output = ci_cmd(
+                'rebuild', fail_on_error=False, output=str)
+
+            print(rebuild_output)
+
+
+def test_ci_pushyaml(tmpdir):
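+    """Make sure 'spack ci pushyaml' pushes the local .gitlab-ci.yml file
+    to a branch of a downstream repository."""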
+    fake_yaml_contents = """generate ci jobs:
+  script:
+    - "./share/spack/qa/gitlab/generate-gitlab-ci-yml.sh"
+  tags:
+    - "spack-pre-ci"
+  artifacts:
+    paths:
+      - ci-generation
+    when: always
+ """
+    local_repo_path = tmpdir.join('local_repo')
+    initialize_new_repo(local_repo_path.strpath, True)
+
+    remote_repo_path = tmpdir.join('remote_repo')
+    initialize_new_repo(remote_repo_path.strpath)
+
+    current_branch, current_sha = get_repo_status(local_repo_path.strpath)
+
+    print('local repo info: {0}, {1}'.format(current_branch, current_sha))
+
+    local_jobs_yaml = local_repo_path.join('.gitlab-ci.yml')
+    with local_jobs_yaml.open('w') as f:
+        f.write(fake_yaml_contents)
+
+    pushyaml_args = [
+        'pushyaml',
+        '--downstream-repo', remote_repo_path.strpath,
+        '--branch-name', current_branch,
+        '--commit-sha', current_sha,
+    ]
+
+    with fs.working_dir(local_repo_path.strpath):
+        ci_cmd(*pushyaml_args)
+
+    with fs.working_dir(remote_repo_path.strpath):
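+        # 'spack ci pushyaml' pushes to a 'multi-ci-' prefixed branch on
+        # the downstream repo; check out that branch to verify the contents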
+        branch_to_checkout = 'multi-ci-{0}'.format(current_branch)
+        git('checkout', branch_to_checkout)
+        with open('.gitlab-ci.yml') as fd:
+            pushed_contents = fd.read()
+            assert pushed_contents == fake_yaml_contents
+
+
+@pytest.mark.disable_clean_stage_check
+def test_push_mirror_contents(tmpdir, mutable_mock_env_path, env_deactivate,
+                              install_mockery, mock_packages, mock_fetch,
+                              mock_stage, testing_gpg_directory):
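+    """Test that the helpers used by rebuild jobs can push binaries and
+    stage logs, and that the results can be listed and downloaded again."""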
+    working_dir = tmpdir.join('working_dir')
+
+    mirror_dir = working_dir.join('mirror')
+    mirror_url = 'file://{0}'.format(mirror_dir.strpath)
+
+    signing_key_dir = spack_paths.mock_gpg_keys_path
+    signing_key_path = os.path.join(signing_key_dir, 'package-signing-key')
+    with open(signing_key_path) as fd:
+        signing_key = fd.read()
+
+    ci.import_signing_key(signing_key)
+
+    spack_yaml_contents = """
+spack:
+  definitions:
+    - packages: [patchelf]
+  specs:
+    - $packages
+  mirrors:
+    test-mirror: {0}
+""".format(mirror_url)
+
+    print('spack.yaml:\n{0}\n'.format(spack_yaml_contents))
+
+    filename = str(tmpdir.join('spack.yaml'))
+    with open(filename, 'w') as f:
+        f.write(spack_yaml_contents)
+
+    with tmpdir.as_cwd():
+        env_cmd('create', 'test', './spack.yaml')
+        with ev.read('test') as env:
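+            # args: root spec, job name, related builds, compiler action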
+            spec_map = ci.get_concrete_specs(
+                'patchelf', 'patchelf', '', 'FIND_ANY')
+            concrete_spec = spec_map['patchelf']
+            spec_yaml = concrete_spec.to_yaml(hash=ht.build_hash)
+            yaml_path = str(tmpdir.join('spec.yaml'))
+            with open(yaml_path, 'w') as ypfd:
+                ypfd.write(spec_yaml)
+
+            install_cmd('--keep-stage', yaml_path)
+
+            # env, spec, yaml_path, mirror_url, build_id
+            ci.push_mirror_contents(
+                env, concrete_spec, yaml_path, mirror_url, '42')
+
+            buildcache_list_output = buildcache_cmd('list', output=str)
+
+            assert('patchelf' in buildcache_list_output)
+
+            logs_dir = working_dir.join('logs_dir')
+            if not os.path.exists(logs_dir.strpath):
+                os.makedirs(logs_dir.strpath)
+
+            ci.copy_stage_logs_to_artifacts(concrete_spec, logs_dir.strpath)
+
+            logs_dir_list = os.listdir(logs_dir.strpath)
+
+            assert('spack-build-env.txt' in logs_dir_list)
+            assert('spack-build-out.txt' in logs_dir_list)
+
+            # Also make sure that if something goes wrong while copying the
+            # stage logs, no exception propagates (here the spec is None)
+            ci.copy_stage_logs_to_artifacts(None, logs_dir.strpath)
+
+            dl_dir = working_dir.join('download_dir')
+            if not os.path.exists(dl_dir.strpath):
+                os.makedirs(dl_dir.strpath)
+
+            buildcache_cmd('download', '--spec-yaml', yaml_path, '--path',
+                           dl_dir.strpath, '--require-cdashid')
+
+            dl_dir_list = os.listdir(dl_dir.strpath)
+
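+            # the download should include the tarball, the spec yaml file,
+            # and the cdashid file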
+            assert(len(dl_dir_list) == 3)
diff --git a/lib/spack/spack/test/cmd/release_jobs.py b/lib/spack/spack/test/cmd/release_jobs.py
deleted file mode 100644
index 7a51354c1c..0000000000
--- a/lib/spack/spack/test/cmd/release_jobs.py
+++ /dev/null
@@ -1,128 +0,0 @@
-# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-import os
-import pytest
-import re
-
-import spack
-import spack.environment as ev
-from spack import repo
-from spack.cmd.release_jobs import stage_spec_jobs, spec_deps_key_label
-from spack.main import SpackCommand
-from spack.spec import Spec
-from spack.test.conftest import MockPackage, MockPackageMultiRepo
-
-
-env = SpackCommand('env')
-release_jobs = SpackCommand('release-jobs')
-
-
-@pytest.fixture()
-def env_deactivate():
-    yield
-    spack.environment._active_environment = None
-    os.environ.pop('SPACK_ENV', None)
-
-
-def test_specs_staging(config):
-    """Make sure we achieve the best possible staging for the following
-spec DAG::
-
-        a
-       /|
-      c b
-        |\
-        e d
-          |\
-          f g
-
-In this case, we would expect 'c', 'e', 'f', and 'g' to be in the first stage,
-and then 'd', 'b', and 'a' to be put in the next three stages, respectively.
-
-"""
-    default = ('build', 'link')
-
-    g = MockPackage('g', [], [])
-    f = MockPackage('f', [], [])
-    e = MockPackage('e', [], [])
-    d = MockPackage('d', [f, g], [default, default])
-    c = MockPackage('c', [], [])
-    b = MockPackage('b', [d, e], [default, default])
-    a = MockPackage('a', [b, c], [default, default])
-
-    mock_repo = MockPackageMultiRepo([a, b, c, d, e, f, g])
-
-    with repo.swap(mock_repo):
-        spec_a = Spec('a')
-        spec_a.concretize()
-
-        spec_a_label = spec_deps_key_label(spec_a)[1]
-        spec_b_label = spec_deps_key_label(spec_a['b'])[1]
-        spec_c_label = spec_deps_key_label(spec_a['c'])[1]
-        spec_d_label = spec_deps_key_label(spec_a['d'])[1]
-        spec_e_label = spec_deps_key_label(spec_a['e'])[1]
-        spec_f_label = spec_deps_key_label(spec_a['f'])[1]
-        spec_g_label = spec_deps_key_label(spec_a['g'])[1]
-
-        spec_labels, dependencies, stages = stage_spec_jobs([spec_a])
-
-        assert (len(stages) == 4)
-
-        assert (len(stages[0]) == 4)
-        assert (spec_c_label in stages[0])
-        assert (spec_e_label in stages[0])
-        assert (spec_f_label in stages[0])
-        assert (spec_g_label in stages[0])
-
-        assert (len(stages[1]) == 1)
-        assert (spec_d_label in stages[1])
-
-        assert (len(stages[2]) == 1)
-        assert (spec_b_label in stages[2])
-
-        assert (len(stages[3]) == 1)
-        assert (spec_a_label in stages[3])
-
-
-def test_release_jobs_with_env(tmpdir, mutable_mock_env_path, env_deactivate,
-                               install_mockery, mock_packages):
-    """Make sure we can get a .gitlab-ci.yml from an environment file
-    which has the gitlab-ci, cdash, and mirrors sections."""
-    filename = str(tmpdir.join('spack.yaml'))
-    with open(filename, 'w') as f:
-        f.write("""\
-spack:
-  definitions:
-    - packages: [archive-files]
-  specs:
-    - $packages
-  mirrors:
-    some-mirror: https://my.fake.mirror
-  gitlab-ci:
-    mappings:
-      - match:
-          - archive-files
-        runner-attributes:
-          tags:
-            - donotcare
-          image: donotcare
-  cdash:
-    build-group: Not important
-    url: https://my.fake.cdash
-    project: Not used
-    site: Nothing
-""")
-    with tmpdir.as_cwd():
-        env('create', 'test', './spack.yaml')
-        outputfile = str(tmpdir.join('.gitlab-ci.yml'))
-
-        with ev.read('test'):
-            release_jobs('--output-file', outputfile)
-
-        with open(outputfile) as f:
-            contents = f.read().replace(os.linesep, '')
-            assert('archive-files' in contents)
-            assert(re.search(r'stages:\s*\[\s*stage-0', contents))
diff --git a/var/spack/gpg.mock/keys/package-signing-key b/var/spack/gpg.mock/keys/package-signing-key
new file mode 100644
index 0000000000..43d5c4ed66
--- /dev/null
+++ b/var/spack/gpg.mock/keys/package-signing-key
@@ -0,0 +1 @@
+LS0tLS1CRUdJTiBQR1AgUFVCTElDIEtFWSBCTE9DSy0tLS0tCgptUUVOQkYzd0NMMEJDQUMyUURBemw4c1RLRmxHMWRsS09GNm9YckFqZUFyaVdtWWhNZUZFcXNhU3ZwUU5QekJSCkkvbDc5Zmd1OUk5bytxZDNCUmJlK1VGbFpKdnBDS051aFl3R1QyeTZYWGJjemNRRVVjRzBYRU5xd1prRlVaRlAKd1l4V0VZVURwTnZVeHpqUWR5RlhIcjdMa2svM3JncDlZVUJ4MTMzQTVJbEZtcFBCbVdIeFFObVRCelV2Qk5pRgpEM0pvNWZZaENQMzVyM080cVpxWE1hMHZPUFI2QS95TjVnUXRLdCtFemgwOGpYRHRQT0Y4U3pOQm9Kc0FTcnRTCmZ6Z2p0L0xtdXc3QWNqM1ZsR2JyS2wzTER3Q0NjUVB6Nk9RR0dSeFJsc3oyOHl5STA0a2pBM2FjYjkwUDlHT1cKenUyMjBsZ29Bb2x3M1dMdC9VMUNrNHdrR0dXSEpHQlpNQnovQUJFQkFBRzBNRk53WVdOcklFSjFhV3hrSUZCcApjR1ZzYVc1bElDaEVaVzF2SUV0bGVTa2dQR3RsZVVCemNHRmpheTVrWlcxdlBva0JUZ1FUQVFnQU9CWWhCT29YCjE3clFhY25GR3hab09XWHhrU2kvR0JHREJRSmQ4QWk5QWhzdkJRc0pDQWNDQmhVS0NRZ0xBZ1FXQWdNQkFoNEIKQWhlQUFBb0pFR1h4a1NpL0dCR0RybEVILzJPVEltVGxjYWtQNmtDa1hJcEIrdTYySXRocVMvcGo2eUg5VFppeAprQ01UT2xLaWRIV3ludXZidHFwZjJ5RGE3NTJnaTg5M2w5ckhGS2VYWkFQVXN0eENaanhxdUJqNHg4UWFacFZiClNBdFg2UGlOTjJnQldsRVhIM0RYZllMK1QzSTkrVW9nVlJOUWFTZmxaM2ovL3RqVDMwMjA1UzBsRXZuVnBSdXYKNm5iQkZ4V0pHTzRPZTRlby81ajhHeE9LRHRsS2t2TXc4SGtHcUczM3hiREhBODB2VXlwRFZMSkJ0OUpJRVF2agpVdmhaengxcmNPMFdjK2FqNVNKOWVOUkFEN05FMzFVWHRYTnVpNTAxc29IR3FVL3R3TXl2c3NSMFQzMUdlYlEvCmJrbXY0KzlMczhuU2tvTEdYVG5rbi9YT0piRnZUbXhmZFBrRVVUc0llRFBGS1phNUFRMEVYZkFJdlFFSUFLM2UKYk12TUl2QndxeEVFSTR1bmh1aGwvVjJJR1ZpYS8zeldPNm5aVlduMmdORFluNXdGcituYTJSQXRpKzZyTFIzZwo3RTVyWk9hcFAzdVc0b3I3QUd3WmFqc2pLdGoyR0xqWGZtRW16R204TFBqTUN2ZEFva2gzbnh3eHZLMFBoUHA5CjJOVEJIU2xuN0thODJuTUo4a0pqTEpBekxGb2dZN1Nvd0dUeVp0RHd6RDI0ckxaYnNCRXdkbm9GRWI4SFM5a1UKd0paWnBuMUkwYVBPNCtla2VoZ1FRKzNmLzAyZXRYcUtNOXd1cWVMZWVXZXo1Rlk5amUyYXN1ZVI2TnBabDVmUgpjK1loakdiNmtYb0NtZU1sNnIrN1I4YVA4Qjh0VHdrRFN1bFVZS0VJUE5wVUczTEI3K0paOEdSeFZNa3N4Z3V2Cld1VXpCN0k2UEcwZWxmNUpKaFVBRVFFQUFZa0NiQVFZQVFnQUlCWWhCT29YMTdyUWFjbkZHeFpvT1dYeGtTaS8KR0JHREJRSmQ4QWk5QWhzdUFVQUpFR1h4a1NpL0dCR0R3SFFnQkJrQkNBQWRGaUVFNlVlaWpBdkxQMmt0MC9WdgpaZTBzbGlXcXJXTUZBbDN3Q0wwQUNna1FaZTBzbGlXcXJXT1R1Z2YvYWVqTk0yZGVFdTQrQWo1TDdRWVh4aXhnClJPZ25DVkJ0ekN3S2lPSWNlS3hjM2RDQmpHVlZDTksvTmxNY205ZEY0N2YxMDl3cGVMc2tESjllZ1RWcG0yc3kKQXNJRE5HRWIwNzZFV21vdGloTmhJZWtHam1NLzdDL244NlpMeVEyL05ZNVpxSzJkWUVpbHZxTVhIbkY0aDdnOAozVzdpUzRQSzh5QXp1T1krcDVXQ29hcGdlLzVMeEcvNHNGOGdOMVJUc2xyYjdwVWxCMkNPbXoyTXI3MExxS2VmCnF5TkNhVkV6NkVSdFRhek1xTW1nbERCRDE5TmFnQkxBM1BpSklsQ1VWNDNqVDBOUXVSVjlxSCsvbitlT0xFeUYKTkdNMTNHYTRJVjVhZUp5RzNEbStLQnpvQ0VzRmtvMmFBRWJ4SEhodlpjYlY1RkozRk4xUEhodkJtZUZraFd5ZgpCLzlGdzgwanNVMHJESU1nMU5NWUtlK05mN0FOOEw1NXI3OWM2Q2FYVXd2NG1KSDRUSXpVaE5ZS0cyU3BwQVFZCk1wOVJtc0l6blM3cWdBZ2JLLzFBSysrZEdFLzZjUGZ6VXd3V1ozSFVmcy9hZG4rNVVNU2NaRmhUbHBmOWdBSmQKTkRwY0lQM084YnVTMEFSK2NwVm52M1I4TmJ2TlJURWhEUDg2TFhYcDZueld4SzZYdW51a2dua04vcFpsdGY5eApwaFZReEVRMGUzSHRaNi9UYWZXaVlzVEx0RmdMNTc4VTBjTm94bVA4M1MxRitUUlJXSiszdXh5SFUxWVZiSFVHCng3MzdPdDZ6ZWRTbFBJS0JFOUF5UU5mL3pMWUtYNTNmbStaZ0RMNFQ4cnVERGJrQnNRRkxiME5CQURpSDMvSDYKOHB0NERBSmVzOE93NzcrY011dExZWWluCj1TODd5Ci0tLS0tRU5EIFBHUCBQVUJMSUMgS0VZIEJMT0NLLS0tLS0KLS0tLS1CRUdJTiBQR1AgUFJJVkFURSBLRVkgQkxPQ0stLS0tLQoKbFFPWUJGM3dDTDBCQ0FDMlFEQXpsOHNUS0ZsRzFkbEtPRjZvWHJBamVBcmlXbVloTWVGRXFzYVN2cFFOUHpCUgpJL2w3OWZndTlJOW8rcWQzQlJiZStVRmxaSnZwQ0tOdWhZd0dUMnk2WFhiY3pjUUVVY0cwWEVOcXdaa0ZVWkZQCndZeFdFWVVEcE52VXh6alFkeUZYSHI3TGtrLzNyZ3A5WVVCeDEzM0E1SWxGbXBQQm1XSHhRTm1UQnpVdkJOaUYKRDNKbzVmWWhDUDM1cjNPNHFacVhNYTB2T1BSNkEveU41Z1F0S3QrRXpoMDhqWER0UE9GOFN6TkJvSnNBU3J0UwpmemdqdC9MbXV3N0FjajNWbEdicktsM0xEd0NDY1FQejZPUUdHUnhSbHN6Mjh5eUkwNGtqQTNhY2I5MFA5R09XCnp1MjIwbGdvQW9sdzNXTHQvVTFDazR3a0dHV0hKR0JaTUJ6L0FCRUJBQUVBQi8wYW15bE9SdUV6SlVkUFE0WHEKdzJyeU9veU5TUWVSdnUrODlkcTBteGZOTVh4TXFNWmxlaEtBYWNxM0ZDWGhoZ0l2cW5NSnp5cWdZajB1bW4rOQpjVXFkV3pmOHh3dEV0ZGRoYUF3V3lBZGhqT3pKYlh5QXY3azhrV2N4UG42SFJDUkRycmlUenQyOHUxbm9SeVNwCjVDb3oxR2s3NFVFM0E1ZUJnbUpkaFlHZDZJYlVFWk5vR2d1UDdYWDhQNmhyaW9lYkgrVGpXVHFWRkVFOXdwSW4KRmhSc3VtMktLWFM5cjNBUGFzM05RVjBRcHhORjdzWkRWM1BrWWVpR096Zzk1amtGb2x5cjhIb0FjSFVJMkwrKwpFbWVOZDZadkFMN2EvTk4vdUdHY05Tb3VtWTVJaVdSM0ZscGFlNmJ4UXZybDNYMlpGVmJZRW84U1VjWFNKalF0Cm1CWEJCQUROOGJ4ZHhwd3NPd0NYT2xTNWhWWW1xOFE4Q1dUaEVyOHpzK2gzY2dCSGlQZ0FmUUNJMGYzM2o1OXQKcTd2SERFc3NHTGg3WDJpdlF6M3BwZkVma3h0VHAzdEJ1WFNxczR1V1dYeXJHWDlIbGZRaDBCRFBQOXFLTlpPRQpXTUNHSXZtSUJsQnhJNEp2U1ZiOUtlSEtrZkRCcFdJUjFYUUZGK0RlSXE3Qlh2OGF2d1FBNG93eFhLTWxVMnpDCjh5aFNqd3hpaHo1R3gxNXpMUDJ5RzgwM2NZZ21xYjRYSGVUczBrbmlrMVprNi81SXNkZjVVc2hSU3BVY1hJdGsKM0RyeS9uYzNuR01lN2hmS21UdTBJSUNrend0MFpCUE12RnV2RVRjTFp5bm1ZdTdmRlpENXVnNmpxb09aVFZpTApUZ1NKMFNuY2FvWGVhSHppd2dtUENPTGZtUVFPemNFRC9BcGVmVWVPL2M2Y2dNazNwQ3pLaEtaYTlqTS80cm9hCnFLZ1VXNGxFWHdvVUdJZkhiS3RGMmdkZCtBWGlkWmtiV3lycGRuejh5SkY0Q1JlMTFuV2ZwVlQxblNucm0zM1IKM1djdDB3WnJOckVBWlNhVzF1NE5GU09OM2Z3NmVoeEI5d0tWYjk5dFJIUU0zakorRGlWNENTWFB5NTc0YmxKeApiaUY3SDNWTUVTQVNOTzYwTUZOd1lXTnJJRUoxYVd4a0lGQnBjR1ZzYVc1bElDaEVaVzF2SUV0bGVTa2dQR3RsCmVVQnpjR0ZqYXk1a1pXMXZQb2tCVGdRVEFRZ0FPQlloQk9vWDE3clFhY25GR3hab09XWHhrU2kvR0JHREJRSmQKOEFpOUFoc3ZCUXNKQ0FjQ0JoVUtDUWdMQWdRV0FnTUJBaDRCQWhlQUFBb0pFR1h4a1NpL0dCR0RybEVILzJPVApJbVRsY2FrUDZrQ2tYSXBCK3U2Mkl0aHFTL3BqNnlIOVRaaXhrQ01UT2xLaWRIV3ludXZidHFwZjJ5RGE3NTJnCmk4OTNsOXJIRktlWFpBUFVzdHhDWmp4cXVCajR4OFFhWnBWYlNBdFg2UGlOTjJnQldsRVhIM0RYZllMK1QzSTkKK1VvZ1ZSTlFhU2ZsWjNqLy90alQzMDIwNVMwbEV2blZwUnV2Nm5iQkZ4V0pHTzRPZTRlby81ajhHeE9LRHRsSwprdk13OEhrR3FHMzN4YkRIQTgwdlV5cERWTEpCdDlKSUVRdmpVdmhaengxcmNPMFdjK2FqNVNKOWVOUkFEN05FCjMxVVh0WE51aTUwMXNvSEdxVS90d015dnNzUjBUMzFHZWJRL2JrbXY0KzlMczhuU2tvTEdYVG5rbi9YT0piRnYKVG14ZmRQa0VVVHNJZURQRktaYWRBNWNFWGZBSXZRRUlBSzNlYk12TUl2QndxeEVFSTR1bmh1aGwvVjJJR1ZpYQovM3pXTzZuWlZXbjJnTkRZbjV3RnIrbmEyUkF0aSs2ckxSM2c3RTVyWk9hcFAzdVc0b3I3QUd3WmFqc2pLdGoyCkdMalhmbUVtekdtOExQak1DdmRBb2toM254d3h2SzBQaFBwOTJOVEJIU2xuN0thODJuTUo4a0pqTEpBekxGb2cKWTdTb3dHVHladER3ekQyNHJMWmJzQkV3ZG5vRkViOEhTOWtVd0paWnBuMUkwYVBPNCtla2VoZ1FRKzNmLzAyZQp0WHFLTTl3dXFlTGVlV2V6NUZZOWplMmFzdWVSNk5wWmw1ZlJjK1loakdiNmtYb0NtZU1sNnIrN1I4YVA4Qjh0ClR3a0RTdWxVWUtFSVBOcFVHM0xCNytKWjhHUnhWTWtzeGd1dld1VXpCN0k2UEcwZWxmNUpKaFVBRVFFQUFRQUgKOTA0YW5NVHY3c0lUMnNUS0Z5MmxFL1ZSMjM4b3BEb3BacHV0b1IrcmdiTVlDTVhJaWVxTW8zbHAxaGh1WFczWgpkMnIwbnpLYkM3aVNUdkkxMVk2Wk1wZGMwMXU5Y0lJR0N4VDl1TWZycGVmWm9Gb2pUc25EUHlOT21Tc1JMTENSClNDcytYU2sxbHVRQ3kwd2JpZ1lqY2JCZzNLUHFXUUlqaXFhZEo5QXhFLzdIYnFUcXZXd3owaWlUNGJndlNhWDMKRTRJbjFhZ2NhV1RQMFpjaTNuMWQxZGR5c1pIUjFMaXR0cm56TTVEb3lSeHc0K3ZjTWIzb2VBME9yaVRuR3dadApuN1pqR291TkFlUEh5R0ZHd1pFZWlwMllzVWJRQmZKbk9FSlB4UFlqRm1NU1IyT3I2MzdtMXlGM1pZdDRPMysrCjF3Qm05eVJUYXRPOHlsakhTY3VCd1FRQXdMVVdCM21CWmhqSGZjS0pkRmpRZG8vM2FFQWI1SENhMWVhVWFTTXUKVSs1RkMwV0luNnF1OWhDQWdtb05IeFdxbXkzM3lKaUtwbHRuNEpCKzF5MFNBNkFKTFo4TER5WXgxdVhPTHJEYgpmdUEwSVRickJRZCt6U0hHYVp5Tks4THZpNENNWXN0NVQxR1VZa1lVSWszd2I4MlJDZmFFUFhza2gybXBWQkVICjRRVUVBT2I1WTNXNzNqTG44S3hlU3AvL1FGbkw4QTlMTzJFMFFHNlp0aDRhd1J6THF0MVpTU0FhQXRCU1RLTHcKeFgxbVRGb1VjbVBjb3RwT1BvemlCU2daSzJ1VnoxeXdlNmpMUmg1bFUwZnRqekJiNGJ2OTB2K3BwM1ZNZEhnUQo0YytHN09vMERBcWxkR2FkY2dRVGgvMGdBKzR0L3psMTAyTjVoZHpsUUtUUy8zM1JBLzBjOHpWcnZ4MnorNDIvCncwMHNKWGJSSndZRUxKUWV5OVpoZklnUlNkczlhWE5pMEUyZEYrd0xtN0JOVytiQkQ5MlV3OG5FYmxVZWNpamEKWDVZcXhWdFhTVG5IVlZDN1FoeXgxVSt3UHFDZlJTYmJPL0FhUlBrMyt6dmRRc2Rkbk9DM3h6TzEvMkdLc3FENwoyZGNyVGFBZC9pRTBkMVB6VUNLYWdnRHh3RWZINFQ4OGlRSnNCQmdCQ0FBZ0ZpRUU2aGZYdXRCcHljVWJGbWc1ClpmR1JLTDhZRVlNRkFsM3dDTDBDR3k0QlFBa1FaZkdSS0w4WUVZUEFkQ0FFR1lFSUFCMFdJU1RwUjZLTUM4cy8KYVMzVDlXOWw3U3lXSmFxdFl3VUNYZkFJdlFBS0NSQmw3U3lXSmFxdFk1TzZCLzlwNk0weloxNFM3ajRDUGt2dApCaGZHTEdCRTZDY0pVRzNNTEFxSTRoeDRyRnpkMElHTVpWVUkwcjgyVXh5YjEwWGp0L1hUM0NsNHV5UU1uMTZCCk5XbWJheklDd2dNMFlSdlR2b1JhYWkyS0UyRWg2UWFPWXovc0wrZnpwa3ZKRGI4MWpsbW9yWjFnU0tXK294Y2UKY1hpSHVEemRidUpMZzhyeklETzQ1ajZubFlLaHFtQjcva3ZFYi9pd1h5QTNWRk95V3R2dWxTVUhZSTZiUFl5dgp2UXVvcDUrckkwSnBVVFBvUkcxTnJNeW95YUNVTUVQWDAxcUFFc0RjK0lraVVKUlhqZU5QUTFDNUZYMm9mNytmCjU0NHNUSVUwWXpYY1pyZ2hYbHA0bkliY09iNG9IT2dJU3dXU2pab0FSdkVjZUc5bHh0WGtVbmNVM1U4ZUc4R1oKNFdTRmJKOEgvMFhEelNPeFRTc01neURVMHhncDc0MS9zQTN3dm5tdnYxem9KcGRUQy9pWWtmaE1qTlNFMWdvYgpaS21rQkJneW4xR2F3ak9kTHVxQUNCc3IvVUFyNzUwWVQvcHc5L05UREJabmNkUit6OXAyZjdsUXhKeGtXRk9XCmwvMkFBbDAwT2x3Zy9jN3h1NUxRQkg1eWxXZS9kSHcxdTgxRk1TRU0vem90ZGVucWZOYkVycGU2ZTZTQ2VRMysKbG1XMS8zR21GVkRFUkRSN2NlMW5yOU5wOWFKaXhNdTBXQXZudnhUUncyakdZL3pkTFVYNU5GRlluN2U3SElkVApWaFZzZFFiSHZmczYzck41MUtVOGdvRVQwREpBMS8vTXRncGZuZCtiNW1BTXZoUHl1NE1OdVFHeEFVdHZRMEVBCk9JZmY4ZnJ5bTNnTUFsNnp3N0R2djV3eTYwdGhpS2M9Cj0xQndzCi0tLS0tRU5EIFBHUCBQUklWQVRFIEtFWSBCTE9DSy0tLS0tCg==
\ No newline at end of file
-- 
GitLab