FAQ | This is a LIVE service | Changelog

Skip to content
Commits on Source (2)
# EditorConfig settings (https://editorconfig.org/).
# Top-most EditorConfig file: editors stop searching parent directories.
root=true

# Markdown: 2-space indentation, wrap lines at 80 columns.
[*.md]
indent_size=2
indent_style=space
max_line_length=80

# YAML: 2-space indentation (tabs are invalid in YAML).
[*.{yml,yaml}]
indent_size=2
indent_style=space
......
......@@ -5,6 +5,20 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [2.4.0] - 2023-06-28
### Added
- New `auto-devops/common-pipeline.yml` intended to be included from the
majority of our applications, packages, libraries and tools.
- New `job-rules.yml` which contains common job rules templates.
- New `auto-devops/python-tox.yml` intended to provide a slimmed-down version
of the existing tox test pipeline which doesn't do things like spin up databases,
etc.
- New `auto-devops/python-package.yml` intended for standalone Python packages.
This new pipeline incorporates functionality from `pypi-release.yml` and can,
in time, replace it.
## [2.3.0] - 2023-06-28
### Added
......
......@@ -2,3 +2,24 @@
This repository contains a set of templates intended to be included into CI jobs
on GitLab. See the comments at the top of each template for usage information.
There is a "common pipeline" which brings in Auto DevOps and "safe" jobs
suitable for a wide array of projects which can be included in your CI
configuration. For example:
```yml
include:
- project: 'uis/devops/continuous-delivery/ci-templates'
file: '/auto-devops/common-pipeline.yml'
ref: v2.3.0
```
See the [common pipeline definition](./auto-devops/common-pipeline.yml) for more
information.
## Specific documentation
The following documents describe support for specific languages or repository
types:
* [Python projects](./auto-devops/python.md).
# A common pipeline which brings in a set of "safe" templates. A "safe" template is one where jobs are auto-enabled only
# if a) they should always be enabled or b) there are rules which enable them. For example a job is only enabled if
# there are files present in the repo which indicate that the job is appropriate to run).
#
# All jobs automatically added by "safe" templates should allow for disabling via a `..._DISABLED` variable.
include:
  # Bring in the AutoDevOps template from GitLab. It can be viewed at:
  # https://gitlab.com/gitlab-org/gitlab-ee/blob/master/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
  - template: Auto-DevOps.gitlab-ci.yml

  # Utilities
  - local: "/python-common.yml"

  # "Safe" common templates.
  - local: "/auto-devops/pre-commit.yml"
  - local: "/auto-devops/python-tox.yml"
  - local: "/auto-devops/python-publish.yml"
  - local: "/auto-devops/python-check-tags-match-version.yml"

variables:
  # Auto Test from Auto Devops is deprecated and will be removed in GitLab 17.0. Get ahead of the curve by never
  # enabling it for pipelines using this template.
  # Note: quoted "1" keeps the value a string, as GitLab CI variables expect.
  TEST_DISABLED: "1"
# Runs pre-commit in repos which have enabled it. See https://pre-commit.com/ for information on the pre-commit tool.
#
# The job only runs for push and merge request events, favouring the latter if a merge request is open. The pre-commit
# job only runs if a `.pre-commit-config.yaml` file is present. The pre-commit job may be disabled by setting the
# `PRE_COMMIT_DISABLED` variable to any non-empty value.
#
# This template is intended to be "include"-d from CI configurations. An example of how to include this template:
#
#   include:
#     - project: 'uis/devops/continuous-delivery/ci-templates'
#       file: '/auto-devops/pre-commit.yml'
#       ref: v2.1.2
#
# The `.rules:...` templates referenced below are provided by /job-rules.yml.
include:
  - local: '/job-rules.yml'

pre-commit:
  image: registry.gitlab.developers.cam.ac.uk/uis/devops/infra/dockerimages/pre-commit:3.3
  # In other respects, it acts as a "test".
  stage: test
  # FIX: the stale `rules: !reference [test, rules]` line (left over from an earlier revision) duplicated the `rules`
  # key below. Duplicate mapping keys are invalid YAML; lenient parsers silently keep only the last value.
  rules:
    # Allow the job to be disabled explicitly.
    - if: $PRE_COMMIT_DISABLED
      when: never
    # Only run for push and merge request pipelines, favouring the latter.
    - !reference [.rules:disable-except-for-pushes-and-merge-requests]
    # Only run if the repository has opted in to pre-commit.
    - exists:
        - .pre-commit-config.yaml
# Extends AutoDevOps template from GitLab for ensuring commit tags match the Python project version.
#
# See /auto-devops/python.md for the full documentation.

# The `.rules:...` templates referenced below are provided by this include.
include:
  - local: "/job-rules.yml"

# Template for check tag jobs. Jobs using this template should add a "before_script" which writes the version of the
# Python package to a file at $PYTHON_CHECK_TAG_VERSION_OUTPUT_PATH.
.python:check-tag:
  image: $PYTHON_IMAGE
  stage: test
  # This job does not need the "build" stage to have completed.
  needs: []
  script:
    # Fail if the extending job did not write a version file, or if the written package version does not exactly
    # match the commit's git tag ($CI_COMMIT_TAG).
    - |
      set -e
      if [ ! -f "$PYTHON_CHECK_TAG_VERSION_OUTPUT_PATH" ]; then
        echo "Error: package version was not written to '$PYTHON_CHECK_TAG_VERSION_OUTPUT_PATH'." >&2
        exit 1;
      fi
      package_version=$(cat $PYTHON_CHECK_TAG_VERSION_OUTPUT_PATH)
      if [ "$CI_COMMIT_TAG" != "$package_version" ]; then
        echo "This commit has tag '$CI_COMMIT_TAG' but the Python package has version '$package_version'." >&2
        exit 1
      fi
  rules:
    # Allow all check tag jobs to be disabled with a single variable.
    - if: $PYTHON_CHECK_TAG_DISABLED
      when: never
    # We only ever want to run check tag jobs on push events where there is a commit tag.
    - !reference [.rules:disable-except-for-pushes]
    - !reference [.rules:disable-for-untagged-commits]
  variables:
    # File where extending jobs write the package version for the script above to read.
    PYTHON_CHECK_TAG_VERSION_OUTPUT_PATH: .python-package-version
  artifacts:
    paths:
      - $PYTHON_CHECK_TAG_VERSION_OUTPUT_PATH
# Check-tag job for poetry-based packaging: writes the output of `poetry version --short` for the .python:check-tag
# script to compare against the commit tag. Only enabled when a poetry.lock file exists in the repository root.
python:check-tag-poetry:
  extends: .python:check-tag
  before_script:
    # The echo lines create a collapsed "Install poetry" section in the GitLab job log.
    - echo -e "\e[0Ksection_start:`date +%s`:install_poetry[collapsed=true]\r\e[0KInstall poetry"
    - pip install poetry
    - echo -e "\e[0Ksection_end:`date +%s`:install_poetry\r\e[0K"
    # Strip all whitespace so the version file compares cleanly against $CI_COMMIT_TAG.
    - poetry version --short | tr -d '[[:space:]]' > "$PYTHON_CHECK_TAG_VERSION_OUTPUT_PATH"
  rules:
    # Allow this specific job to be disabled.
    - if: $PYTHON_CHECK_TAG_POETRY_DISABLED
      when: never
    # Inherit the shared check-tag rules (disable switch, pushes-with-tags only).
    - !reference [.python:check-tag, rules]
    - exists:
        - poetry.lock
# Check-tag job for setuptools-based packaging: writes the output of `python setup.py --version` for the
# .python:check-tag script to compare against the commit tag. Only enabled when a setup.py file exists in the
# repository root.
python:check-tag-setuptools:
  extends: .python:check-tag
  before_script:
    # The echo lines create a collapsed "Install setuptools" section in the GitLab job log.
    - echo -e "\e[0Ksection_start:`date +%s`:install_setuptools[collapsed=true]\r\e[0KInstall setuptools"
    - pip install setuptools
    - echo -e "\e[0Ksection_end:`date +%s`:install_setuptools\r\e[0K"
    # Strip all whitespace so the version file compares cleanly against $CI_COMMIT_TAG.
    - python setup.py --version | tr -d '[[:space:]]' > "$PYTHON_CHECK_TAG_VERSION_OUTPUT_PATH"
  rules:
    # FIX: this previously tested $PYTHON_CHECK_TAG_POETRY_DISABLED (copy-paste from the poetry job), making it
    # impossible to disable the setuptools check independently. The sibling build jobs use the
    # PYTHON_BUILD_DIST_POETRY_DISABLED / PYTHON_BUILD_DIST_SETUPTOOLS_DISABLED pattern, confirming the intent.
    - if: $PYTHON_CHECK_TAG_SETUPTOOLS_DISABLED
      when: never
    # Inherit the shared check-tag rules (disable switch, pushes-with-tags only).
    - !reference [.python:check-tag, rules]
    - exists:
        - setup.py
# Extends AutoDevOps template from GitLab for publishing Python packages.
#
# See /auto-devops/python.md for the full documentation.
include:
  - local: "/job-rules.yml"
  - local: "/python-common.yml"

# Build tarball and wheel packages for a Python package which uses poetry for packaging. This job will only execute if
# there is a poetry.lock file in the repository root.
python:build-dist-poetry:
  image: $PYTHON_IMAGE
  stage: build
  before_script:
    # Collapsed "Install poetry" section in the GitLab job log.
    - echo -e "\e[0Ksection_start:`date +%s`:install_poetry[collapsed=true]\r\e[0KInstall poetry"
    - pip install poetry
    - echo -e "\e[0Ksection_end:`date +%s`:install_poetry\r\e[0K"
  script:
    - poetry build
  artifacts:
    paths:
      # dist/ is consumed downstream by the .python:publish jobs.
      - dist/
  rules:
    # Disable all build-dist jobs, or just the poetry one, via variables.
    - if: $PYTHON_BUILD_DIST_DISABLED
      when: never
    - if: $PYTHON_BUILD_DIST_POETRY_DISABLED
      when: never
    - !reference [.rules:disable-except-for-pushes-and-merge-requests]
    - exists:
        - poetry.lock
# Build tarball and wheel packages for a Python package which uses setup.py for packaging. This job will only execute if
# there is a setup.py file in the repository root.
python:build-dist-setuptools:
  image: $PYTHON_IMAGE
  stage: build
  before_script:
    # Collapsed "Install setuptools" section in the GitLab job log.
    - echo -e "\e[0Ksection_start:`date +%s`:install_setuptools[collapsed=true]\r\e[0KInstall setuptools"
    - pip install setuptools wheel
    - echo -e "\e[0Ksection_end:`date +%s`:install_setuptools\r\e[0K"
  script:
    - python3 setup.py sdist bdist_wheel
  artifacts:
    paths:
      # dist/ is consumed downstream by the .python:publish jobs.
      - dist/
  rules:
    # Disable all build-dist jobs, or just the setuptools one, via variables.
    - if: $PYTHON_BUILD_DIST_DISABLED
      when: never
    - if: $PYTHON_BUILD_DIST_SETUPTOOLS_DISABLED
      when: never
    - !reference [.rules:disable-except-for-pushes-and-merge-requests]
    - exists:
        - setup.py
# Job which publishes a Python package. Extended by jobs below to publish to specific locations.
#
# Extending jobs are expected to provide TWINE_REPOSITORY_URL, TWINE_USERNAME and TWINE_PASSWORD (read implicitly by
# twine) and to set a stage.
.python:publish:
  image: $PYTHON_IMAGE
  before_script:
    # Fail early with a clear message if no build-dist job produced packages.
    - |-
      if [ ! -d "dist" ]; then
        echo "No dist/ folder. Did at least one build-dist-... job run?" >&2
        exit 1
      fi
    # Collapsed "Install twine" section in the GitLab job log.
    - echo -e "\e[0Ksection_start:`date +%s`:install_twine[collapsed=true]\r\e[0KInstall twine"
    - pip install twine
    - echo -e "\e[0Ksection_end:`date +%s`:install_twine\r\e[0K"
  script:
    # Note: dist/ is an artifact built by one of the build jobs.
    - twine upload --disable-progress-bar --verbose --non-interactive dist/*
  rules:
    # Allow rule to be disabled.
    - if: $PUBLISH_PYTHON_PACKAGE_DISABLED
      when: never
    # Disable for pipelines which are not pushes or merge requests.
    - !reference [.rules:disable-except-for-pushes-and-merge-requests]
    # Automatically trigger if the commit is tagged and packaging-related files are present.
    - !reference [.rules:disable-for-untagged-commits]
    - exists:
        - setup.py
        - pyproject.toml
# Publish to GitLab package registry for new tags unless the PYTHON_PUBLISH_TO_GITLAB_DISABLED variable is set to a
# non-empty value.
python:publish-to-gitlab:
  extends: .python:publish
  stage: production
  variables:
    # Authenticate to the project's own package registry with the per-job CI token.
    TWINE_USERNAME: gitlab-ci-token
    TWINE_PASSWORD: $CI_JOB_TOKEN
    TWINE_REPOSITORY_URL: ${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi
  rules:
    - if: '$PYTHON_PUBLISH_TO_GITLAB_DISABLED'
      when: never
    # Inherit the shared publish rules (tagged pushes with packaging files present).
    - !reference [.python:publish, rules]
# Job for publishing to test pypi instance on new tags. Only present if TEST_PYPI_API_TOKEN defined. Disabled if the
# PYTHON_PUBLISH_TO_TEST_PYPI_DISABLED variable is set to a non-empty value.
python:publish-to-test-pypi:
  extends: .python:publish
  # Runs in "staging" so a failed test-PyPI upload blocks the later "production" PyPI upload.
  stage: staging
  variables:
    # PyPI token authentication uses the literal username "__token__".
    TWINE_USERNAME: __token__
    TWINE_PASSWORD: $TEST_PYPI_API_TOKEN
    TWINE_REPOSITORY_URL: https://test.pypi.org/legacy/
  rules:
    - if: '$PYTHON_PUBLISH_TO_TEST_PYPI_DISABLED'
      when: never
    # Never do this if TEST_PYPI_API_TOKEN is unset or is empty.
    - if: '($TEST_PYPI_API_TOKEN == null) || ($TEST_PYPI_API_TOKEN == "")'
      when: never
    # Inherit the shared publish rules (tagged pushes with packaging files present).
    - !reference [.python:publish, rules]
# Job for publishing to real pypi instance on new tags. Only present if PYPI_API_TOKEN defined. Disabled if the
# PYTHON_PUBLISH_TO_PYPI_DISABLED variable is set to a non-empty value.
python:publish-to-pypi:
  extends: .python:publish
  stage: production
  variables:
    # PyPI token authentication uses the literal username "__token__".
    TWINE_USERNAME: __token__
    TWINE_PASSWORD: $PYPI_API_TOKEN
    TWINE_REPOSITORY_URL: https://upload.pypi.org/legacy/
  rules:
    - if: '$PYTHON_PUBLISH_TO_PYPI_DISABLED'
      when: never
    # Never do this if PYPI_API_TOKEN is unset or is empty.
    - if: '($PYPI_API_TOKEN == null) || ($PYPI_API_TOKEN == "")'
      when: never
    # Inherit the shared publish rules (tagged pushes with packaging files present).
    - !reference [.python:publish, rules]
# Extends AutoDevOps template from GitLab for Python testing via tox.
#
# See /auto-devops/python.md for the full documentation.
include:
  - local: "/job-rules.yml"
  - local: "/python-common.yml"

# Template job for running tox tests. Can be extended for complex pipelines.
.python:tox:
  image: $PYTHON_IMAGE
  before_script:
    # Collapsed "Install tox" section in the GitLab job log. TOX_ADDITIONAL_REQUIREMENTS lets extending
    # configurations pip install extra packages alongside tox.
    - echo -e "\e[0Ksection_start:`date +%s`:install_tox[collapsed=true]\r\e[0KInstall tox"
    - pip install tox $TOX_ADDITIONAL_REQUIREMENTS
    - echo -e "\e[0Ksection_end:`date +%s`:install_tox\r\e[0K"
  script:
    # TOX_OPTS lets extending configurations pass options (e.g. "-e <toxenv>") to tox.
    - "tox $TOX_OPTS"
  needs: [] # The common case is that we don't need any existing jobs.
  artifacts:
    name: "${CI_JOB_NAME}-tox"
    reports:
      coverage_report:
        coverage_format: cobertura
        # NOTE(review): report paths assume the tox run writes its outputs under $TOXINI_ARTEFACT_DIR/py3/ — see
        # /auto-devops/python.md for the expected layout.
        path: $TOXINI_ARTEFACT_DIR/py3/coverage.xml
      junit: $TOXINI_ARTEFACT_DIR/py3/junit.xml
    paths:
      - $TOXINI_ARTEFACT_DIR/
  # Extract the total coverage percentage from a coverage-report style "TOTAL ..." line in the job log.
  coverage: '/^TOTAL\s+\d+\s+\d+\s+(\d+)%$/'
# Run tox tests on pushes and merge requests for recent versions of Python if tox.ini is present and the
# PYTHON_TOX_DISABLED variable is not set to a non-empty value.
python:tox:
  extends: .python:tox
  rules:
    - if: "$PYTHON_TOX_DISABLED"
      when: never
    - !reference [.rules:disable-except-for-pushes-and-merge-requests]
    # Only run if the repository actually has a tox configuration.
    - exists:
        - tox.ini
  variables:
    TOXINI_ARTEFACT_DIR: ${CI_PROJECT_DIR}/tox-artifacts
  parallel:
    matrix:
      # Check all the supported Python versions (list defined in /python-common.yml).
      - PYTHON_VERSION: !reference [.python:versions]
# Python support
The following templates are included as part of the [common
pipeline](./common-pipeline.yml).
* [python-tox.yml](./python-tox.yml)
* [python-check-tags-match-version.yml](./python-check-tags-match-version.yml)
* [python-publish.yml](./python-publish.yml)
The jobs are "safe" in that they are only enabled if files are present in the
repository indicating they are sensible to run. All jobs can be disabled
explicitly by setting `..._DISABLED` variables.
## Summary
The common Python pipeline jobs will:
* Build your Python package as a wheel and tarball if there is a `setup.py` or
`poetry.lock` file in your repository root.
* Run `tox` if there is a `tox.ini` file in your repository root.
* Check that a commit's tag matches the Python package version if there is a
`setup.py` or `poetry.lock` file in your repository root.
* Publish your package to GitLab's package registry when a commit is tagged and
if a package was built.
* Publish your package to the test PyPI instance when a commit is tagged,
a package was built and the `TEST_PYPI_API_TOKEN` variable is set.
* Publish your package to the main PyPI instance when a commit is tagged,
a package was built and the `PYPI_API_TOKEN` variable is set.
If you have configured publication to both the test PyPI instance and the main
one, publication to the main instance will not happen until there has been a
successful publication to the test one.
Any of the automatically created jobs above may be disabled by setting a
corresponding `..._DISABLED` variable.
## Running tests
The [tox test runner template](./python-tox.yml) will add a "python:tox" job to
commit and merge requests pipelines in the "test" stage if a `tox.ini` file is
present.
The default behaviour is to run `tox` in the three most recent Python versions.
The job can be disabled by setting the `PYTHON_TOX_DISABLED` variable.
The `tox` command will be run passing it the contents of the `TOX_OPTS`
variable, so you can control which toxenvs run in each job (see "Customisation"
below for how to run toxenvs in isolated test jobs).
If you need to `pip install` additional requirements, you can pass them in the
`TOX_ADDITIONAL_REQUIREMENTS` variable.
If you want code coverage and test reports to be uploaded, arrange for them to
be placed in the following locations:
* Code-coverage: `$TOXINI_ARTEFACT_DIR/py3/coverage.xml`
* Test run results: `$TOXINI_ARTEFACT_DIR/py3/junit.xml`
Any other files present in `$TOXINI_ARTEFACT_DIR` are uploaded as artefacts.
### Customisation
You can arrange for tox testenvs to run in isolated test jobs by extending the
"python:tox" job.
For example, this configuration will run the `py3` toxenv in the last three
supported Python versions and the `flake8` and `black` toxenvs in the most
recent version.
```yaml
include:
- project: 'uis/devops/continuous-delivery/ci-templates'
file: '/auto-devops/common-pipeline.yml'
python:tox:
parallel:
matrix:
# Tests against a standard set of Python versions by setting PYTHON_VERSION.
- PYTHON_VERSION: !reference [.python:versions]
# Check formatting with the flake8 and black toxenvs
- TOX_ENV: flake8
- TOX_ENV: black
variables:
TOX_ENV: py3
TOX_OPTS: -e $TOX_ENV
```
A corresponding minimal `tox.ini` file which lints using black and flake8, runs
tests using `pytest`, and reports code coverage and test results back to GitLab
looks like the following:
```ini
[tox]
envlist=flake8,black,py3
skipsdist=True
[_vars]
build_root={env:TOXINI_ARTEFACT_DIR:{toxinidir}/build}
[testenv]
deps=
.
coverage
pytest
pytest-cov
commands=
pytest --doctest-modules --cov={toxinidir} --junitxml={[_vars]build_root}/{envname}/junit.xml
coverage html --directory {[_vars]build_root}/{envname}/htmlcov/
coverage xml -o {[_vars]build_root}/{envname}/coverage.xml
[testenv:py3]
basepython=python3
[testenv:flake8]
basepython=python3
deps=
flake8==6.0.0
commands=
flake8 --version
flake8 --tee --output-file={[_vars]build_root}/{envname}/report.txt .
[testenv:black]
basepython=python3
deps=
black==23.3.0
commands=
black --version
black --check .
```
## Publishing packages
The [publish template](./python-publish.yml) supports building packages from
your code and publishing them to GitLab's package registry and, optionally, the
test and production PyPI instances.
Packages are *built* for push and merge request pipelines as part of the "build"
stage. Package building can be disabled by setting the
`PYTHON_BUILD_DIST_DISABLED` variable.
There are specialised build jobs named "python:build-dist-poetry" and
"python:build-dist-setuptools" for poetry and setuptools-based packaging. Jobs
are enabled automatically based on the presence of `poetry.lock` or `setup.py`
files in the repository. If necessary the build jobs can be selectively disabled
by setting the `PYTHON_BUILD_DIST_POETRY_DISABLED` or
`PYTHON_BUILD_DIST_SETUPTOOLS_DISABLED` variables.
If you push a new tag, packages will be built and published.
If you have also included the [check tags
template](./python-check-tags-match-version.yml), a job named
"python:check-tag-poetry" or "python:check-tag-setuptools" will check that the
commit tag matches the Python package version. You can disable all tag checks by
setting the `PYTHON_CHECK_TAG_DISABLED` variable. If necessary the check jobs
can be selectively disabled by setting the `PYTHON_CHECK_TAG_POETRY_DISABLED`
or `PYTHON_CHECK_TAG_SETUPTOOLS_DISABLED` variables.
The "python:publish-to-gitlab" job runs in the "production" stage for new tags
and publishes your package to GitLab's built in package registry if there is a
`setup.py` or `pyproject.toml` file present in your repository. It can be
disabled by setting the `PYTHON_PUBLISH_TO_GITLAB_DISABLED` variable.
For publishing to PyPI, API tokens need to be created and arranged to be
present in the `TEST_PYPI_API_TOKEN` and `PYPI_API_TOKEN` variables
respectively.
The "python:publish-to-test-pypi" and "python:publish-to-pypi" jobs run like the
"python:publish-to-gitlab" job except that they require the
`TEST_PYPI_API_TOKEN` and `PYPI_API_TOKEN` variables be set respectively. They
can be selectively disabled by setting the
`PYTHON_PUBLISH_TO_TEST_PYPI_DISABLED` and `PYTHON_PUBLISH_TO_PYPI_DISABLED`
variables.
You can extend or override the publish jobs. A template publish job is available
named ".python:publish". The following variables must be set:
* `TWINE_REPOSITORY_URL` - the repository to upload packages to.
* `TWINE_USERNAME` - the username to use to authenticate to the repository.
* `TWINE_PASSWORD` - the password to use to authenticate to the repository.
These publication jobs intentionally do not have a great deal of configuration
available. Complicated packaging should be performed in specialised jobs.
# Re-usable job rules template. Does not require AutoDevOps.
# These rules are useful if you can't immediately see what a long list of job rule conditions means. There is no
# requirement to use the rules in this file.

# Disable the job if the source was not a merge request or push event. If there are open merge requests, jobs are
# disabled on push events.
#
# This rule only ever *disables* a job; additional rules to enable the job must appear *after* this one.
.rules:disable-except-for-pushes-and-merge-requests:
  - if: '($CI_PIPELINE_SOURCE != "merge_request_event") && ($CI_PIPELINE_SOURCE != "push")'
    when: never
  # $CI_COMMIT_BRANCH is only set for pushes with an associated branch. $CI_COMMIT_TAG is only set for pushes to a
  # tag. We want to disable jobs in those cases if there are open merge requests since the merge request pipeline
  # should run those jobs.
  - if: '($CI_COMMIT_BRANCH || $CI_COMMIT_TAG) && $CI_OPEN_MERGE_REQUESTS'
    when: never
# Enable the job on merge requests or push events. If a merge request is open for the commit associated with the
# pipeline, the push event jobs will be disabled in favour of the merge requests.
#
# This rule only ever *enables* a job; if needed, additional rules to disable the job appear *before* this one.
.rules:enable-for-pushes-and-merge-requests:
  - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
  # Push pipelines are enabled only when there is no open merge request for the commit.
  - if: '($CI_PIPELINE_SOURCE == "push") && ($CI_OPEN_MERGE_REQUESTS == "" || $CI_OPEN_MERGE_REQUESTS == null)'
# Disable the job if the source was not a push event. This job will run only on push pipelines, never on merge request
# ones.
#
# This rule only ever *disables* a job; additional rules to enable the job must appear *after* this one.
.rules:disable-except-for-pushes:
  - if: '$CI_PIPELINE_SOURCE != "push"'
    when: never
# Enable the job on *all* push events. This rule does nothing to enable a job on merge request events and enables jobs
# for push events even if there is an open merge request.
#
# This rule only ever *enables* a job; if needed, additional rules to disable the job appear *before* this one.
.rules:enable-for-pushes:
  - if: '$CI_PIPELINE_SOURCE == "push"'
# Disable the job unless there is a tag associated with the commit. This rule does not check the pipeline event
# source.
#
# This rule only ever *disables* a job; additional rules to enable the job must appear *after* this one.
.rules:disable-for-untagged-commits:
  - if: '($CI_COMMIT_TAG == null) || ($CI_COMMIT_TAG == "")'
    when: never
# Enable the job if there is a tag associated with the commit. This rule does not check the pipeline event source.
#
# This rule only ever *enables* a job; if needed, additional rules to disable the job appear *before* this one.
.rules:enable-for-tagged-commits:
  - if: $CI_COMMIT_TAG
# Stand alone version of auto-devops/pre-commit.yml.
#
# This template can be used for pure terraform projects which do not benefit from the full Auto DevOps pipeline.
include:
  # Presumably supplies the pipeline stages normally defined by Auto DevOps — confirm against /auto-devops-stages.yml.
  - local: '/auto-devops-stages.yml'
  - local: '/auto-devops/pre-commit.yml'
# Uploading packages to PyPI
#
# DEPRECATED: Use auto-devops/python-package.yml instead.
#
# Jobs which package and upload the product to PyPI. Two manually triggered jobs
# are created: "pypi-release" and "test-pypi-release". The jobs will only be
# scheduled if the matching PYPI_API_TOKEN or TEST_PYPI_API_TOKEN variables are
......
# Common utilities for Python pipelines. Does not require Auto DevOps.
variables:
  # Default Python version and base docker image to use when building Python packages.
  # Quoted to stay a string: an unquoted 3.10 would parse as the float 3.1.
  PYTHON_VERSION: "3.11"
  PYTHON_IMAGE: registry.gitlab.developers.cam.ac.uk/uis/devops/infra/dockerimages/python:$PYTHON_VERSION-alpine

# A default set of python versions to test. This is usually the most recent three released versions. Note that adding a
# new version here also requires that we add a new version to the common docker image repository project.
.python:versions: ["3.9", "3.10", "3.11"]