
Compare revisions

Commits on Source (62)
Showing with 166 additions and 114 deletions
......@@ -25,20 +25,25 @@ in a docker container. The local ``swh-jenkins-jobs`` repository will be mounted
volume and cloned by Jenkins, so do not forget to commit the changes you want to test.
- Launch jenkins
Executing the following script, located at the root of the repository, automatically builds the docker image and starts the compose session.
```
docker-compose build
docker-compose up
./start-docker-jenkins.sh
```
Connect to localhost:8080, then within the jenkins ui:
Jenkins jobs for Software Heritage should be automatically registered when the jenkins service starts.
If the jobs were not automatically registered, you can trigger their creation by following these instructions (a direct CLI equivalent is sketched after this list):
- Connect to localhost:8080, then within the jenkins ui:
- Create a jenkins folder `jenkins-tools`
- Create a new `free-style` job named `job-builder` inside the `jenkins-tools` targeting
this git repository `file:///opt/swh-jenkins-jobs`
- Configure the branch you are developing on (e.g. `*/master`, `*/awesome-feature`,
...)
- Add a `build` step `Execute shell` with this content
- Create a new `free-style` job named `job-builder` inside the `jenkins-tools` folder
- Add a `build` step `Execute shell` with this content
```
tox -- update --delete-old
git config --global --add safe.directory /opt/swh-jenkins-jobs/.git
git clone file:///opt/swh-jenkins-jobs
cd swh-jenkins-jobs
tox -- update --delete-old --jobs-only
```
- Save your build configuration
- Trigger a build \o/
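For reference, the `tox -- update --delete-old --jobs-only` build step wraps the Jenkins Job Builder CLI. A minimal sketch of a roughly equivalent direct invocation, assuming `jenkins-job-builder` is installed locally and a `jenkins_jobs.ini` pointing at http://localhost:8080 exists (both are assumptions, not part of the documented workflow):
```
# Assumptions: pip install jenkins-job-builder, and a jenkins_jobs.ini whose
# [jenkins] section holds url/user/password for http://localhost:8080.
cd swh-jenkins-jobs
jenkins-jobs --conf jenkins_jobs.ini update --delete-old --jobs-only jobs/
```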
......@@ -46,32 +51,3 @@ tox -- update --delete-old
This will install the jobs in your local jenkins. Jobs that can run directly on the
built-in node can be executed as-is. Jobs that need to run docker require a docker
agent to be configured.
# Configure a docker agent
To make a docker agent runnable, port 50000 needs to be available.
Then, within your local jenkins interface, navigate to:
> Manage jenkins
> Manage nodes and clouds
> New nodes
Then fill in the form, keeping the default values and adapting the rest:
- name: docker agent
- remote root dir: /var/tmp/jenkins
- labels: docker
- launch method: launch agent by connecting it to the controller
- custom workdir path: /var/tmp/jenkins
Save, then click on 'docker agent' and follow the proposed instructions:
```
$ curl -sO http://localhost:8080/jnlpJars/agent.jar
$ mkdir -p /var/tmp/jenkins
$ java -jar agent.jar \
    -jnlpUrl http://localhost:8080/manage/computer/docker%20agent/jenkins-agent.jnlp \
    -workDir "/var/tmp/jenkins"
```
version: '3'
volumes:
jenkins_data:
......@@ -25,6 +23,4 @@ services:
-Dhudson.security.csrf.GlobalCrumbIssuerConfiguration.DISABLE_CSRF_PROTECTION=true
ports:
- 8080:8080
# For docker agent
- 50000:50000
entrypoint: /docker/entrypoint.sh
FROM jenkins/jenkins:jdk11
FROM jenkins/jenkins:lts-jdk17
USER root
......
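The Jenkins base image moves from a pinned JDK 11 tag to the LTS JDK 17 tag. A quick, hedged way to confirm the runtime shipped by the new tag (assumes docker is available locally and the tag resolves as published on Docker Hub):
```
# Print the Java version bundled in the new base image.
docker run --rm --entrypoint java jenkins/jenkins:lts-jdk17 -version
```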
......@@ -12,22 +12,6 @@ def jobXml = '''<?xml version='1.1' encoding='UTF-8'?>
<project>
<description></description>
<keepDependencies>false</keepDependencies>
<scm class="hudson.plugins.git.GitSCM" plugin="git@5.2.1">
<configVersion>2</configVersion>
<userRemoteConfigs>
<hudson.plugins.git.UserRemoteConfig>
<url>file:///opt/swh-jenkins-jobs</url>
</hudson.plugins.git.UserRemoteConfig>
</userRemoteConfigs>
<branches>
<hudson.plugins.git.BranchSpec>
<name>*/master</name>
</hudson.plugins.git.BranchSpec>
</branches>
<doGenerateSubmoduleConfigurations>false</doGenerateSubmoduleConfigurations>
<submoduleCfg class="empty-list"/>
<extensions/>
</scm>
<canRoam>true</canRoam>
<disabled>false</disabled>
<blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
......@@ -36,7 +20,12 @@ def jobXml = '''<?xml version='1.1' encoding='UTF-8'?>
<concurrentBuild>false</concurrentBuild>
<builders>
<hudson.tasks.Shell>
<command>tox -- update --delete-old</command>
<command>
git config --global --add safe.directory /opt/swh-jenkins-jobs/.git
git clone file:///opt/swh-jenkins-jobs
cd swh-jenkins-jobs
tox -- update --delete-old --jobs-only
</command>
<configuredLocalRules/>
</hudson.tasks.Shell>
</builders>
......
......@@ -14,7 +14,7 @@
trigger-note: false
add-ci-message: true
# secret jenkins token is generated when executing tox
secret-token: !include-raw: jobs/templates/jenkins-token
secret-token: !include-raw-verbatim: jobs/templates/jenkins-token
dsl: !include-jinja2: templates/swh-build-docker-image.groovy.j2
- project:
......
......@@ -30,6 +30,10 @@
name: "PARALLEL"
description: Execute image builds in parallel
default: true
- bool:
name: "REBUILD_BASE_IMAGE"
description: Force the rebuild of the base image
default: false
dsl: !include-jinja2: templates/swh-apps-build-docker-images.groovy.j2
- job-template:
......
......@@ -128,5 +128,5 @@
add-ci-message: true
cancel-pending-builds-on-update: true
# secret jenkins token is generated when executing tox
secret-token: !include-raw: jobs/templates/jenkins-token
secret-token: !include-raw-verbatim: jobs/templates/jenkins-token
dsl: !include-jinja2: templates/swh-docs-pipeline.groovy.j2
......@@ -13,7 +13,7 @@
project-type: pipeline
docker_image: tox
docker_options: --tmpfs /tmp:exec --privileged --device /dev/fuse
python_version: "3.10"
python_version: "3.11"
sandbox: true
timeout: 120
max_concurrent: 2
......@@ -63,7 +63,7 @@
add-ci-message: true
cancel-pending-builds-on-update: true
# secret jenkins token is generated when executing tox
secret-token: !include-raw: templates/jenkins-token
secret-token: !include-raw-verbatim: templates/jenkins-token
dsl: !include-jinja2: templates/swh-docker-dev.groovy.j2
......
......@@ -10,7 +10,7 @@
gitlab_project_name: swh/infra/swh-mirror
project-type: pipeline
docker_image: tox
node: built-in
sandbox: true
max_concurrent: 1
properties:
- gitlab:
......@@ -23,9 +23,12 @@
trigger-push: true
trigger-merge-request: true
trigger-open-merge-request-push: source
trigger-note: true
note-regex: ".*@jenkins.*retry.*build.*"
wip-skip: false
add-ci-message: true
cancel-pending-builds-on-update: true
# secret jenkins token is generated when executing tox
secret-token: !include-raw: jobs/templates/jenkins-token
secret-token: !include-raw-verbatim: jobs/templates/jenkins-token
dsl: !include-jinja2: templates/swh-mirror-run-tests.groovy.j2
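With `trigger-note` enabled, a merge-request comment matching the `note-regex` above (for example `@jenkins please retry the build`) retriggers the job. A throwaway check of the pattern, assuming GNU grep is available — illustration only, not part of the job definition:
```
# Prints the line if the comment would match the configured note-regex.
echo "@jenkins please retry the build" | grep -E '.*@jenkins.*retry.*build.*'
```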
......@@ -14,8 +14,7 @@
repo_name: swh-alter
pkg: alter
python_module: swh.alter
default-branch-name: main
default-tox-environment: py311
default_branch_name: main
jobs:
- "swh-jobs-{name}"
......@@ -47,12 +46,23 @@
- "swh-jobs-{name}"
- project:
name: DDATASET
display-name: swh-dataset
repo_name: swh-dataset
pkg: dataset
python_module: swh.dataset
default-tox-environment: py311
name: swh-datasets
display-name: swh-datasets
repo_name: swh-datasets
pkg: datasets
python_module: swh.datasets
do_rust: true
timeout: 30
default_branch_name: main
jobs:
- "swh-jobs-{name}"
- project:
name: swh-export
display-name: swh-export
repo_name: swh-export
pkg: export
python_module: swh.export
jobs:
- "swh-jobs-{name}"
......@@ -62,6 +72,7 @@
repo_name: swh-deposit
pkg: deposit
python_module: swh.deposit
default-tox-environment: py311,deposit_checker_import
jobs:
- "swh-jobs-{name}"
......@@ -71,9 +82,9 @@
repo_name: swh-graph
pkg: graph
python_module: swh.graph
default-tox-environment: py311,java
do_rust: true
timeout: 20
timeout: 30
precommit_skip_checks: mypy,cargo-check,clippy
jobs:
- "swh-jobs-{name}"
......@@ -232,7 +243,7 @@
pkg: perfecthash
python_module: swh.perfecthash
use_cibuildwheel: true
default-tox-environment: py310,c
default-tox-environment: py311,c
jobs:
- "swh-jobs-{name}"
......@@ -242,7 +253,9 @@
repo_name: swh-provenance
pkg: provenance
python_module: swh.provenance
timeout: 15
do_rust: true
timeout: 30
default_branch_name: main
jobs:
- "swh-jobs-{name}"
......@@ -336,6 +349,7 @@
nb_cypress_runners: 4
timeout: 30
max_concurrent: 3
precommit_skip_checks: mypy,eslint
jobs:
- "swh-jobs-{name}"
......@@ -354,7 +368,6 @@
repo_name: swh-webhooks
pkg: webhooks
python_module: swh.webhooks
default-tox-environment: py311
jobs:
- "swh-jobs-{name}"
......@@ -376,3 +389,12 @@
python_module: swh.foo
jobs:
- "swh-jobs-{name}"
- project:
name: swh-coarnotify
display-name: swh-coarnotify
repo_name: swh-coarnotify
pkg: coarnotify
python_module: swh.coarnotify
jobs:
- "swh-jobs-{name}"
\ No newline at end of file
......@@ -3,8 +3,9 @@ stage('Cypress tests') {
script {
// run cypress tests in a dedicated container with a mounted yarn cache
docker.image('{{docker_registry}}/cypress:latest')
.inside("--mount type=volume,src=shared-jenkins-cachedir,dst=/home/jenkins/.cache") {
def cypressImage = docker.image('{{docker_registry}}/cypress:latest')
cypressImage.pull()
cypressImage.inside("--mount type=volume,src=shared-jenkins-cachedir,dst=/home/jenkins/.cache") {
script {
sh '''
......@@ -15,10 +16,10 @@ stage('Cypress tests') {
rm -f swh-web-test*.sqlite3*
# apply django migrations and create users (will produce swh-web-test.sqlite3 file)
python{{cypress_python_version}} swh/web/manage.py migrate --settings=swh.web.settings.tests
python{{cypress_python_version}} swh/web/manage.py migrate --settings=${DJANGO_SETTINGS_MODULE:-swh.web.settings.cypress}
for create_users_script in swh/web/tests/create_test_*
do
cat $create_users_script | python{{cypress_python_version}} swh/web/manage.py shell --settings=swh.web.settings.tests
cat $create_users_script | python{{cypress_python_version}} swh/web/manage.py shell --settings=${DJANGO_SETTINGS_MODULE:-swh.web.settings.cypress}
done
# build swh-web static assets in test mode (for code coverage)
......@@ -56,11 +57,11 @@ stage('Cypress tests') {
cp swh-web-test.sqlite3 swh-web-test{{ n }}.sqlite3
# run django server on a dedicated port for that test runner and wait for it to be up
python{{cypress_python_version}} swh/web/manage.py runserver --nostatic --settings=swh.web.settings.tests 0.0.0.0:500{{ n }}&
python{{cypress_python_version}} swh/web/manage.py runserver --nostatic --settings=${DJANGO_SETTINGS_MODULE:-swh.web.settings.cypress} 0.0.0.0:500{{ n }}&
wait-for-it -t 90 localhost:500{{ n }}
# execute cypress tests
yarn run cypress run --env split={{ nb_cypress_runners }},splitIndex1={{ n }} --config baseUrl=http://localhost:500{{ n }}
CYPRESS_NO_COMMAND_LOG=1 yarn run cypress run --env split={{ nb_cypress_runners }},splitIndex1={{ n }} --config baseUrl=http://127.0.0.1:500{{ n }}
# copy nyc coverage outputs to dedicated merge folders for later processing
cp cypress/coverage{{ n }}/coverage-final.json cypress/coverage_output/coverage-final{{ n }}.json
......
stage('Python tests') {
steps {
sh '''
tox -e $TOX_ENVIRONMENT -- \
tox -e "$TOX_ENVIRONMENT" -- \
--cov-report=xml \
--junit-xml=test-results.xml \
-v
-vv
'''
}
post {
......
......@@ -4,10 +4,19 @@ stage('Build rust artifacts') {
{% endfilter %}
steps {
sh '''\
set -e
prepare-cargo-cache
ORC_USE_SYSTEM_LIBRARIES=true cargo build --all-features
'''
{% if production_jenkins %}
withCredentials([
string(credentialsId: 'sccache-redis-endpoint', variable: 'SCCACHE_REDIS_ENDPOINT'),
string(credentialsId: 'sccache-redis-password', variable: 'SCCACHE_REDIS_PASSWORD'),
]) {
{% endif %}
sh '''\
set -e
prepare-cargo-cache
RUSTC_WRAPPER=sccache CARGO_INCREMENTAL=0 cargo build --all-features
'''
{% if production_jenkins %}
}
{% endif %}
}
}
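On the production Jenkins, the Rust build now goes through `sccache` backed by Redis, with the endpoint and password injected via `withCredentials`. A hedged local sanity check that the wrapper is actually invoked (assumes `sccache` is installed; without the Redis variables exported it falls back to its local disk cache):
```
# Mirror the CI environment; SCCACHE_REDIS_ENDPOINT / SCCACHE_REDIS_PASSWORD may
# additionally be exported to hit the shared Redis cache.
export RUSTC_WRAPPER=sccache
export CARGO_INCREMENTAL=0
cargo build --all-features
sccache --show-stats   # non-zero compile request counters confirm the wrapper ran
```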
......@@ -4,10 +4,23 @@ stage('Rust tests') {
{% endfilter %}
steps {
sh '''\
set -e
prepare-cargo-cache
ORC_USE_SYSTEM_LIBRARIES=true cargo test --all-features
'''
{% if production_jenkins %}
withCredentials([
string(credentialsId: 'sccache-redis-endpoint', variable: 'SCCACHE_REDIS_ENDPOINT'),
string(credentialsId: 'sccache-redis-password', variable: 'SCCACHE_REDIS_PASSWORD'),
]) {
{% endif %}
sh '''\
set -e
prepare-cargo-cache
export RUSTC_WRAPPER=sccache
export CARGO_INCREMENTAL=0
cargo clippy --all-features -- --deny warnings
RUSTDOCFLAGS="--deny warnings" cargo doc --all-features --no-deps
cargo test --all-features
'''
{% if production_jenkins %}
}
{% endif %}
}
}
......@@ -3,8 +3,10 @@ failure {
updateGitlabCommitStatus name: 'jenkins', state: 'failed'
if (env.gitlabMergeRequestIid) {
def message = "Jenkins job [${env.JOB_NAME} #${env.BUILD_ID}]"
message += "(${env.BUILD_URL}) {- failed -}.<br/>"
message += "See [Console Output](${env.BUILD_URL}console) and "
message += "(${env.BUILD_URL}) {-failed-} "
message += "in ${currentBuild.durationString.minus(' and counting')}.<br/>"
message += "See [Console Output](${env.BUILD_URL}console), "
message += "[Blue Ocean](${env.RUN_DISPLAY_URL}) and "
message += "[Coverage Report](${env.BUILD_URL}coverage) for more details."
addGitLabMRComment comment: message
}
......@@ -15,8 +17,10 @@ success {
updateGitlabCommitStatus name: 'jenkins', state: 'success'
if (env.gitlabMergeRequestIid) {
def message = "Jenkins job [${env.JOB_NAME} #${env.BUILD_ID}]"
message += "(${env.BUILD_URL}) {+ succeeded +}.<br/>"
message += "See [Console Output](${env.BUILD_URL}console) and "
message += "(${env.BUILD_URL}) {+succeeded+} "
message += "in ${currentBuild.durationString.minus(' and counting')}.<br/>"
message += "See [Console Output](${env.BUILD_URL}console), "
message += "[Blue Ocean](${env.RUN_DISPLAY_URL}) and "
message += "[Coverage Report](${env.BUILD_URL}coverage) for more details."
addGitLabMRComment comment: message
}
......
......@@ -48,6 +48,8 @@ pipeline {
expression { "${env.gitlabSourceBranch}" ==~ /refs\/tags\/v\d+(:?\.\d+)+(:?(:?a|b|rc)\d+)?/ }
// Job to build images exists
expression { jenkinsJobExists('/swh-apps/build-docker-images') }
// temporarily disabled to avoid churn in coordinated releases
expression { false }
}
steps {
build(
......
......@@ -16,5 +16,5 @@
trigger-note: false
add-ci-message: true
# secret jenkins token is generated when executing tox
secret-token: !include-raw: jenkins-token
secret-token: !include-raw-verbatim: jenkins-token
dsl: !include-jinja2: incoming-tag.groovy.j2
......@@ -135,7 +135,7 @@ pipeline {
# around the same time, they will then take the next version. Thus
# avoiding any concurrent-write issue down the (pipe)line.
git tag -a ${application_tag} -m "Empty tag to be overwritten"
git push origin ${application_tag} --follow-tags
git push origin ${application_tag}
"""
}
}
......@@ -177,8 +177,14 @@ pipeline {
if ( dockerfile_exists ) {
// Determine docker image name and version
full_image_version = "${registry_domain}/${image_name}:${image_version}"
latest_image_version = "${registry_domain}/${image_name}:latest"
echo "full image version: ${full_image_version}"
sh "docker build --pull --tag ${full_image_version} apps/${application}"
sh """
# Build image with the versioned tag
docker build --pull --tag ${full_image_version} apps/${application}
# Also tag the image as latest
docker tag ${full_image_version} ${latest_image_version}
"""
}
}
}
......@@ -206,7 +212,10 @@ pipeline {
script {
if ( ! dry_run && dockerfile_exists ) {
docker.withRegistry("https://${registry_domain}", dockerCredentialsKeyID) {
sh "docker push ${full_image_version}"
sh """
docker push ${full_image_version}
docker push ${latest_image_version}
"""
}
} else {
echo "Skipping `docker image push`."
......@@ -223,7 +232,10 @@ pipeline {
always {
script {
if ( dockerfile_exists && cleanup_image ) {
sh "docker image rm ${full_image_version}"
sh """
docker image rm ${full_image_version}
docker image rm ${latest_image_version}
"""
}
}
}
......
......@@ -4,6 +4,7 @@ def module_version = params.VERSION
def dry_run = params.DRY_RUN
def cleanup_image = params.CLEANUP_IMAGE
def trigger_in_parallel = params.PARALLEL
def rebuild_base_image = params.REBUILD_BASE_IMAGE
// For local build, use 'docker' label, else 'built-in' (the default, e.g. for prod)
{% if production_jenkins %}
def label_node = 'built-in'
......@@ -73,6 +74,28 @@ pipeline {
}
}
stage('Build/Reuse "swh-base" image') {
when {
expression { rebuild_base_image }
}
steps {
script {
build job: 'build-docker-image',
parameters: [
string(name: 'APPLICATION', value: 'swh-base'),
booleanParam(name: 'DRY_RUN', value: dry_run),
booleanParam(name: 'CLEANUP_IMAGE', value: cleanup_image),
],
// quiet period (in seconds) before the downstream build starts
quietPeriod: 1,
// result of this step will be the result of the downstream job
propagate: true,
// make pipeline wait for result of the job
wait: true
}
}
}
stage('Search impacted application images to rebuild') {
steps {
sh """#!/bin/bash
......
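When `REBUILD_BASE_IMAGE` is set, the pipeline delegates to the `build-docker-image` job with `APPLICATION=swh-base`. A hedged manual equivalent against the local compose Jenkins (the job path and anonymous build permission are assumptions; the local compose setup disables CSRF protection, so no crumb header is needed):
```
# Assumed job path; adjust if the job is generated inside a folder.
curl -X POST "http://localhost:8080/job/build-docker-image/buildWithParameters" \
     --data APPLICATION=swh-base \
     --data DRY_RUN=true \
     --data CLEANUP_IMAGE=true
```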
......@@ -24,7 +24,7 @@ pipeline {
{% endfilter %}
options {
timeout(time: 40, unit: 'MINUTES')
timeout(time: 60, unit: 'MINUTES')
timestamps()
{% filter indent(width=4) %}
{%- include 'templates/includes/throttle-job-property.groovy.j2' -%}
......@@ -67,13 +67,11 @@ pipeline {
stage('Run tests') {
steps {
lock('docker-agent-host-port-5080') {
sh '''
basetemp=$WORKSPACE/tmp-pytest
mkdir -p $basetemp
tox -- -v --basetemp=$basetemp
'''
}
sh '''
basetemp=$WORKSPACE/tmp-pytest
mkdir -p $basetemp
tox -- -vv --basetemp=$basetemp
'''
}
}
}
......