Commit d0d54215 authored by 徐豪's avatar 徐豪
Browse files

init

parents

Too many changes to show.

To preserve performance only 1000 of 1000+ files are displayed.
# Shared templates for image-build jobs (kaniko and buildx flavors).
.base-image-build:
  extends: .use-kaniko
  variables:
    GIT_LFS_SKIP_SMUDGE: 1  # disable pulling objects from lfs
  retry: 2

.base-image-build-buildx:
  extends: .use-buildx
  variables:
    GIT_LFS_SKIP_SMUDGE: 1  # disable pulling objects from lfs
  retry: 2
# This image is used by:
# - The `review-qa-*` jobs
# - The `e2e:package-and-test` child pipeline test stage jobs
# See https://docs.gitlab.com/ee/development/testing_guide/end_to_end/index.html#testing-code-in-merge-requests for more details.
build-qa-image:
  extends:
    - .base-image-build-buildx
    - .build-images:rules:build-qa-image
  stage: build-images
  needs: []
  script:
    - run_timed_command "scripts/build_qa_image"

build-qa-image as-if-foss:
  extends:
    - build-qa-image
    - .as-if-foss
    - .build-images:rules:build-qa-image-as-if-foss

follow-up:build-qa-image:
  extends:
    - build-qa-image
    - .qa:rules:follow-up-e2e
  needs:
    - manual:e2e-test-pipeline-generate
# Retags the GDK bootstrap image (read from qa/gdk/Dockerfile.gdk) with a
# `stable-` prefix so downstream builds can pin it.
retag-gdk-image:
  extends:
    - .base-image-build
    - .build-images:rules:retag-gdk-image
  tags:
    - docker
  stage: build-images
  needs: []
  script:
    - |
      image="registry.gitlab.com/gitlab-org/gitlab-development-kit/asdf-bootstrapped-verify/main"
      tag=$(awk '/ARG GDK_SHA=/ {print $2}' qa/gdk/Dockerfile.gdk | sed 's/.*=//g')
      skopeo login -u $RETAG_GDK_IMAGE_TOKEN_NAME -p $RETAG_GDK_IMAGE_TOKEN $CI_REGISTRY
      skopeo copy docker://${image}:${tag} docker://${image}:stable-${tag}

build-gdk-image:
  extends:
    - .base-image-build-buildx
    - .build-images:rules:build-gdk-image
  tags:
    - docker
  stage: build-images
  needs:
    - job: retag-gdk-image
      optional: true
  script:
    - run_timed_command "scripts/build_gdk_image"
# NOTE: release-tools verifies the presence on this job for a commit that is
# candidate to an auto-deploy package. This job name can't be changed without
# changing the code in release-tools.
build-assets-image:
  extends:
    - .base-image-build
    - .build-images:rules:build-assets-image
  stage: build-images
  needs: ["compile-production-assets"]
  script:
    - skopeo login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
    - run_timed_command "scripts/build_assets_image"
  artifacts:
    expire_in: 7 days
    paths:
      # The `cached-assets-hash.txt` file is used in `review-build-cng-env` (`.gitlab/ci/review-apps/main.gitlab-ci.yml`)
      # to pass the assets image tag to the CNG downstream pipeline.
      - cached-assets-hash.txt

build-assets-image as-if-foss:
  extends:
    - build-assets-image
    - .as-if-foss
    - .build-images:rules:build-assets-image-as-if-foss
  needs: ["compile-production-assets as-if-foss"]

follow-up:build-assets-image:
  extends:
    - build-assets-image
    - .qa:rules:follow-up-e2e
  needs: ["follow-up:compile-production-assets"]
# Packages GitLab Workhorse essentials and uploads them, exiting early when the
# package already exists in the registry.
cache-workhorse:
  extends:
    - .default-retry
    - .default-before_script
    - .ruby-cache
    - .setup-test-env-cache
    - .caching:rules:cache-workhorse
  stage: prepare
  variables:
    SETUP_DB: "false"
  script:
    - source scripts/gitlab_component_helpers.sh
    - 'gitlab_workhorse_archive_doesnt_exist || { echoinfo "INFO: Exiting early as package exists."; exit 0; }'
    - run_timed_command "scripts/setup-test-env"
    - run_timed_command "select_gitlab_workhorse_essentials"
    - run_timed_command "create_gitlab_workhorse_package"
    - run_timed_command "upload_gitlab_workhorse_package"
  artifacts:
    expire_in: 7d
    paths:
      - ${TMP_TEST_GITLAB_WORKHORSE_PATH}/
# Compiles assets and uploads them as a generic package, exiting early when the
# package for the current assets hash already exists.
.cache-assets-base:
  extends:
    - .compile-assets-base
    - .assets-compile-cache
    - .caching:rules:cache-assets
  stage: prepare
  variables:
    WEBPACK_REPORT: "false"
  script:
    - yarn_install_script
    - export GITLAB_ASSETS_HASH=$(bundle exec rake gitlab:assets:hash_sum)
    - source scripts/gitlab_component_helpers.sh
    - 'gitlab_assets_archive_doesnt_exist || { echoinfo "INFO: Exiting early as package exists."; exit 0; }'
    - assets_compile_script
    - echo -n "${GITLAB_ASSETS_HASH}" > "cached-assets-hash.txt"
    - run_timed_command "create_gitlab_assets_package"
    - run_timed_command "upload_gitlab_assets_package"

cache-assets:test:
  extends: .cache-assets-base

cache-assets:production:
  extends:
    - .cache-assets-base
    - .production
# Runs the automated package-registry cleanup script.
packages-cleanup:
  extends:
    - .default-retry
    - .caching:rules:packages-cleanup
  image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION}
  stage: prepare
  before_script:
    - source scripts/utils.sh
    - install_gitlab_gem
  script:
    - scripts/packages/automated_cleanup.rb
# Reusable pipeline header: the `spec` declares the `cng_path` input consumed
# below via `$[[ inputs.cng_path ]]`.
spec:
  inputs:
    cng_path:
      type: string
      default: 'build/CNG-mirror'
---
default:
  interruptible: true

stages:
  - prepare
  - deploy

include:
  - local: .gitlab/ci/global.gitlab-ci.yml
# Generates a dotenv file with all variables needed to trigger the CNG
# downstream pipeline.
.build-cng-env:
  image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION}-alpine3.20
  stage: prepare
  needs:
    # We need this job because we need its `cached-assets-hash.txt` artifact, so that we can pass the assets image tag to the downstream CNG pipeline.
    - pipeline: $PARENT_PIPELINE_ID
      job: build-assets-image
  variables:
    BUILD_ENV: build.env
  before_script:
    - source ./scripts/utils.sh
    - install_gitlab_gem
  script:
    - 'ruby -r./scripts/trigger-build.rb -e "puts Trigger.variables_for_env_file(Trigger::CNG.new.variables)" > $BUILD_ENV'
    - echo "GITLAB_ASSETS_TAG=$(assets_image_tag)" >> $BUILD_ENV
    - ruby -e 'puts "FULL_RUBY_VERSION=#{RUBY_VERSION}"' >> $BUILD_ENV
    - cat $BUILD_ENV
  artifacts:
    reports:
      dotenv: $BUILD_ENV
    paths:
      - $BUILD_ENV
    expire_in: 7 days
    when: always
# Triggers the CNG-mirror downstream pipeline, forwarding only the explicitly
# listed variables (global variable inheritance is disabled).
.build-cng:
  stage: prepare
  inherit:
    variables: false
  variables:
    GITLAB_REF_SLUG: "${GITLAB_REF_SLUG}"
    # CNG pipeline specific variables
    GITLAB_VERSION: "${GITLAB_VERSION}"
    GITLAB_TAG: "${GITLAB_TAG}"
    GITLAB_ASSETS_TAG: "${GITLAB_ASSETS_TAG}"
    CE_PIPELINE: "${CE_PIPELINE}"  # Based on https://docs.gitlab.com/ee/ci/jobs/job_control.html#check-if-a-variable-exists, `if: '$CE_PIPELINE'` will evaluate to `false` when this variable is empty
    EE_PIPELINE: "${EE_PIPELINE}"  # Based on https://docs.gitlab.com/ee/ci/jobs/job_control.html#check-if-a-variable-exists, `if: '$EE_PIPELINE'` will evaluate to `false` when this variable is empty
    GITLAB_ELASTICSEARCH_INDEXER_VERSION: "${GITLAB_ELASTICSEARCH_INDEXER_VERSION}"
    GITLAB_KAS_VERSION: "${GITLAB_KAS_VERSION}"
    GITLAB_PAGES_VERSION: "${GITLAB_PAGES_VERSION}"
    GITLAB_SHELL_VERSION: "${GITLAB_SHELL_VERSION}"
    GITLAB_WORKHORSE_VERSION: "${GITLAB_WORKHORSE_VERSION}"
    GITALY_SERVER_VERSION: "${GITALY_SERVER_VERSION}"
    RUBY_VERSION: "${FULL_RUBY_VERSION}"
    # Source variables, see scripts/trigger-build.rb
    TOP_UPSTREAM_SOURCE_PROJECT: "${TOP_UPSTREAM_SOURCE_PROJECT}"
    TOP_UPSTREAM_SOURCE_JOB: "${TOP_UPSTREAM_SOURCE_JOB}"
    TOP_UPSTREAM_MERGE_REQUEST_PROJECT_ID: "${TOP_UPSTREAM_MERGE_REQUEST_PROJECT_ID}"
    TOP_UPSTREAM_MERGE_REQUEST_IID: "${TOP_UPSTREAM_MERGE_REQUEST_IID}"
    TOP_UPSTREAM_SOURCE_SHA: "${TOP_UPSTREAM_SOURCE_SHA}"
    TOP_UPSTREAM_SOURCE_REF_SLUG: "${TOP_UPSTREAM_SOURCE_REF_SLUG}"
    # prevent cache invalidation between pipeline runs
    CACHE_BUSTER: "false"
    # link component version shas to current project instead of default CI_PIPELINE_CREATED_AT which forces rebuilds on each pipeline run
    CONTAINER_VERSION_SUFFIX: "${CI_PROJECT_NAME}"
  trigger:
    project: '${CI_PROJECT_NAMESPACE}/$[[ inputs.cng_path ]]'
    branch: $TRIGGER_BRANCH
    strategy: depend
include:
  - local: .gitlab/ci/rails/shared.gitlab-ci.yml

# Variants of the base db jobs run against a single-db CI connection setup.
db:rollback single-db-ci-connection:
  extends:
    - db:rollback
    - .single-db-ci-connection
    - .rails:rules:single-db-ci-connection

db:migrate:reset single-db-ci-connection:
  extends:
    - db:migrate:reset
    - .single-db-ci-connection
    - .rails:rules:single-db-ci-connection

db:check-schema-single-db-ci-connection:
  extends:
    - db:check-schema
    - .single-db-ci-connection
    - .rails:rules:single-db-ci-connection

db:post_deployment_migrations_validator-single-db-ci-connection:
  extends:
    - db:post_deployment_migrations_validator
    - .single-db-ci-connection
    - .rails:rules:db:check-migrations-single-db-ci-connection

db:backup_and_restore single-db-ci-connection:
  extends:
    - db:backup_and_restore
    - .single-db-ci-connection
    - .rails:rules:db-backup
# Rolls back to the first 16.3 migration, then re-migrates to HEAD.
db:rollback:
  extends:
    - .db-job-base
    - .rails:rules:db-rollback
  script:
    - bundle exec rake db:migrate VERSION=20230718020825  # 16.3 First migration
    - bundle exec rake db:migrate

db:rollback single-db:
  extends:
    - db:rollback
    - .single-db
    - .rails:rules:single-db

# https://docs.gitlab.com/ee/development/database/dbmigrate_multi_version_upgrade_job.html
db:migrate:multi-version-upgrade:
  extends:
    - .db-job-base
    - .rails:rules:db:migrate:multi-version-upgrade
  script:
    - curl -o latest_upgrade_stop.gz https://gitlab.com/gitlab-org/quality/pg-dump-generator/-/raw/main/pg_dumps/ee/latest_upgrade_stop.gz
    - gunzip -c latest_upgrade_stop.gz > gitlabhq_production
    - bundle exec rake db:drop db:create
    - apt-get update -qq && apt-get install -y -qq postgresql-client
    - psql --output /dev/null -v "ON_ERROR_STOP=1" -h postgres -U postgres -d gitlabhq_test < gitlabhq_production
    - bundle exec rake gitlab:db:configure

# Validate minimum PG version supported by GitLab
db:migrate:multi-version-upgrade-pg13:
  extends:
    - db:migrate:multi-version-upgrade
    - .use-pg13

db:migrate:reset:
  extends: .db-job-base
  script:
    - bundle exec rake db:migrate:reset

db:migrate:reset single-db:
  extends:
    - db:migrate:reset
    - .single-db
    - .rails:rules:single-db

db:check-schema:
  extends:
    - .db-job-base
    - .rails:rules:ee-mr-and-default-branch-only
  script:
    - run_timed_command "bundle exec rake db:drop db:create db:migrate"

db:check-schema-single-db:
  extends:
    - db:check-schema
    - .single-db
    - .rails:rules:single-db
db:check-migrations:
  extends:
    - .db-job-base
    - .use-pg14  # Should match the db same version used by GDK
    - .rails:rules:ee-and-foss-mr-with-migration
  script:
    - git fetch origin $CI_MERGE_REQUEST_TARGET_BRANCH_NAME:$CI_MERGE_REQUEST_TARGET_BRANCH_NAME --depth 20
    - scripts/validate_migration_schema
  allow_failure: true

db:check-migrations-single-db:
  extends:
    - db:check-migrations
    - .single-db
    - .rails:rules:db:check-migrations-single-db

db:post_deployment_migrations_validator:
  extends:
    - .db-job-base
    - .rails:rules:ee-and-foss-mr-with-migration
  script:
    - git fetch origin $CI_MERGE_REQUEST_TARGET_BRANCH_NAME:$CI_MERGE_REQUEST_TARGET_BRANCH_NAME --depth 20
    - scripts/post_deployment_migrations_validator
  allow_failure: true

db:post_deployment_migrations_validator-single-db:
  extends:
    - db:post_deployment_migrations_validator
    - .single-db
    - .rails:rules:db:check-migrations-single-db

db:migrate-non-superuser:
  extends:
    - .db-job-base
    - .rails:rules:ee-and-foss-mr-with-migration
  script:
    - bundle exec rake gitlab:db:reset_as_non_superuser

db:gitlabcom-database-testing:
  extends: .rails:rules:db:gitlabcom-database-testing
  stage: test
  image: ruby:${RUBY_VERSION}-alpine
  needs: []
  allow_failure: true
  script:
    - source scripts/utils.sh
    - install_gitlab_gem
    - ./scripts/trigger-build.rb gitlab-com-database-testing

db:backup_and_restore:
  extends:
    - .db-job-base
    - .rails:rules:db-backup
  variables:
    SETUP_DB: "false"
    GITLAB_ASSUME_YES: "1"
  script:
    - . scripts/prepare_build.sh
    - bundle exec rake db:drop db:create db:schema:load db:seed_fu
    - mkdir -p tmp/tests/public/uploads tmp/tests/{artifacts,pages,lfs-objects,terraform_state,registry,packages,ci_secure_files,external-diffs}
    - bundle exec rake gitlab:backup:create
    - date
    - bundle exec rake gitlab:backup:restore

db:backup_and_restore single-db:
  extends:
    - db:backup_and_restore
    - .single-db
    - .rails:rules:db-backup

db:rollback geo:
  extends:
    - db:rollback
    - .rails:rules:ee-only-migration
  script:
    - bundle exec rake db:migrate:geo VERSION=20170627195211
    - bundle exec rake db:migrate:geo
# Seeds development fixtures against a test database.
.run-dev-fixtures:
  extends:
    - .default-retry
    - .ruby-cache
    - .default-before_script
    - .use-pg14
  stage: test
  needs: ["setup-test-env"]
  variables:
    FIXTURE_PATH: "db/fixtures/development"
    SEED_VSA: "true"
    SEED_PRODUCTIVITY_ANALYTICS: "true"
    VSA_ISSUE_COUNT: 1
    SIZE: 0  # number of external projects to fork, requires network connection
    # SEED_NESTED_GROUPS: "false" # requires network connection

.run-dev-fixtures-script: &run-dev-fixtures-script
  - section_start "gitaly-test-spawn" "Spawning Gitaly"; scripts/gitaly-test-spawn; section_end "gitaly-test-spawn";  # Do not use 'bundle exec' here
  - section_start "seeding-db" "Seeding DB"; bundle exec rake db:seed_fu; section_end "seeding-db";

run-dev-fixtures:
  extends:
    - .run-dev-fixtures
    - .dev-fixtures:rules:ee-and-foss
  script:
    - *run-dev-fixtures-script

run-dev-fixtures-ee:
  extends:
    - .run-dev-fixtures
    - .dev-fixtures:rules:ee-only
    - .use-pg14-es7-ee
  script:
    - cp ee/db/fixtures/development/* $FIXTURE_PATH
    - *run-dev-fixtures-script
# Base for deploying/cleaning up docs Review Apps via the gitlab-docs project.
.review-docs:
  extends:
    - .default-retry
    - .docs:rules:review-docs
  image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION}-alpine
  stage: review
  needs: []
  variables:
    # We're cloning the repo instead of downloading the script for now
    # because some repos are private and CI_JOB_TOKEN cannot access files.
    # See https://gitlab.com/gitlab-org/gitlab/issues/191273
    GIT_DEPTH: 1
    # By default, deploy the Review App using the `main` branch of the `gitlab-org/gitlab-docs` project
    DOCS_BRANCH: main
  environment:
    name: review-docs/mr-${CI_MERGE_REQUEST_IID}
    # DOCS_REVIEW_APPS_DOMAIN and DOCS_GITLAB_REPO_SUFFIX are CI variables
    # Discussion: https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/14236/diffs#note_40140693
    auto_stop_in: 2 weeks
    url: http://${DOCS_BRANCH}-${DOCS_GITLAB_REPO_SUFFIX}-${CI_MERGE_REQUEST_IID}.${DOCS_REVIEW_APPS_DOMAIN}/${DOCS_GITLAB_REPO_SUFFIX}
    on_stop: review-docs-cleanup
  before_script:
    - source ./scripts/utils.sh
    - install_gitlab_gem

# Always trigger a docs build in gitlab-docs only on docs-only branches.
# Useful to preview the docs changes live.
review-docs-deploy:
  extends: .review-docs
  script:
    - ./scripts/trigger-build.rb docs deploy

# Cleanup remote environment of gitlab-docs
review-docs-cleanup:
  extends: .review-docs
  environment:
    name: review-docs/mr-${CI_MERGE_REQUEST_IID}
    action: stop
  script:
    - ./scripts/trigger-build.rb docs cleanup
.docs-markdown-lint-image:
  # When updating the image version here, update it in /scripts/lint-doc.sh too.
  image: ${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-docs/lint-markdown:alpine-3.20-vale-3.4.2-markdownlint2-0.13.0-lychee-0.15.1

docs-lint markdown:
  extends:
    - .default-retry
    - .docs:rules:docs-lint
    - .docs-markdown-lint-image
    - .yarn-cache
  stage: lint
  needs: []
  script:
    - source ./scripts/utils.sh
    - yarn_install_script
    - install_gitlab_gem
    - scripts/lint-doc.sh
    - scripts/lint/check_mermaid.mjs

docs-lint blueprint:
  extends:
    - .default-retry
    - .docs:rules:docs-blueprints-lint
  image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION}-slim
  stage: lint
  needs: []
  script:
    - scripts/lint-docs-blueprints.rb

docs code_quality:
  extends:
    - .reports:rules:docs_code_quality
    - .docs-markdown-lint-image
  stage: lint
  needs: []
  dependencies: []
  allow_failure: true
  script:
    - vale --output=doc/.vale/vale-json.tmpl --minAlertLevel warning doc > gl-code-quality-report-docs.json || exit_code=$?
  artifacts:
    reports:
      codequality: gl-code-quality-report-docs.json
    paths:
      - gl-code-quality-report-docs.json
    expire_in: 2 weeks
    when: always

docs-lint links:
  extends:
    - .docs:rules:docs-lint
    - .docs-markdown-lint-image
  stage: lint
  needs: []
  script:
    - lychee --offline --no-progress --include-fragments doc

ui-docs-links lint:
  extends:
    - .docs:rules:docs-lint
    - .static-analysis-base
    - .ruby-cache
  stage: lint
  needs: []
  script:
    - bundle exec haml-lint -i DocumentationLinks

docs-lint deprecations-and-removals:
  variables:
    SETUP_DB: "false"
  extends:
    - .default-retry
    - .ruby-cache
    - .default-before_script
    - .docs:rules:deprecations-and-removals
  stage: lint
  needs: []
  script:
    - bundle exec rake gitlab:docs:check_deprecations

docs-lint redirects:
  image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION}-alpine
  stage: lint
  extends:
    - .default-retry
    - .docs:rules:redirect-check
  needs: []
  script:
    - ./scripts/lint-docs-redirects.rb
# Reusable `needs` fragments, consumed via `!reference [..., needs]`.
.with-fixtures-needs:
  needs:
    - "rspec-all frontend_fixture"

.with-graphql-schema-dump-needs:
  needs:
    - "graphql-schema-dump"

.compile-assets-base:
  extends:
    - .default-retry
    - .default-before_script
    - .assets-compile-cache
  image: ${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images/${BUILD_OS}-${OS_VERSION}-ruby-${RUBY_VERSION}-node-${NODE_VERSION}:rubygems-${RUBYGEMS_VERSION}-git-2.33-lfs-2.9-yarn-1.22-graphicsmagick-1.3.36
  variables:
    SETUP_DB: "false"
    WEBPACK_VENDOR_DLL: "true"
    # Disable warnings in browserslist which can break on backports
    # https://github.com/browserslist/browserslist/blob/a287ec6/node.js#L367-L384
    BROWSERSLIST_IGNORE_OLD_DATA: "true"
    WEBPACK_COMPILE_LOG_PATH: "tmp/webpack-output.log"
  stage: prepare
  needs: []
  script:
    - yarn_install_script
    - export GITLAB_ASSETS_HASH=$(bin/rake gitlab:assets:hash_sum)
    - 'echo "CACHE_ASSETS_AS_PACKAGE: ${CACHE_ASSETS_AS_PACKAGE}"'
    # The new strategy to cache assets as generic packages is experimental and can be disabled by removing the `CACHE_ASSETS_AS_PACKAGE` variable
    - |
      if [[ "${CACHE_ASSETS_AS_PACKAGE}" == "true" ]]; then
        source scripts/gitlab_component_helpers.sh

        if ! gitlab_assets_archive_doesnt_exist; then
          # We remove all assets from the native cache as they could pollute the fresh assets from the package
          rm -rf public/assets/ app/assets/javascripts/locale/**/app.js

          run_timed_command "download_and_extract_gitlab_assets"
        fi
      fi
    - assets_compile_script
    - echo -n "${GITLAB_ASSETS_HASH}" > "cached-assets-hash.txt"
.update-cache-base:
  after_script:
    - yarn patch-package --reverse  # To avoid caching patched modules

compile-production-assets:
  extends:
    - .compile-assets-base
    - .production
    - .frontend:rules:compile-production-assets
  artifacts:
    name: webpack-report
    expire_in: 31d
    paths:
      # These assets are used in multiple locations:
      # - in `build-assets-image` job to create assets image for packaging systems
      # - GitLab UI for integration tests: https://gitlab.com/gitlab-org/gitlab-ui/-/blob/e88493b3c855aea30bf60baee692a64606b0eb1e/.storybook/preview-head.pug#L1
      - cached-assets-hash.txt
      - public/assets/
      - "${WEBPACK_COMPILE_LOG_PATH}"
    when: always

compile-production-assets as-if-foss:
  extends:
    - compile-production-assets
    - .as-if-foss
    - .frontend:rules:compile-production-assets-as-if-foss

follow-up:compile-production-assets:
  extends:
    - compile-production-assets
    - .qa:rules:follow-up-e2e
  needs:
    - manual:e2e-test-pipeline-generate

compile-test-assets:
  extends:
    - .compile-assets-base
    - .frontend:rules:compile-test-assets
  artifacts:
    expire_in: 7d
    paths:
      - public/assets/
      - config/helpers/tailwind/  # Assets created during tailwind compilation
      - "${WEBPACK_COMPILE_LOG_PATH}"
    when: always

update-assets-compile-production-cache:
  extends:
    - compile-production-assets
    - .update-cache-base
    - .assets-compile-cache-push
    - .shared:rules:update-cache
  artifacts: {}  # This job's purpose is only to update the cache.

update-assets-compile-test-cache:
  extends:
    - compile-test-assets
    - .update-cache-base
    - .assets-compile-cache-push
    - .shared:rules:update-cache
  artifacts: {}  # This job's purpose is only to update the cache.

update-storybook-yarn-cache:
  extends:
    - .default-retry
    - .default-utils-before_script
    - .update-cache-base
    - .storybook-yarn-cache-push
    - .shared:rules:update-cache
  stage: prepare
  script:
    - yarn_install_script_storybook
retrieve-frontend-fixtures:
  variables:
    SETUP_DB: "false"
  extends:
    - .default-retry
    - .frontend:rules:default-frontend-jobs
  stage: prepare
  needs: []
  script:
    - source scripts/utils.sh
    - source scripts/gitlab_component_helpers.sh
    - install_gitlab_gem
    - export_fixtures_sha_for_download
    - |
      if check_fixtures_download; then
        run_timed_command "download_and_extract_fixtures"
      fi
  artifacts:
    expire_in: 30 days
    paths:
      - tmp/tests/frontend/

# Download fixtures only when a merge request contains changes to only JS files
# and fixtures are present in the package registry.
.frontend-fixtures-base:
  extends:
    - .default-retry
    - .default-before_script
    - .ruby-cache
    - .use-pg14
    - .repo-from-artifacts
  stage: fixtures
  needs:
    - "setup-test-env"
    - "retrieve-tests-metadata"
    - "retrieve-frontend-fixtures"
    # it's ok to wait for the repo artifact as we're waiting for setup-test-env (which takes longer than clone-gitlab-repo) anyway
    - !reference [.repo-from-artifacts, needs]
  variables:
    # Don't add `CRYSTALBALL: "false"` here as we're enabling Crystalball for scheduled pipelines (in `.gitlab-ci.yml`), so that we get coverage data
    # for the `frontend fixture RSpec files` that will be added to the Crystalball mapping in `update-tests-metadata`.
    # More information in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/74003.
    WEBPACK_VENDOR_DLL: "true"
  script:
    - source scripts/gitlab_component_helpers.sh
    - |
      if check_fixtures_reuse; then
        echoinfo "INFO: Reusing frontend fixtures from 'retrieve-frontend-fixtures'."
        exit 0
      fi
    - run_timed_command "gem install knapsack --no-document"
    - section_start "gitaly-test-spawn" "Spawning Gitaly"; scripts/gitaly-test-spawn; section_end "gitaly-test-spawn";  # Do not use 'bundle exec' here
    - source ./scripts/rspec_helpers.sh
    - rspec_parallelized_job
  artifacts:
    name: frontend-fixtures
    expire_in: 31d
    when: always
    paths:
      - crystalball/
      - knapsack/
      - log/*.log
      - tmp/tests/frontend/

# Builds FOSS, and EE fixtures in the EE project.
# Builds FOSS fixtures in the FOSS project.
rspec-all frontend_fixture:
  extends:
    - .frontend-fixtures-base
    - .frontend:rules:frontend_fixture
  needs:
    - !reference [.frontend-fixtures-base, needs]
    - "compile-test-assets"
  parallel: 7
# Uploads EE fixtures in the EE project.
# Uploads FOSS fixtures in the FOSS project.
upload-frontend-fixtures:
  variables:
    SETUP_DB: "false"
  extends:
    - .default-retry
    - .default-before_script
    - .repo-from-artifacts
    - .frontend:rules:upload-frontend-fixtures
  stage: fixtures
  needs:
    # it's ok to wait for the repo artifact as we're waiting for the fixtures (which wait for the repo artifact) anyway
    - !reference [.repo-from-artifacts, needs]
    - !reference [.with-fixtures-needs, needs]
  script:
    - source scripts/gitlab_component_helpers.sh
    - export_fixtures_sha_for_upload
    - 'fixtures_archive_doesnt_exist || { echoinfo "INFO: Exiting early as package exists."; exit 0; }'
    - run_timed_command "create_fixtures_package"
    - run_timed_command "upload_fixtures_package"

graphql-schema-dump:
  variables:
    SETUP_DB: "false"
  extends:
    - .default-retry
    - .ruby-cache
    - .default-before_script
    - .frontend:rules:default-frontend-jobs
  stage: fixtures
  needs: []
  script:
    - bundle exec rake gitlab:graphql:schema:dump
  artifacts:
    expire_in: 30 days
    name: graphql-schema
    paths:
      - tmp/tests/graphql/gitlab_schema.graphql
      - tmp/tests/graphql/gitlab_schema.json
.frontend-test-base:
  extends:
    - .default-retry
    - .yarn-cache
  variables:
    # Disable warnings in browserslist which can break on backports
    # https://github.com/browserslist/browserslist/blob/a287ec6/node.js#L367-L384
    BROWSERSLIST_IGNORE_OLD_DATA: "true"
    USE_BUNDLE_INSTALL: "false"
    SETUP_DB: "false"
  before_script:
    - !reference [.default-before_script, before_script]
    - yarn_install_script
  stage: test-frontend

jest-build-cache:
  extends:
    - .frontend-test-base
    - .frontend:rules:jest
  needs: []
  artifacts:
    name: jest-cache
    expire_in: 12h
    when: always
    paths:
      - tmp/cache/jest/
  script:
    - run_timed_command "yarn jest:ci:build-cache"
  variables:
    # Propagate exit code correctly. See https://gitlab.com/groups/gitlab-org/-/epics/6074.
    FF_USE_NEW_BASH_EVAL_STRATEGY: 'true'
    FORCE_COLOR: '1'
  allow_failure:
    # In merge requests, failures exit with 2, so fail the pipeline. Otherwise,
    # they exit with 1, so as not to break master and other pipelines.
    exit_codes: 1

.vue3:
  variables:
    VUE_VERSION: 3
    NODE_OPTIONS: --max-old-space-size=7680
  allow_failure: true

.with-jest-build-cache-vue3-needs:
  needs:
    - job: jest-build-cache-vue3
      optional: true

jest-build-cache-vue3:
  extends:
    - jest-build-cache
    - .frontend:rules:jest-vue3
    - .vue3
jest:
  extends:
    - .frontend-test-base
    - .frontend:rules:jest
  needs:
    - job: jest-build-cache
      optional: true
  artifacts:
    name: coverage-frontend
    expire_in: 31d
    when: always
    paths:
      - coverage-frontend/
      - junit_jest.xml
      - tmp/tests/frontend/
    reports:
      junit: junit_jest.xml
  parallel: 11
  script:
    - run_timed_command "yarn jest:ci:without-fixtures"

jest-with-fixtures:
  extends:
    - jest
    - .repo-from-artifacts
    - .frontend:rules:jest
  needs:
    - !reference [jest, needs]
    # it's ok to wait for the repo artifact as we're waiting for the fixtures (which wait for the repo artifact) anyway
    - !reference [.repo-from-artifacts, needs]
    - !reference [.with-fixtures-needs, needs]
  parallel: 2
  script:
    - run_timed_command "yarn jest:ci:with-fixtures"

jest vue3:
  extends:
    - jest
    - .frontend:rules:jest-vue3
    - .vue3
  needs:
    - !reference [.with-jest-build-cache-vue3-needs, needs]

jest-with-fixtures vue3:
  extends:
    - jest-with-fixtures
    - .frontend:rules:jest-vue3
    - .vue3
  needs:
    - !reference ["jest vue3", needs]
    # it's ok to wait for the repo artifact as we're waiting for the fixtures (which wait for the repo artifact) anyway
    - !reference [.repo-from-artifacts, needs]
    - !reference [.with-fixtures-needs, needs]

jest predictive:
  extends:
    - jest
    - .frontend:rules:jest:predictive
  needs:
    - !reference [jest, needs]
    - "detect-tests"
  script:
    - if [[ -s "$RSPEC_CHANGED_FILES_PATH" ]] || [[ -s "$RSPEC_MATCHING_JS_FILES_PATH" ]]; then run_timed_command "yarn jest:ci:predictive-without-fixtures"; fi

jest-with-fixtures predictive:
  extends:
    - jest-with-fixtures
    - .frontend:rules:jest:predictive
  needs:
    - !reference [jest-with-fixtures, needs]
    - "detect-tests"
  script:
    - if [[ -s "$RSPEC_CHANGED_FILES_PATH" ]] || [[ -s "$RSPEC_MATCHING_JS_FILES_PATH" ]]; then run_timed_command "yarn jest:ci:predictive-with-fixtures"; fi
jest-integration:
  extends:
    - .frontend-test-base
    - .repo-from-artifacts
    - .frontend:rules:jest-integration
  script:
    - run_timed_command "yarn jest:integration --ci"
  needs:
    # it's ok to wait for the repo artifact as we're waiting for the fixtures (which wait for the repo artifact) anyway
    - !reference [.repo-from-artifacts, needs]
    - !reference [.with-fixtures-needs, needs]
    - !reference [.with-graphql-schema-dump-needs, needs]

jest-snapshot-vue3:
  extends:
    - .frontend-test-base
    - .repo-from-artifacts
    - .frontend:rules:jest-snapshot-vue3
  needs:
    # it's ok to wait for the repo artifact as we're waiting for the fixtures (which wait for the repo artifact) anyway
    - !reference [.repo-from-artifacts, needs]
    - !reference [.with-fixtures-needs, needs]
  variables:
    VUE_VERSION: 3
    JEST_REPORT: jest-test-report.json
    SNAPSHOT_TEST_REPORT: jest-snapshot-test-report.json
  script:
    - |
      yarn jest:snapshots --ci --json --outputFile="${JEST_REPORT}" || echo 'Proceed to parsing test report...'
      echo $(ruby -rjson -e 'puts JSON.generate(JSON.parse(File.read(ENV["JEST_REPORT"])).dig("snapshot"))') > "${SNAPSHOT_TEST_REPORT}"
      echo " ============= snapshot test report start =============="
      cat "${SNAPSHOT_TEST_REPORT}"
      echo " ============= snapshot test report end ================"
      snapshot_test_failed=$(ruby -rjson -e 'puts JSON.parse(File.read(ENV["SNAPSHOT_TEST_REPORT"])).dig("failure")')
      if [[ "${snapshot_test_failed}" == "true" ]]
      then
        echo "You have failed snapshot tests! Exiting 1..."
        exit 1
      else
        echo 'All snapshot tests passed! Exiting 0...'
        exit 0
      fi
  artifacts:
    name: snapshot_tests
    expire_in: 31d
    when: always
    paths:
      - jest-snapshot-test-match.json
      - jest-snapshot-test-report.json
coverage-frontend:
  extends:
    - .default-retry
    - .default-utils-before_script
    - .yarn-cache
    - .repo-from-artifacts
    - .frontend:rules:coverage-frontend
  needs:
    - !reference [.repo-from-artifacts, needs]
    - job: "jest"
      optional: true
    - job: "jest-with-fixtures"
      optional: true
    - job: "jest predictive"
      optional: true
  stage: post-test
  script:
    - yarn_install_script
    - run_timed_command "yarn node scripts/frontend/merge_coverage_frontend.js"
    # Removing the individual coverage results, as we just merged them.
    - if ls coverage-frontend/jest-* > /dev/null 2>&1; then
        rm -r coverage-frontend/jest-*;
      fi
  coverage: '/^Statements\s*:\s*?(\d+(?:\.\d+)?)%/'
  artifacts:
    name: coverage-frontend
    expire_in: 31d
    paths:
      - coverage-frontend/
    reports:
      coverage_report:
        coverage_format: cobertura
        path: coverage-frontend/cobertura-coverage.xml
webpack-dev-server:
  extends:
    - .default-retry
    - .default-utils-before_script
    - .yarn-cache
    - .repo-from-artifacts
    - .frontend:rules:default-frontend-jobs
  stage: test-frontend
  variables:
    WEBPACK_MEMORY_TEST: "true"
    WEBPACK_VENDOR_DLL: "true"
  script:
    - yarn_install_script
    - run_timed_command "retry yarn webpack-vendor"
    - run_timed_command "node --expose-gc node_modules/.bin/webpack-dev-server --config config/webpack.config.js"
  artifacts:
    name: webpack-dev-server
    expire_in: 31d
    paths:
      - webpack-dev-server.json

compile-storybook:
  extends:
    - .frontend-test-base
    - .storybook-yarn-cache
    - .repo-from-artifacts
    - .frontend:rules:compile-storybook
  stage: pages
  needs:
    # it's ok to wait for the repo artifact as we're waiting for the fixtures (which wait for the repo artifact) anyway
    - !reference [.repo-from-artifacts, needs]
    - !reference [.with-fixtures-needs, needs]
    - !reference [.with-graphql-schema-dump-needs, needs]
  artifacts:
    name: storybook
    expire_in: 31d
    when: always
    paths:
      - storybook/public
  script:
    - yarn_install_script_storybook
    - run_timed_command "yarn run storybook:build"
# One instantiation of the shared gem CI template per vendored gem.
include:
  - local: .gitlab/ci/templates/gem.gitlab-ci.yml
    inputs:
      gem_name: "activerecord-gitlab"
  - local: .gitlab/ci/templates/gem.gitlab-ci.yml
    inputs:
      gem_name: "click_house-client"
  - local: .gitlab/ci/templates/gem.gitlab-ci.yml
    inputs:
      gem_name: "gitlab-rspec"
  - local: .gitlab/ci/templates/gem.gitlab-ci.yml
    inputs:
      gem_name: "gitlab-schema-validation"
  - local: .gitlab/ci/templates/gem.gitlab-ci.yml
    inputs:
      gem_name: "gitlab-utils"
  - local: .gitlab/ci/templates/gem.gitlab-ci.yml
    inputs:
      gem_name: "ipynbdiff"
  - local: .gitlab/ci/templates/gem.gitlab-ci.yml
    inputs:
      gem_name: "gitlab-rspec_flaky"
  - local: .gitlab/ci/templates/gem.gitlab-ci.yml
    inputs:
      gem_name: "gitlab-safe_request_store"
  - local: .gitlab/ci/templates/gem.gitlab-ci.yml
    inputs:
      gem_name: "csv_builder"
  - local: .gitlab/ci/templates/gem.gitlab-ci.yml
    inputs:
      gem_name: "gitlab-http"
  - local: .gitlab/ci/templates/gem.gitlab-ci.yml
    inputs:
      gem_name: "gitlab-backup-cli"
  - local: .gitlab/ci/templates/gem.gitlab-ci.yml
    inputs:
      gem_name: "gitlab-secret_detection"
  - local: .gitlab/ci/templates/gem.gitlab-ci.yml
    inputs:
      gem_name: "gitlab-database-load_balancing"
  - local: .gitlab/ci/templates/gem.gitlab-ci.yml
    inputs:
      gem_name: "gitlab-database-lock_retries"
  - local: .gitlab/ci/templates/gem.gitlab-ci.yml
    inputs:
      gem_name: "gitlab-housekeeper"
glfm-verify:
  extends:
    - .rails-job-base
    - .glfm:rules:glfm-verify
    - .use-pg14
  stage: test
  needs: ["setup-test-env"]
  script:
    - !reference [.base-script, script]
    - bundle exec scripts/glfm/verify-all-generated-files-are-up-to-date.rb
  artifacts:
    name: changed-files
    when: on_failure
    expire_in: 31d
    paths:
      - glfm_specification/
.default-retry:
  retry:
    max: 2  # This is confusing but this means "3 runs at max".
    when:
      - api_failure
      - data_integrity_failure
      - runner_system_failure
      - scheduler_failure
      - stuck_or_timeout_failure
      - unknown_failure

.default-utils-before_script:
  before_script:
    - echo $FOSS_ONLY
    - '[ "$FOSS_ONLY" = "1" ] && rm -rf ee/ qa/spec/ee/ qa/qa/specs/features/ee/ qa/qa/ee/ qa/qa/ee.rb'
    - export GOPATH=$CI_PROJECT_DIR/.go
    - mkdir -p $GOPATH
    - source scripts/utils.sh

.default-before_script:
  before_script:
    - !reference [.default-utils-before_script, before_script]
    - source scripts/prepare_build.sh

.repo-from-artifacts:
  variables:
    GIT_STRATEGY: "${CI_FETCH_REPO_GIT_STRATEGY}"
  needs:
    # If the job extending this also defines `needs`, make sure to update
    # its `needs` to include `clone-gitlab-repo` because it'll be overridden.
    - job: clone-gitlab-repo
      optional: true  # Optional so easier to switch in between

.production:
  variables:
    RAILS_ENV: "production"
    NODE_ENV: "production"
    GITLAB_ALLOW_SEPARATE_CI_DATABASE: "true"
# Cache definitions: each `*-cache` anchor is pull-only; its `*-cache-push`
# counterpart merges the same definition and flips the policy to push.
.ruby-gems-cache: &ruby-gems-cache
  key: "ruby-gems-${BUILD_OS}-${OS_VERSION}-ruby-${RUBY_VERSION}"
  paths:
    - vendor/ruby/
  policy: pull

.ruby-gems-cache-push: &ruby-gems-cache-push
  <<: *ruby-gems-cache
  policy: push  # We want to rebuild the cache from scratch to ensure stale dependencies are cleaned up.

.ruby-coverage-gems-cache: &ruby-coverage-gems-cache
  key: "ruby-coverage-gems-${BUILD_OS}-${OS_VERSION}-ruby-${RUBY_VERSION}"
  paths:
    - vendor/ruby/
  policy: pull

.ruby-coverage-gems-cache-push: &ruby-coverage-gems-cache-push
  <<: *ruby-coverage-gems-cache
  policy: push  # We want to rebuild the cache from scratch to ensure stale dependencies are cleaned up.

.gitaly-binaries-cache: &gitaly-binaries-cache
  key:
    files:
      - GITALY_SERVER_VERSION
      - lib/gitlab/setup_helper.rb
    prefix: "gitaly-binaries-${BUILD_OS}-${OS_VERSION}"
  paths:
    - ${TMP_TEST_FOLDER}/gitaly/_build/bin/
    - ${TMP_TEST_FOLDER}/gitaly/_build/deps/git/install/
    - ${TMP_TEST_FOLDER}/gitaly/config.toml
    - ${TMP_TEST_FOLDER}/gitaly/config.toml.transactions
    - ${TMP_TEST_FOLDER}/gitaly/gitaly2.config.toml
    - ${TMP_TEST_FOLDER}/gitaly/gitaly2.config.toml.transactions
    - ${TMP_TEST_FOLDER}/gitaly/internal/
    - ${TMP_TEST_FOLDER}/gitaly/run/
    - ${TMP_TEST_FOLDER}/gitaly/run2/
    - ${TMP_TEST_FOLDER}/gitaly/Makefile
    - ${TMP_TEST_FOLDER}/gitaly/praefect.config.toml
    - ${TMP_TEST_FOLDER}/gitaly/praefect-db.config.toml
  policy: pull

.go-pkg-cache: &go-pkg-cache
  key: "go-pkg-${BUILD_OS}-${OS_VERSION}"
  paths:
    - .go/pkg/mod/
  policy: pull

.go-pkg-cache-push: &go-pkg-cache-push
  <<: *go-pkg-cache
  policy: push  # We want to rebuild the cache from scratch to ensure stale dependencies are cleaned up.

.node-modules-cache: &node-modules-cache
  key: "node-modules-${BUILD_OS}-${OS_VERSION}-${NODE_ENV}"
  paths:
    - node_modules/
    - tmp/cache/webpack-dlls/
  policy: pull

.node-modules-cache-push: &node-modules-cache-push
  <<: *node-modules-cache
  policy: push  # We want to rebuild the cache from scratch to ensure stale dependencies are cleaned up.

.assets-tmp-cache: &assets-tmp-cache
  key: "assets-tmp-${BUILD_OS}-${OS_VERSION}-ruby-${RUBY_VERSION}-node-${NODE_ENV}-v1"
  paths:
    - tmp/cache/assets/sprockets/
    - tmp/cache/babel-loader/
    - tmp/cache/vue-loader/
  policy: pull

.assets-tmp-cache-push: &assets-tmp-cache-push
  <<: *assets-tmp-cache
  policy: push  # We want to rebuild the cache from scratch to ensure we don't pile up outdated cache files.
.storybook-node-modules-cache: &storybook-node-modules-cache
key: "storybook-node-modules-${BUILD_OS}-${OS_VERSION}-${NODE_ENV}"
paths:
- storybook/node_modules/
policy: pull
.storybook-node-modules-cache-push: &storybook-node-modules-cache-push
<<: *storybook-node-modules-cache
policy: push # We want to rebuild the cache from scratch to ensure stale dependencies are cleaned up.
.rubocop-cache: &rubocop-cache
key: "rubocop-${BUILD_OS}-${OS_VERSION}-ruby-${RUBY_VERSION}"
paths:
- tmp/rubocop_cache/
policy: pull
.rubocop-cache-push: &rubocop-cache-push
<<: *rubocop-cache
# We want to rebuild the cache from scratch to ensure stale dependencies are cleaned up but RuboCop has a mechanism
# for keeping only the N latest cache files, so we take advantage of it with `pull-push`.
policy: push
.qa-ruby-gems-cache: &qa-ruby-gems-cache
key:
prefix: "qa-ruby-gems-${BUILD_OS}-${OS_VERSION}-ruby-${RUBY_VERSION}"
files:
- qa/Gemfile.lock
paths:
- qa/vendor/ruby
policy: pull
.qa-ruby-gems-cache-push: &qa-ruby-gems-cache-push
<<: *qa-ruby-gems-cache
policy: pull-push
# --- Cache compositions ----------------------------------------------------
# Jobs pick up one of these hidden templates via `extends` to get the
# combination of cache anchors (defined above) they need.
.setup-test-env-cache:
  cache:
    - *ruby-gems-cache
    - *gitaly-binaries-cache
    - *go-pkg-cache
.setup-test-env-cache-push:
  cache:
    - *ruby-gems-cache-push
    - *go-pkg-cache-push
.gitaly-binaries-cache-push:
  cache:
    - <<: *gitaly-binaries-cache
      policy: push # We want to rebuild the cache from scratch to ensure stale dependencies are cleaned up.
.ruby-cache:
  cache:
    - *ruby-gems-cache
.static-analysis-cache:
  cache:
    - *ruby-gems-cache
    - *node-modules-cache
    - *rubocop-cache
.rubocop-job-cache:
  cache:
    - *ruby-gems-cache
    - *rubocop-cache
.rubocop-job-cache-push:
  cache:
    - *ruby-gems-cache # We don't push this cache as it's already rebuilt by `update-setup-test-env-cache`
    - *rubocop-cache-push
.ruby-gems-coverage-cache:
  cache:
    - *ruby-coverage-gems-cache
.ruby-gems-coverage-cache-push:
  cache:
    - *ruby-coverage-gems-cache-push
.ruby-node-cache:
  cache:
    - *ruby-gems-cache
    - *node-modules-cache
# Bundler settings shared by the QA cache templates below.
.qa-bundler-variables: &qa-bundler-variables
  variables:
    BUNDLE_SUPPRESS_INSTALL_USING_MESSAGES: "true"
    BUNDLE_SILENCE_ROOT_WARNING: "true"
    BUNDLE_PATH: vendor
    BUNDLE_WITHOUT: development
.qa-cache:
  <<: *qa-bundler-variables
  cache:
    - *qa-ruby-gems-cache
.qa-cache-push:
  <<: *qa-bundler-variables
  cache:
    - *qa-ruby-gems-cache-push
.yarn-cache:
  cache:
    - *node-modules-cache
.assets-compile-cache:
  cache:
    - *ruby-gems-cache
    - *node-modules-cache
    - *assets-tmp-cache
.assets-compile-cache-push:
  cache:
    - *ruby-gems-cache # We don't push this cache as it's already rebuilt by `update-setup-test-env-cache`
    - *node-modules-cache-push
    - *assets-tmp-cache-push
.storybook-yarn-cache:
  cache:
    - *node-modules-cache
    - *storybook-node-modules-cache
.storybook-yarn-cache-push:
  cache:
    - *node-modules-cache # We don't push this cache as it's already rebuilt by `update-assets-compile-*-cache`
    - *storybook-node-modules-cache-push
# Caches the generated code-quality report between pipelines; branch
# pipelines push their own copy, docs pipelines pull master's copy.
.code-quality-cache: &code-quality-cache
  paths:
    - gl-code-quality-report.json
.code-quality-cache-push:
  cache:
    - key: $CI_COMMIT_BRANCH-code-quality-cache
      <<: *code-quality-cache
      policy: push
.code-quality-cache-master-pull:
  cache:
    - key: master-code-quality-cache
      <<: *code-quality-cache
      policy: pull # Docs pipelines can save time by pulling a copy of the code quality report from the latest master pipeline.
# --- Service container definitions -----------------------------------------
# Redis Cluster plus a plain Redis instance (version set via REDIS_VERSION).
.redis-services:
  services:
    - name: ${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images:redis-cluster-6.2.12
      alias: rediscluster # configure connections in config/redis.yml
    - name: redis:${REDIS_VERSION}-alpine
.pg-base-variables:
  variables:
    POSTGRES_HOST_AUTH_METHOD: trust
# PostgreSQL (with pgvector) + Redis, tuned for test throughput
# (fsync and friends disabled).
.db-services:
  services:
    - name: ${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images:postgres-${PG_VERSION}-pgvector-0.4.1
      command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off", "-c", "max_locks_per_transaction=256"]
      alias: postgres
    - !reference [.redis-services, services]
# Same as .db-services but additionally enables auto_explain JSON logging
# (query plans are written to the csvlog for later analysis).
.db-services-with-auto-explain:
  services:
    - name: ${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images:postgres-${PG_VERSION}-pgvector-0.4.1
      command:
        - postgres
        - -c
        - fsync=off
        - -c
        - synchronous_commit=off
        - -c
        - full_page_writes=off
        - -c
        - max_locks_per_transaction=256
        - -c
        - log_filename=pglog
        - -c
        - log_destination=csvlog
        - -c
        - logging_collector=true
        - -c
        - auto_explain.log_min_duration=0
        - -c
        - auto_explain.log_format=json
        - -c
        - auto_explain.log_timing=off
      alias: postgres
    - !reference [.redis-services, services]
.zoekt-variables:
  variables:
    ZOEKT_INDEX_BASE_URL: http://zoekt-ci-image:6060
    ZOEKT_SEARCH_BASE_URL: http://zoekt-ci-image:6070
.zoekt-services:
  services:
    - name: ${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images:zoekt-ci-image-1.8
      alias: zoekt-ci-image
# Environment for running the AI Gateway (code-suggestions model gateway)
# as a service container next to test jobs.
.ai-gateway-variables:
  variables:
    # Values are quoted: GitLab CI variables are always strings, and quoting
    # avoids YAML coercing `true`/`5052` into boolean/integer types.
    AIGW_AUTH__BYPASS_EXTERNAL: "true"
    AIGW_GOOGLE_CLOUD_PLATFORM__PROJECT: $VERTEX_AI_PROJECT
    AIGW_GOOGLE_CLOUD_PLATFORM__SERVICE_ACCOUNT_JSON_KEY: $VERTEX_AI_CREDENTIALS
    AIGW_FASTAPI__DOCS_URL: "/docs"
    AIGW_FASTAPI__OPENAPI_URL: "/openapi.json"
    AIGW_FASTAPI__API_PORT: "5052"
    ANTHROPIC_API_KEY: $ANTHROPIC_API_KEY_FOR_SERVICE
    # CI_DEBUG_SERVICES: "true" # Enable this variable when you debug ai-gateway boot failure.
.ai-gateway-services:
  services:
    - name: registry.gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/model-gateway:v1.8.0
      alias: ai-gateway
# --- PostgreSQL version matrix ---------------------------------------------
# One hidden template per supported PG major version; each pins the matching
# Redis version. Only pg13 uses the plain .db-services (no auto_explain).
.use-pg13:
  extends:
    - .pg-base-variables
  services:
    - !reference [.db-services, services]
  variables:
    PG_VERSION: "13"
    REDIS_VERSION: "6.2"
.use-pg14:
  extends:
    - .pg-base-variables
  services:
    - !reference [.db-services-with-auto-explain, services]
  variables:
    PG_VERSION: "14"
    REDIS_VERSION: "6.2"
.use-pg15:
  extends:
    - .pg-base-variables
  services:
    - !reference [.db-services-with-auto-explain, services]
  variables:
    PG_VERSION: "15"
    REDIS_VERSION: "7.0"
.use-pg16:
  extends:
    - .pg-base-variables
  services:
    - !reference [.db-services-with-auto-explain, services]
  variables:
    PG_VERSION: "16"
    REDIS_VERSION: "7.0"
# --- Elasticsearch combinations --------------------------------------------
# ES7 is configured via command-line -E flags; ES8 (below) is configured via
# ES_SETTING_* environment variables instead.
.es7-services:
  services:
    - !reference [.zoekt-services, services]
    - name: elasticsearch:7.17.6
      command: ["elasticsearch", "-E", "discovery.type=single-node", "-E", "xpack.security.enabled=false", "-E", "cluster.routing.allocation.disk.threshold_enabled=false"]
.use-pg13-es7-ee:
  extends:
    - .use-pg13
    - .zoekt-variables
  services:
    - !reference [.db-services, services]
    - !reference [.es7-services, services]
# NOTE: only the pg14 ES7 combo also boots the AI Gateway service.
.use-pg14-es7-ee:
  extends:
    - .use-pg14
    - .zoekt-variables
    - .ai-gateway-variables
  services:
    - !reference [.db-services-with-auto-explain, services]
    - !reference [.es7-services, services]
    - !reference [.ai-gateway-services, services]
.use-pg15-es7-ee:
  extends:
    - .use-pg15
    - .zoekt-variables
  services:
    - !reference [.db-services-with-auto-explain, services]
    - !reference [.es7-services, services]
.use-pg16-es7-ee:
  extends:
    - .use-pg16
    - .zoekt-variables
  services:
    - !reference [.db-services-with-auto-explain, services]
    - !reference [.es7-services, services]
.es8-services:
  services:
    - !reference [.zoekt-services, services]
    - name: elasticsearch:8.11.4
.es8-variables:
  variables:
    ES_SETTING_DISCOVERY_TYPE: "single-node"
    ES_SETTING_XPACK_SECURITY_ENABLED: "false"
    ES_SETTING_CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD__ENABLED: "false"
.use-pg13-es8-ee:
  extends:
    - .use-pg13
    - .zoekt-variables
    - .es8-variables
  services:
    - !reference [.db-services, services]
    - !reference [.es8-services, services]
.use-pg14-es8-ee:
  extends:
    - .use-pg14
    - .zoekt-variables
    - .es8-variables
  services:
    - !reference [.db-services-with-auto-explain, services]
    - !reference [.es8-services, services]
.use-pg15-es8-ee:
  extends:
    - .use-pg15
    - .zoekt-variables
    - .es8-variables
  services:
    - !reference [.db-services-with-auto-explain, services]
    - !reference [.es8-services, services]
.use-pg16-es8-ee:
  extends:
    - .use-pg16
    - .zoekt-variables
    - .es8-variables
  services:
    - !reference [.db-services-with-auto-explain, services]
    - !reference [.es8-services, services]
# --- OpenSearch combinations -----------------------------------------------
# OpenSearch containers are aliased as `elasticsearch` so the application's
# search configuration is unchanged across backends.
.os1-services:
  services:
    - !reference [.zoekt-services, services]
    - name: opensearchproject/opensearch:1.3.5
      alias: elasticsearch
      command: ["bin/opensearch", "-E", "discovery.type=single-node", "-E", "plugins.security.disabled=true", "-E", "cluster.routing.allocation.disk.threshold_enabled=false"]
.use-pg13-opensearch1-ee:
  extends:
    - .use-pg13
    - .zoekt-variables
  services:
    - !reference [.db-services, services]
    - !reference [.os1-services, services]
.use-pg14-opensearch1-ee:
  extends:
    - .use-pg14
    - .zoekt-variables
  services:
    - !reference [.db-services-with-auto-explain, services]
    - !reference [.os1-services, services]
.use-pg15-opensearch1-ee:
  extends:
    - .use-pg15
    - .zoekt-variables
  services:
    - !reference [.db-services-with-auto-explain, services]
    - !reference [.os1-services, services]
.use-pg16-opensearch1-ee:
  extends:
    - .use-pg16
    - .zoekt-variables
  services:
    - !reference [.db-services-with-auto-explain, services]
    - !reference [.os1-services, services]
.os2-services:
  services:
    - !reference [.zoekt-services, services]
    - name: opensearchproject/opensearch:2.2.1
      alias: elasticsearch
      command: ["bin/opensearch", "-E", "discovery.type=single-node", "-E", "plugins.security.disabled=true", "-E", "cluster.routing.allocation.disk.threshold_enabled=false"]
.use-pg13-opensearch2-ee:
  extends:
    - .use-pg13
    - .zoekt-variables
  services:
    - !reference [.db-services, services]
    - !reference [.os2-services, services]
.use-pg14-opensearch2-ee:
  extends:
    - .use-pg14
    - .zoekt-variables
  services:
    - !reference [.db-services-with-auto-explain, services]
    - !reference [.os2-services, services]
.use-pg15-opensearch2-ee:
  extends:
    - .use-pg15
    - .zoekt-variables
  services:
    - !reference [.db-services-with-auto-explain, services]
    - !reference [.os2-services, services]
.use-pg16-opensearch2-ee:
  extends:
    - .use-pg16
    - .zoekt-variables
  services:
    - !reference [.db-services-with-auto-explain, services]
    - !reference [.os2-services, services]
# PG14 stack plus a ClickHouse 23 server for ClickHouse-backed tests.
# NOTE: extending .use-pg14 and redefining `services` means the PG/Redis
# service list is repeated here so ClickHouse can be appended.
.use-pg14-clickhouse23:
  extends: .use-pg14
  services:
    - !reference [.db-services-with-auto-explain, services]
    - name: clickhouse/clickhouse-server:23.11.3.23-alpine
      alias: clickhouse
  variables:
    CLICKHOUSE_USER: clickhouse
    CLICKHOUSE_PASSWORD: clickhouse
    # Quoted: CI variable values are strings; avoids YAML integer coercion.
    CLICKHOUSE_DEFAULT_ACCESS_MANAGEMENT: "1"
    CLICKHOUSE_DB: gitlab_clickhouse_test
# Builds images with kaniko (daemonless); writes the registry credentials
# into kaniko's docker config before the job script runs.
.use-kaniko:
  image:
    name: ${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images:kaniko
    entrypoint: [""]
  before_script:
    - source scripts/utils.sh
    - mkdir -p /kaniko/.docker
    - echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > /kaniko/.docker/config.json
# Runs the job as if this were the FOSS (CE) codebase; the EE directories
# are removed by .default-utils-before_script when this is set.
.as-if-foss:
  variables:
    FOSS_ONLY: '1'
.use-docker-in-docker:
  image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}docker:${DOCKER_VERSION}
  services:
    - docker:${DOCKER_VERSION}-dind
  variables:
    DOCKER_HOST: tcp://docker:2375
    DOCKER_TLS_CERTDIR: ""
  tags:
    # See https://gitlab.com/gitlab-com/www-gitlab-com/-/issues/7019 for tag descriptions
    - gitlab-org-docker
# Docker-in-docker plus buildx/buildkit; installs QEMU emulators when
# building for arm64 so multi-arch builds work.
.use-buildx:
  extends: .use-docker-in-docker
  image: ${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images/${BUILD_OS}-${OS_VERSION}-slim:docker-${DOCKER_VERSION}
  variables:
    QEMU_IMAGE: tonistiigi/binfmt:qemu-v7.0.0
  before_script:
    - !reference [.default-utils-before_script, before_script]
    - echo "$CI_REGISTRY_PASSWORD" | docker login "$CI_REGISTRY" -u "$CI_REGISTRY_USER" --password-stdin
    - |
      if [[ "${ARCH}" =~ arm64 ]]; then
        echo -e "\033[1;33mInstalling latest qemu emulators\033[0m"
        docker pull -q ${QEMU_IMAGE};
        docker run --rm --privileged ${QEMU_IMAGE} --uninstall qemu-*;
        docker run --rm --privileged ${QEMU_IMAGE} --install all;
      fi
    - docker buildx create --use # creates a buildkit builder and sets it as the active one
# Selects the review-apps Kubernetes agent context for kubectl commands.
.use-kube-context:
  before_script:
    - export KUBE_CONTEXT="${CI_PROJECT_NAMESPACE}/gitlab:review-apps"
    - kubectl config use-context ${KUBE_CONTEXT}
# Skips the git clone entirely and downloads only the files listed in
# FILES_TO_DOWNLOAD through the repository-files API — much faster for
# jobs that need just a handful of scripts.
.fast-no-clone-job:
  variables:
    GIT_STRATEGY: none # We will download the required files for the job from the API
  before_script:
    # Logic taken from scripts/utils.sh in download_files function
    - |
      url="${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/repository/files/scripts%2Futils.sh/raw?ref=${CI_COMMIT_SHA}"
      curl -f --header "Private-Token: ${PROJECT_TOKEN_FOR_CI_SCRIPTS_API_USAGE}" "${url}" --create-dirs --output scripts/utils.sh
    - source scripts/utils.sh
    - run_timed_command "download_files ${FILES_TO_DOWNLOAD}"
# Issues a GCP workload-identity-federation ID token for jobs that read
# secrets from GCP Secret Manager.
.with_secret:
  id_tokens:
    GCP_ID_TOKEN:
      aud: https://iam.googleapis.com/projects/${GCP_PROJECT_NUMBER}/locations/global/workloadIdentityPools/${GCP_WORKLOAD_IDENTITY_FEDERATION_POOL_ID}/providers/${GCP_WORKLOAD_IDENTITY_FEDERATION_PROVIDER_ID}
# Validates the GraphQL schema and its docs, dumps the schema, and checks
# the generated possible-types file; the dumped schema is kept as an
# artifact for downstream consumers.
graphql-verify:
  variables:
    SETUP_DB: "false"
  extends:
    - .default-retry
    - .ruby-cache
    - .default-before_script
    - .graphql:rules:graphql-verify
  stage: test
  needs: []
  script:
    - bundle exec rake gitlab:graphql:validate
    - bundle exec rake gitlab:graphql:check_docs
    - bundle exec rake gitlab:graphql:schema:dump
    - node scripts/frontend/graphql_possible_types_extraction.js --check
  artifacts:
    expire_in: 7 days
    name: graphql-schema-verify
    paths:
      - tmp/tests/graphql/gitlab_schema.graphql
      - tmp/tests/graphql/gitlab_schema.json
    when: always
# --- "as-if-JH" pipeline support -------------------------------------------
# These jobs validate that changes also work for the JiHu (gitlab-jh)
# edition, by overlaying JH files onto the current commit and pushing the
# result to a sandbox repository.
.as-if-jh-sandbox-variables:
  variables:
    AS_IF_JH_BRANCH: "as-if-jh/${CI_COMMIT_REF_NAME}"
    SANDBOX_REPOSITORY: "https://dummy:${AS_IF_JH_TOKEN}@gitlab.com/gitlab-org-sandbox/gitlab-jh-validation.git"
.shared-as-if-jh:
  extends:
    - .as-if-jh-sandbox-variables
  variables:
    GITLAB_JH_MIRROR_PROJECT: "33019816"
    JH_FILES_TO_COMMIT: "jh package.json yarn.lock"
# Downloads the JH overlay files (jh/, package.json, yarn.lock) from the JH
# mirror and exposes them as artifacts for prepare-as-if-jh-branch.
add-jh-files:
  extends:
    - .shared-as-if-jh
    - .with_secret
    - .as-if-jh:rules:prepare-as-if-jh
  image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION}
  stage: prepare
  needs: []
  secrets:
    ADD_JH_FILES_TOKEN:
      gcp_secret_manager:
        name: ADD_JH_FILES_TOKEN
      token: $GCP_ID_TOKEN
  before_script:
    - source ./scripts/utils.sh
    # The secret is delivered as a file; read it into the environment.
    - export ADD_JH_FILES_TOKEN="$(cat ${ADD_JH_FILES_TOKEN})"
    - export JH_MIRROR_REPOSITORY="https://dummy:${ADD_JH_FILES_TOKEN}@gitlab.com/gitlab-org/gitlab-jh-mirrors/gitlab.git"
    - source ./scripts/setup/as-if-jh.sh
    - install_gitlab_gem
  script:
    - set_jh_branch_env_variable
    - download_jh_files ${JH_FILES_TO_COMMIT}
    - echoinfo "Changes after downloading JiHu files:"
    - git diff
    - git status
  artifacts:
    expire_in: 2d
    when: always # We also want the artifacts when we exit with error 3 (detached pipeline in that case)
    paths:
      # This should match JH_FILES_TO_COMMIT
      - jh/
      - package.json
      - yarn.lock
  allow_failure:
    exit_codes:
      - 3 # Set in the download_jh_files_from_git_clone function
# Commits the downloaded JH files on top of the current commit and
# force-pushes the result to the sandbox branch used by start-as-if-jh.
prepare-as-if-jh-branch:
  extends:
    - .shared-as-if-jh
    - .as-if-jh:rules:prepare-as-if-jh
  stage: prepare
  needs:
    - add-jh-files
  timeout: 10m
  variables:
    # We can't apply --filter=tree:0 for runner to set up the repository,
    # so instead we tell runner to not clone anything, and we set up the
    # repository by ourselves.
    GIT_STRATEGY: "none"
  before_script:
    - git clone --filter=tree:0 "${CI_REPOSITORY_URL}" gitlab
    # We should checkout before moving/changing files
    - cd gitlab
    - git checkout -b "${AS_IF_JH_BRANCH}" "${CI_COMMIT_SHA}"
    - cd ..
    - mv ${JH_FILES_TO_COMMIT} gitlab/
  script:
    - cd gitlab
    - git add ${JH_FILES_TO_COMMIT}
    - git commit -m 'Add JH files' # TODO: Mark which SHA we add
    - git push -f "${SANDBOX_REPOSITORY}" "${AS_IF_JH_BRANCH}"
# Triggers the code-sync pipeline in the sandbox project so the as-if-jh
# branch picks up JH-side build artifacts before validation runs.
sync-as-if-jh-branch:
  extends:
    - .as-if-jh-sandbox-variables
    - .as-if-jh:rules:sync-as-if-jh
  stage: prepare
  needs: ["prepare-as-if-jh-branch"]
  inherit:
    variables:
      # From .gitlab-ci.yml for the default Docker image and cache
      - DEFAULT_CI_IMAGE
      - REGISTRY_HOST
      - REGISTRY_GROUP
      - BUILD_OS
      - OS_VERSION
      - RUBY_VERSION_DEFAULT
      - RUBY_VERSION_NEXT
      - RUBY_VERSION
      - GO_VERSION
      - RUST_VERSION
      - PG_VERSION
      - RUBYGEMS_VERSION
      - CHROME_VERSION
      - NODE_ENV
  variables:
    MERGE_FROM: "${CI_COMMIT_SHA}" # This is used in https://jihulab.com/gitlab-cn/gitlab/-/blob/e98bcb37aea4cfe1e78e1daef1b58b5f732cf289/jh/bin/build_packagejson where we run in https://gitlab.com/gitlab-org-sandbox/gitlab-jh-validation
  trigger:
    # What this runs can be found at:
    # https://gitlab.com/gitlab-org-sandbox/gitlab-jh-validation/-/blob/as-if-jh-code-sync/jh/.gitlab-ci.yml
    project: gitlab-org-sandbox/gitlab-jh-validation
    branch: as-if-jh-code-sync
    strategy: depend
# Triggers the actual validation pipeline on the prepared as-if-jh branch
# in the sandbox project. Variables are not inherited so the downstream
# pipeline runs with its own configuration.
start-as-if-jh:
  extends:
    - .as-if-jh:rules:prepare-as-if-jh
  allow_failure: true
  stage: prepare
  needs:
    - job: "prepare-as-if-jh-branch"
    - job: "sync-as-if-jh-branch"
      optional: true
  inherit:
    variables: false
  variables:
    FORCE_GITLAB_CI: "true" # TODO: Trigger a merge request pipeline
  trigger:
    project: gitlab-org-sandbox/gitlab-jh-validation
    branch: as-if-jh/${CI_COMMIT_REF_NAME}
    strategy: depend
# Pulls in the shared danger-review CI component and layers project-specific
# settings on top of the `danger-review` job it defines.
include:
  - component: ${CI_SERVER_FQDN}/gitlab-org/components/danger-review/danger-review@1.4.1
    inputs:
      job_image: "${DEFAULT_CI_IMAGE}"
      job_stage: "preflight"
      # By default DANGER_DANGERFILE_PREFIX is not defined but allows JiHu to
      # use a different prefix.
      # See https://jihulab.com/gitlab-cn/gitlab/-/blob/main-jh/jh/.gitlab-ci.yml
      dangerfile: "${DANGER_DANGERFILE_PREFIX}Dangerfile"
      # See https://gitlab.com/gitlab-org/gitlab/-/issues/466701
      dry_run: false
danger-review:
  extends:
    - .default-retry
    - .ruby-node-cache
  # extending from .review:rules:danger doesn't work
  rules: !reference [.review:rules:danger, rules]
  variables:
    BUNDLE_WITH: "danger"
  before_script:
    - source scripts/utils.sh
    - bundle_install_script
    - yarn_install_script
include:
  - local: .gitlab/ci/rails/shared.gitlab-ci.yml
# Anchor reused by the rules below: matches merge-request pipelines
# (both merged-result and detached).
.if-merge-request: &if-merge-request
  if: '$CI_MERGE_REQUEST_EVENT_TYPE == "merged_result" || $CI_MERGE_REQUEST_EVENT_TYPE == "detached"'
# Boots MailRoom against the repo's config to catch configuration errors
# (incoming_email is force-enabled so the relevant config paths are exercised).
mailroom-config-validate:
  extends:
    - .rspec-base
    - .mailroom:rules:mailroom-config-validate
  stage: test
  script:
    - |
      sed -i '/incoming_email:/,/enabled:/{s/enabled: false/enabled: true/}' config/gitlab.yml
    - cp config/secrets.yml.example config/secrets.yml
    - bundle exec ruby -r "mail_room" -e "MailRoom::CLI.new(%w[-c config/mail_room.yml])"
  variables:
    SETUP_DB: "false"
# Run only on MR pipelines that touch mailroom-related files.
.mailroom:rules:mailroom-config-validate:
  rules:
    - <<: *if-merge-request
      changes:
        - config/gitlab.yml.example
        - config/mail_room.yml
        - Gemfile.lock
        - lib/gitlab/mail_room.rb
        - lib/gitlab/mail_room/**/*
        - lib/gitlab/redis/**/*
# Base for memory-measurement jobs: results are written to METRICS_FILE and
# published as a CI metrics report.
.only-code-memory-job-base:
  extends:
    - .default-retry
    - .ruby-cache
    - .default-before_script
    - .memory:rules
  variables:
    METRICS_FILE: "metrics.txt"
  artifacts:
    reports:
      metrics: "${METRICS_FILE}"
    expire_in: 62d
# Show memory usage caused by invoking require per gem.
# Hits the app with one request to ensure that any last minute require-s have been called.
# The application is booted in `production` environment.
# All tests are run without a webserver (directly using Rack::Mock by default).
memory-on-boot:
  extends:
    - .only-code-memory-job-base
    - .production
    - .use-pg14
  stage: test
  needs: ["setup-test-env", "compile-test-assets"]
  variables:
    SETUP_DB: "true"
    MEMORY_ON_BOOT_FILE_PREFIX: "tmp/memory_on_boot_"
    # Quoted: CI variable values are strings; the shell `seq` below works
    # with the string value unchanged.
    TEST_COUNT: "5"
  script:
    # Boot the app TEST_COUNT times and record each run's derailed report.
    - |
      for i in $(seq 1 $TEST_COUNT)
      do
        echo "Starting run $i out of $TEST_COUNT"
        PATH_TO_HIT="/users/sign_in" CUT_OFF=0.3 bundle exec derailed exec perf:mem >> "${MEMORY_ON_BOOT_FILE_PREFIX}$i.txt"
      done
    - scripts/generate-memory-metrics-on-boot "${MEMORY_ON_BOOT_FILE_PREFIX}" "$TEST_COUNT" >> "${METRICS_FILE}"
  artifacts:
    paths:
      - "${METRICS_FILE}"
      - "${MEMORY_ON_BOOT_FILE_PREFIX}*.txt"
# Common settings for notification jobs: no artifacts pulled, no cache.
.notify-defaults:
  stage: notify
  dependencies: []
  cache: {}
# Posts a message on the MR when the package-and-test child pipeline fails.
notify-package-and-test-failure:
  extends:
    - .notify-defaults
    - .notify:rules:notify-package-and-test-failure
  image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION}
  before_script:
    - source scripts/utils.sh
    - apt-get update
    - install_gitlab_gem
  script:
    - scripts/generate-failed-package-and-test-mr-message.rb
# Triggers downstream e2e tests in gitlab-org/opstrace/opstrace
# These e2e tests live in gitlab-org/opstrace/opstrace as a result
# of opstrace being brought in through an acquisition.
.e2e-observability-backend-base:
  stage: test
  needs: []
  extends:
    - .observability-backend:rules
  inherit:
    variables: false
  variables:
    # Tells the downstream pipeline which GitLab commit to test against.
    TEST_GITLAB_COMMIT: $CI_COMMIT_SHA
  trigger:
    project: gitlab-org/opstrace/opstrace
    strategy: depend
# e2e:observability-backend uses $CI_COMMIT_REF_NAME to
# checkout a branch in gitlab-org/opstrace/opstrace with
# the same name as the branch in this repo. Because opstrace
# is a different codebase, we match branch names without
# commit SHA.
e2e:observability-backend:
  extends: .e2e-observability-backend-base
  trigger:
    project: gitlab-org/opstrace/opstrace
    branch: $CI_COMMIT_REF_NAME
# e2e:observability-backend-main-branch will trigger
# an e2e test pipeline that checks out GitLab to
# $CI_COMMIT_SHA and Opstrace to the latest commit
# on main branch. Devs run this manually on local
# installs today periodically during development
# and this manual job increases dev velocity
# and testing reliability.
e2e:observability-backend-main-branch:
  extends: .e2e-observability-backend-base
  trigger:
    project: gitlab-org/opstrace/opstrace
    branch: main
# (Removed: "This diff is collapsed." lines — GitLab diff-viewer UI residue
# from the web scrape, not part of any CI configuration file.)
# Script-line anchor: pre-compresses everything under public/ with gzip and
# brotli so Pages can serve compressed assets directly.
.compress-public: &compress-public
  - find public -type f -regex '.*\.\(htm\|html\|txt\|text\|js\|json\|css\|svg\|xml\)$' -exec gzip -f -k {} \;
  - find public -type f -regex '.*\.\(htm\|html\|txt\|text\|js\|json\|css\|svg\|xml\)$' -exec brotli -f -k {} \;
# Assembles the GitLab Pages site: coverage reports, storybook, test
# metadata, and fixture mappings collected from upstream jobs.
# `|| true` keeps the job green when an optional input is missing.
pages:
  extends:
    - .default-retry
    - .pages:rules
  stage: pages
  environment: pages
  resource_group: pages
  needs:
    - "rspec:coverage"
    - "coverage-frontend"
    - "compile-production-assets"
    - "compile-storybook"
    - "update-tests-metadata"
    - "generate-frontend-fixtures-mapping"
  before_script:
    - apt-get update && apt-get -y install brotli gzip
  script:
    # Keep the compiled assets aside, then rebuild public/ from the inputs.
    - mv public/ .public/
    - mkdir public/
    - mkdir -p public/$(dirname "$KNAPSACK_RSPEC_SUITE_REPORT_PATH") public/$(dirname "$FLAKY_RSPEC_SUITE_REPORT_PATH") public/$(dirname "$RSPEC_PACKED_TESTS_MAPPING_PATH") public/$(dirname "$FRONTEND_FIXTURES_MAPPING_PATH")
    - mv coverage/ public/coverage-ruby/ || true
    - mv coverage-frontend/ public/coverage-frontend/ || true
    - mv storybook/public public/storybook || true
    - cp .public/assets/application-*.css public/application.css || true
    - mv $KNAPSACK_RSPEC_SUITE_REPORT_PATH public/$KNAPSACK_RSPEC_SUITE_REPORT_PATH || true
    - mv $FLAKY_RSPEC_SUITE_REPORT_PATH public/$FLAKY_RSPEC_SUITE_REPORT_PATH || true
    - mv $RSPEC_PACKED_TESTS_MAPPING_PATH.gz public/$RSPEC_PACKED_TESTS_MAPPING_PATH.gz || true
    - mv $FRONTEND_FIXTURES_MAPPING_PATH public/$FRONTEND_FIXTURES_MAPPING_PATH || true
    - *compress-public
  artifacts:
    paths:
      - public
    expire_in: 31d
# (Removed: GitLab comment-form UI residue — "Markdown is supported",
# "You are about to add 0 people to the discussion...", etc. — scraped from
# the web page, not part of any CI configuration file.)