[Git][ghc/ghc][wip/ghcup-ci] 2 commits: ghcup-ci take 1

Matthew Pickering (@mpickering) gitlab at gitlab.haskell.org
Thu Jan 12 12:56:58 UTC 2023



Matthew Pickering pushed to branch wip/ghcup-ci at Glasgow Haskell Compiler / GHC


Commits:
5d16b5ef by Matthew Pickering at 2023-01-12T11:55:56+00:00
ghcup-ci take 1

- - - - -
db5cf3c9 by Matthew Pickering at 2023-01-12T12:56:08+00:00
Add release scripts

- - - - -


20 changed files:

- .gitlab-ci.yml
- .gitlab/gen_ci.hs
- .gitlab/generate_jobs
- + .gitlab/jobs-metadata.json
- + .gitlab/mk_ghcup_metadata.py
- + .gitlab/rel_eng/default.nix
- + .gitlab/rel_eng/fetch-gitlab-artifacts/.gitignore
- + .gitlab/rel_eng/fetch-gitlab-artifacts/README.mkd
- + .gitlab/rel_eng/fetch-gitlab-artifacts/default.nix
- + .gitlab/rel_eng/fetch-gitlab-artifacts/fetch_gitlab.py
- + .gitlab/rel_eng/fetch-gitlab-artifacts/setup.py
- + .gitlab/rel_eng/mk-ghcup-metadata/.gitignore
- + .gitlab/rel_eng/mk-ghcup-metadata/README.mkd
- + .gitlab/rel_eng/mk-ghcup-metadata/default.nix
- + .gitlab/rel_eng/mk-ghcup-metadata/mk_ghcup_metadata.py
- + .gitlab/rel_eng/mk-ghcup-metadata/setup.py
- + .gitlab/rel_eng/nix/sources.json
- + .gitlab/rel_eng/nix/sources.nix
- + .gitlab/rel_eng/upload.sh
- .gitlab/upload_ghc_libs.py → .gitlab/rel_eng/upload_ghc_libs.py
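
The new .gitlab/rel_eng tree collects the release tooling that the jobs added below drive from CI. As a rough sketch of how the metadata generator is meant to be invoked (mirroring the ghcup-metadata-nightly job further down, whose flags are the authoritative reference; run from the repository root, assuming nix is available and ghcup-0.0.7.yaml has already been downloaded):

    # Hypothetical local run; the CI job fetches ghcup-0.0.7.yaml in its
    # before_script and takes CI_PIPELINE_ID / ProjectVersion from the pipeline.
    nix shell --extra-experimental-features nix-command -f .gitlab/rel_eng -c \
      ghcup-metadata \
        --metadata ghcup-0.0.7.yaml \
        --pipeline-id="$CI_PIPELINE_ID" \
        --version="$ProjectVersion" > metadata_test.yaml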


Changes:

=====================================
.gitlab-ci.yml
=====================================
@@ -166,7 +166,9 @@ not-interruptible:
   stage: not-interruptible
   script: "true"
   interruptible: false
-  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
+  image: "debian:10"
+  variables:
+    GIT_STRATEGY: none
   tags:
     - lint
   rules:
@@ -181,828 +183,921 @@ not-interruptible:
 ############################################################
 
 # These jobs are generated by running the ./.gitlab/generate_jobs script
-include: '.gitlab/jobs.yaml'
+#include: '.gitlab/jobs.yaml'
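
Regenerating the job definitions and checking that they are in sync with .gitlab/gen_ci.hs is what the lint-ci-config job does; a minimal local sketch, assuming GHC and cabal-install are on PATH:

    # Regenerate .gitlab/jobs.yaml and fail if the checked-in copy is stale.
    .gitlab/generate_jobs
    git diff --exit-code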
 
 ############################################################
 # tool linting
 ############################################################
 
-ghc-linters:
-  stage: tool-lint
-  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
-  extends: .lint-params
-  variables:
-    BUILD_FLAVOUR: default
-  script:
-    - .gitlab/ci.sh configure
-    - timeout 10m .gitlab/ci.sh run_hadrian test --test-root-dirs="testsuite/tests/linters"
-  dependencies: []
-  rules:
-    - if: $CI_MERGE_REQUEST_ID
-    - *drafts-can-fail-lint
-
-# Run mypy Python typechecker on linter scripts.
-lint-linters:
-  image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV"
-  extends: .lint
-  script:
-    - mypy testsuite/tests/linters/regex-linters/*.py
-  dependencies: []
-
-# Check that .T files all parse by listing broken tests.
-lint-testsuite:
-  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb9:$DOCKER_REV"
-  extends: .lint
-  script:
-    - make -Ctestsuite list_broken TEST_HC=$GHC
-  dependencies: []
-
-# Run mypy Python typechecker on testsuite driver
-typecheck-testsuite:
-  image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV"
-  extends: .lint
-  script:
-    - mypy testsuite/driver/runtests.py
-  dependencies: []
-
-# We allow the submodule checker to fail when run on merge requests (to
-# accommodate, e.g., haddock changes not yet upstream) but not on `master` or
-# Marge jobs.
-.lint-submods:
-  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
-  extends: .lint-params
-  variables:
-    BUILD_FLAVOUR: default
-  script:
-    - .gitlab/ci.sh configure
-    - .gitlab/ci.sh run_hadrian stage0:exe:lint-submodule-refs
-    - git fetch "$CI_MERGE_REQUEST_PROJECT_URL" $CI_MERGE_REQUEST_TARGET_BRANCH_NAME
-    - base="$(git merge-base FETCH_HEAD $CI_COMMIT_SHA)"
-    - "echo Linting submodule changes between $base..$CI_COMMIT_SHA"
-    - git submodule foreach git remote update
-    - _build/stageBoot/bin/lint-submodule-refs . $(git rev-list $base..$CI_COMMIT_SHA)
-  dependencies: []
-
-# We allow the submodule checker to fail when run on merge requests (to
-# accommodate, e.g., haddock changes not yet upstream) but not on `master` or
-# Marge jobs.
-lint-author:
-  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
-  extends: .lint-params
-  variables:
-    BUILD_FLAVOUR: default
-  script:
-    - git fetch "$CI_MERGE_REQUEST_PROJECT_URL" $CI_MERGE_REQUEST_TARGET_BRANCH_NAME
-    - base="$(git merge-base FETCH_HEAD $CI_COMMIT_SHA)"
-    - "echo Linting authors between $base..$CI_COMMIT_SHA"
-    - .gitlab/ci.sh lint_author $base $CI_COMMIT_SHA
-  dependencies: []
-  rules:
-    - if: $CI_MERGE_REQUEST_ID
-    - *drafts-can-fail-lint
-
-lint-ci-config:
-  image: "nixos/nix:2.8.0"
-  extends: .lint
-  variables:
-    BUILD_FLAVOUR: default
-  before_script:
-    - mkdir -p ~/.cabal
-    - cp -Rf cabal-cache/* ~/.cabal || true
-  script:
-    - nix shell --extra-experimental-features nix-command --extra-experimental-features flakes nixpkgs#cabal-install nixpkgs#ghc -c cabal update
-    - .gitlab/generate_jobs
-        # 1 if .gitlab/generate_jobs changed the output of the generated config
-    - nix shell --extra-experimental-features nix-command --extra-experimental-features flakes nixpkgs#git -c git diff --exit-code
-  after_script:
-    - rm -Rf cabal-cache
-    - cp -Rf ~/.cabal cabal-cache
-  dependencies: []
-  cache:
-    key: lint-ci-$CACHE_REV
-    paths:
-      - cabal-cache
-
-lint-submods:
-  extends: .lint-submods
-  # Allow failure on merge requests since any necessary submodule patches may
-  # not be upstreamed yet.
-  rules:
-    - if: '$CI_MERGE_REQUEST_LABELS =~ /.*marge_bot_batch_merge_job.*/'
-      allow_failure: false
-    # Don't run on nightly because the program needs a base commit to check.
-    - if: $NIGHTLY
-      when: never
-    - allow_failure: true
-
-lint-submods-branch:
-  extends: .lint-submods
-  variables:
-    BUILD_FLAVOUR: default
-  script:
-    - .gitlab/ci.sh configure
-    - .gitlab/ci.sh run_hadrian stage0:exe:lint-submodule-refs
-    - "echo Linting submodule changes between $CI_COMMIT_BEFORE_SHA..$CI_COMMIT_SHA"
-    - git submodule foreach git remote update
-    - _build/stageBoot/bin/lint-submodule-refs . $(git rev-list $CI_COMMIT_BEFORE_SHA..$CI_COMMIT_SHA)
-  rules:
-    - if: '$CI_COMMIT_BRANCH == "master"'
-    - if: '$CI_COMMIT_BRANCH =~ /ghc-[0.9]+\.[0-9]+/'
-    - *drafts-can-fail-lint
-
-############################################################
-# GHC source code linting
-############################################################
-
-.lint-params:
-  needs: []
-  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
-  extends: .lint
-  before_script:
-    - export PATH="/opt/toolchain/bin:$PATH"
-    # workaround for docker permissions
-    - sudo chown ghc:ghc -R .
-    - .gitlab/ci.sh setup
-  after_script:
-    - .gitlab/ci.sh save_cache
-    - cat ci-timings
-  variables:
-    GHC_FLAGS: -Werror
-  cache:
-    key: lint-$CACHE_REV
-    paths:
-      - cabal-cache
-
-hlint-ghc-and-base:
-  extends: .lint-params
-  image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV"
-  variables:
-    BUILD_FLAVOUR: default
-  script:
-    - .gitlab/ci.sh setup
-    - .gitlab/ci.sh configure
-    - .gitlab/ci.sh run_hadrian lint:base
-    - .gitlab/ci.sh run_hadrian lint:compiler
-
-############################################################
-# GHC-in-GHCi (Hadrian)
-############################################################
-
-hadrian-ghc-in-ghci:
-  stage: quick-build
-  needs:
-    - job: lint-linters
-    - job: lint-submods
-      optional: true
-  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
-  before_script:
-    # workaround for docker permissions
-    - sudo chown ghc:ghc -R .
-  variables:
-    GHC_FLAGS: -Werror
-  tags:
-    - x86_64-linux
-  script:
-    - git clean -xdf && git submodule foreach git clean -xdf
-    - .gitlab/ci.sh setup
-    - .gitlab/ci.sh configure
-    # Load ghc-in-ghci then immediately exit and check the modules loaded
-    - echo ":q" | hadrian/ghci -j`mk/detect-cpu-count.sh`| tail -n2 | grep "Ok,"
-  after_script:
-    - .gitlab/ci.sh save_cache
-    - cat ci-timings
-  cache:
-    key: hadrian-ghci-$CACHE_REV
-    paths:
-      - cabal-cache
-
-############################################################
-# Hadrian Multi-Repl
-############################################################
-
-hadrian-multi:
-  stage: testing
-  needs:
-    - job: x86_64-linux-fedora33-release
-      optional: true
-    - job: nightly-x86_64-linux-fedora33-release
-      optional: true
-    - job: release-x86_64-linux-fedora33-release
-      optional: true
-  dependencies: null
-  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
-  before_script:
-    # workaround for docker permissions
-    - sudo chown ghc:ghc -R .
-  variables:
-    GHC_FLAGS: -Werror
-    CONFIGURE_ARGS: --enable-bootstrap-with-devel-snapshot
-  tags:
-    - x86_64-linux
-  script:
-    - export BOOT_HC=$GHC
-    - root=$(pwd)/ghc
-    - ls
-    - |
-      mkdir tmp
-      tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C tmp
-      pushd tmp/ghc-*/
-      ./configure --prefix=$root
-      make install
-      popd
-      rm -Rf tmp
-    - export HC=$root/bin/ghc
-    # This GHC means, use this GHC to configure with
-    - export GHC=$root/bin/ghc
-    - .gitlab/ci.sh setup
-    - .gitlab/ci.sh configure
-    # Now GHC means, use this GHC for hadrian
-    - export GHC=$BOOT_HC
-    # Load hadrian-multi then immediately exit and check the modules loaded
-    - echo ":q" | hadrian/ghci-multi -j`mk/detect-cpu-count.sh`| tail -n2 | grep "Ok,"
-  after_script:
-    - .gitlab/ci.sh save_cache
-  cache:
-    key: hadrian-ghci-$CACHE_REV
-    paths:
-      - cabal-cache
-
-############################################################
-# stack-hadrian-build
-############################################################
-
-# Verify that Hadrian builds with stack. Note that we don't actually perform a
-# build of GHC itself; we merely test that the Hadrian executable builds and
-# works (by invoking `hadrian --version`).
-stack-hadrian-build:
-  extends: hadrian-ghc-in-ghci
-  stage: quick-build
-  script:
-    - .gitlab/ci.sh setup
-    - .gitlab/ci.sh configure
-    - hadrian/build-stack --version
-
-####################################
-# Testing reinstallable ghc codepath
-####################################
-
-test-cabal-reinstall-x86_64-linux-deb10:
-  extends: nightly-x86_64-linux-deb10-validate
-  stage: full-build
-  variables:
-    REINSTALL_GHC: "yes"
-    BUILD_FLAVOUR: validate
-    TEST_ENV: "x86_64-linux-deb10-cabal-install"
-  rules:
-    - if: $NIGHTLY
-
-########################################
-# Testing ABI is invariant across builds
-########################################
-
-abi-test-nightly:
-  stage: full-build
-  needs:
-    - job: nightly-x86_64-linux-fedora33-release-hackage
-    - job: nightly-x86_64-linux-fedora33-release
-  tags:
-    - x86_64-linux
-  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
-  dependencies: null
-  before_script:
-    - mkdir -p normal
-    - mkdir -p hackage
-    - tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C normal/
-    - tar -xf ghc-x86_64-linux-fedora33-release-hackage_docs.tar.xz -C hackage/
-  script:
-    - .gitlab/ci.sh compare_interfaces_of "normal/ghc-*" "hackage/ghc-*"
-  artifacts:
-    paths:
-      - out
-  rules:
-    - if: $NIGHTLY
-
-############################################################
-# Packaging
-############################################################
-
-doc-tarball:
+        #ghc-linters:
+        #  stage: tool-lint
+        #  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
+        #  extends: .lint-params
+        #  variables:
+        #    BUILD_FLAVOUR: default
+        #  script:
+        #    - .gitlab/ci.sh configure
+        #    - timeout 10m .gitlab/ci.sh run_hadrian test --test-root-dirs="testsuite/tests/linters"
+        #  dependencies: []
+        #  rules:
+        #    - if: $CI_MERGE_REQUEST_ID
+        #    - *drafts-can-fail-lint
+        #
+        ## Run mypy Python typechecker on linter scripts.
+        #lint-linters:
+        #  image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV"
+        #  extends: .lint
+        #  script:
+        #    - mypy testsuite/tests/linters/regex-linters/*.py
+        #  dependencies: []
+        #
+        ## Check that .T files all parse by listing broken tests.
+        #lint-testsuite:
+        #  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb9:$DOCKER_REV"
+        #  extends: .lint
+        #  script:
+        #    - make -Ctestsuite list_broken TEST_HC=$GHC
+        #  dependencies: []
+        #
+        ## Run mypy Python typechecker on testsuite driver
+        #typecheck-testsuite:
+        #  image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV"
+        #  extends: .lint
+        #  script:
+        #    - mypy testsuite/driver/runtests.py
+        #  dependencies: []
+        #
+        ## We allow the submodule checker to fail when run on merge requests (to
+        ## accommodate, e.g., haddock changes not yet upstream) but not on `master` or
+        ## Marge jobs.
+        #.lint-submods:
+        #  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
+        #  extends: .lint-params
+        #  variables:
+        #    BUILD_FLAVOUR: default
+        #  script:
+        #    - .gitlab/ci.sh configure
+        #    - .gitlab/ci.sh run_hadrian stage0:exe:lint-submodule-refs
+        #    - git fetch "$CI_MERGE_REQUEST_PROJECT_URL" $CI_MERGE_REQUEST_TARGET_BRANCH_NAME
+        #    - base="$(git merge-base FETCH_HEAD $CI_COMMIT_SHA)"
+        #    - "echo Linting submodule changes between $base..$CI_COMMIT_SHA"
+        #    - git submodule foreach git remote update
+        #    - _build/stageBoot/bin/lint-submodule-refs . $(git rev-list $base..$CI_COMMIT_SHA)
+        #  dependencies: []
+        #
+        ## We allow the submodule checker to fail when run on merge requests (to
+        ## accommodate, e.g., haddock changes not yet upstream) but not on `master` or
+        ## Marge jobs.
+        #lint-author:
+        #  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
+        #  extends: .lint-params
+        #  variables:
+        #    BUILD_FLAVOUR: default
+        #  script:
+        #    - git fetch "$CI_MERGE_REQUEST_PROJECT_URL" $CI_MERGE_REQUEST_TARGET_BRANCH_NAME
+        #    - base="$(git merge-base FETCH_HEAD $CI_COMMIT_SHA)"
+        #    - "echo Linting authors between $base..$CI_COMMIT_SHA"
+        #    - .gitlab/ci.sh lint_author $base $CI_COMMIT_SHA
+        #  dependencies: []
+        #  rules:
+        #    - if: $CI_MERGE_REQUEST_ID
+        #    - *drafts-can-fail-lint
+        #
+        #lint-ci-config:
+        #  image: "nixos/nix:2.8.0"
+        #  extends: .lint
+        #  variables:
+        #    BUILD_FLAVOUR: default
+        #  before_script:
+        #    - mkdir -p ~/.cabal
+        #    - cp -Rf cabal-cache/* ~/.cabal || true
+        #  script:
+        #    - nix shell --extra-experimental-features nix-command --extra-experimental-features flakes nixpkgs#cabal-install nixpkgs#ghc -c cabal update
+        #    - .gitlab/generate_jobs
+        #        # 1 if .gitlab/generate_jobs changed the output of the generated config
+        #    - nix shell --extra-experimental-features nix-command --extra-experimental-features flakes nixpkgs#git -c git diff --exit-code
+        #  after_script:
+        #    - rm -Rf cabal-cache
+        #    - cp -Rf ~/.cabal cabal-cache
+        #  dependencies: []
+        #  cache:
+        #    key: lint-ci-$CACHE_REV
+        #    paths:
+        #      - cabal-cache
+        #
+        #lint-submods:
+        #  extends: .lint-submods
+        #  # Allow failure on merge requests since any necessary submodule patches may
+        #  # not be upstreamed yet.
+        #  rules:
+        #    - if: '$CI_MERGE_REQUEST_LABELS =~ /.*marge_bot_batch_merge_job.*/'
+        #      allow_failure: false
+        #    # Don't run on nightly because the program needs a base commit to check.
+        #    - if: $NIGHTLY
+        #      when: never
+        #    - allow_failure: true
+        #
+        #lint-submods-branch:
+        #  extends: .lint-submods
+        #  variables:
+        #    BUILD_FLAVOUR: default
+        #  script:
+        #    - .gitlab/ci.sh configure
+        #    - .gitlab/ci.sh run_hadrian stage0:exe:lint-submodule-refs
+        #    - "echo Linting submodule changes between $CI_COMMIT_BEFORE_SHA..$CI_COMMIT_SHA"
+        #    - git submodule foreach git remote update
+        #    - _build/stageBoot/bin/lint-submodule-refs . $(git rev-list $CI_COMMIT_BEFORE_SHA..$CI_COMMIT_SHA)
+        #  rules:
+        #    - if: '$CI_COMMIT_BRANCH == "master"'
+        #    - if: '$CI_COMMIT_BRANCH =~ /ghc-[0.9]+\.[0-9]+/'
+        #    - *drafts-can-fail-lint
+        #
+        #############################################################
+        ## GHC source code linting
+        #############################################################
+        #
+        #.lint-params:
+        #  needs: []
+        #  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
+        #  extends: .lint
+        #  before_script:
+        #    - export PATH="/opt/toolchain/bin:$PATH"
+        #    # workaround for docker permissions
+        #    - sudo chown ghc:ghc -R .
+        #    - .gitlab/ci.sh setup
+        #  after_script:
+        #    - .gitlab/ci.sh save_cache
+        #    - cat ci-timings
+        #  variables:
+        #    GHC_FLAGS: -Werror
+        #  cache:
+        #    key: lint-$CACHE_REV
+        #    paths:
+        #      - cabal-cache
+        #
+        #hlint-ghc-and-base:
+        #  extends: .lint-params
+        #  image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV"
+        #  variables:
+        #    BUILD_FLAVOUR: default
+        #  script:
+        #    - .gitlab/ci.sh setup
+        #    - .gitlab/ci.sh configure
+        #    - .gitlab/ci.sh run_hadrian lint:base
+        #    - .gitlab/ci.sh run_hadrian lint:compiler
+        #
+        #############################################################
+        ## GHC-in-GHCi (Hadrian)
+        #############################################################
+        #
+        #hadrian-ghc-in-ghci:
+        #  stage: quick-build
+        #  needs:
+        #    - job: lint-linters
+        #    - job: lint-submods
+        #      optional: true
+        #  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
+        #  before_script:
+        #    # workaround for docker permissions
+        #    - sudo chown ghc:ghc -R .
+        #  variables:
+        #    GHC_FLAGS: -Werror
+        #  tags:
+        #    - x86_64-linux
+        #  script:
+        #    - git clean -xdf && git submodule foreach git clean -xdf
+        #    - .gitlab/ci.sh setup
+        #    - .gitlab/ci.sh configure
+        #    # Load ghc-in-ghci then immediately exit and check the modules loaded
+        #    - echo ":q" | hadrian/ghci -j`mk/detect-cpu-count.sh`| tail -n2 | grep "Ok,"
+        #  after_script:
+        #    - .gitlab/ci.sh save_cache
+        #    - cat ci-timings
+        #  cache:
+        #    key: hadrian-ghci-$CACHE_REV
+        #    paths:
+        #      - cabal-cache
+        #
+        #############################################################
+        ## Hadrian Multi-Repl
+        #############################################################
+        #
+        #hadrian-multi:
+        #  stage: testing
+        #  needs:
+        #    - job: x86_64-linux-fedora33-release
+        #      optional: true
+        #    - job: nightly-x86_64-linux-fedora33-release
+        #      optional: true
+        #    - job: release-x86_64-linux-fedora33-release
+        #      optional: true
+        #  dependencies: null
+        #  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
+        #  before_script:
+        #    # workaround for docker permissions
+        #    - sudo chown ghc:ghc -R .
+        #  variables:
+        #    GHC_FLAGS: -Werror
+        #    CONFIGURE_ARGS: --enable-bootstrap-with-devel-snapshot
+        #  tags:
+        #    - x86_64-linux
+        #  script:
+        #    - export BOOT_HC=$GHC
+        #    - root=$(pwd)/ghc
+        #    - ls
+        #    - |
+        #      mkdir tmp
+        #      tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C tmp
+        #      pushd tmp/ghc-*/
+        #      ./configure --prefix=$root
+        #      make install
+        #      popd
+        #      rm -Rf tmp
+        #    - export HC=$root/bin/ghc
+        #    # This GHC means, use this GHC to configure with
+        #    - export GHC=$root/bin/ghc
+        #    - .gitlab/ci.sh setup
+        #    - .gitlab/ci.sh configure
+        #    # Now GHC means, use this GHC for hadrian
+        #    - export GHC=$BOOT_HC
+        #    # Load hadrian-multi then immediately exit and check the modules loaded
+        #    - echo ":q" | hadrian/ghci-multi -j`mk/detect-cpu-count.sh`| tail -n2 | grep "Ok,"
+        #  after_script:
+        #    - .gitlab/ci.sh save_cache
+        #  cache:
+        #    key: hadrian-ghci-$CACHE_REV
+        #    paths:
+        #      - cabal-cache
+        #
+        #############################################################
+        ## stack-hadrian-build
+        #############################################################
+        #
+        ## Verify that Hadrian builds with stack. Note that we don't actually perform a
+        ## build of GHC itself; we merely test that the Hadrian executable builds and
+        ## works (by invoking `hadrian --version`).
+        #stack-hadrian-build:
+        #  extends: hadrian-ghc-in-ghci
+        #  stage: quick-build
+        #  script:
+        #    - .gitlab/ci.sh setup
+        #    - .gitlab/ci.sh configure
+        #    - hadrian/build-stack --version
+        #
+        #####################################
+        ## Testing reinstallable ghc codepath
+        #####################################
+        #
+        #test-cabal-reinstall-x86_64-linux-deb10:
+        #  extends: nightly-x86_64-linux-deb10-validate
+        #  stage: full-build
+        #  variables:
+        #    REINSTALL_GHC: "yes"
+        #    BUILD_FLAVOUR: validate
+        #    TEST_ENV: "x86_64-linux-deb10-cabal-install"
+        #  rules:
+        #    - if: $NIGHTLY
+        #
+        #########################################
+        ## Testing ABI is invariant across builds
+        #########################################
+        #
+        #abi-test-nightly:
+        #  stage: full-build
+        #  needs:
+        #    - job: nightly-x86_64-linux-fedora33-release-hackage
+        #    - job: nightly-x86_64-linux-fedora33-release
+        #  tags:
+        #    - x86_64-linux
+        #  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
+        #  dependencies: null
+        #  before_script:
+        #    - mkdir -p normal
+        #    - mkdir -p hackage
+        #    - tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C normal/
+        #    - tar -xf ghc-x86_64-linux-fedora33-release-hackage_docs.tar.xz -C hackage/
+        #  script:
+        #    - .gitlab/ci.sh compare_interfaces_of "normal/ghc-*" "hackage/ghc-*"
+        #  artifacts:
+        #    paths:
+        #      - out
+        #  rules:
+        #    - if: $NIGHTLY
+        #
+        #############################################################
+        ## Packaging
+        #############################################################
+        #
+        #doc-tarball:
+        #  stage: packaging
+        #  needs:
+        #    - job: x86_64-linux-deb10-numa-slow-validate
+        #      optional: true
+        #    - job: nightly-x86_64-linux-deb10-validate
+        #      optional: true
+        #    - job: release-x86_64-linux-deb10-release
+        #      optional: true
+        #
+        #    - job: x86_64-windows-validate
+        #      optional: true
+        #    - job: nightly-x86_64-windows-validate
+        #      optional: true
+        #    - job: release-x86_64-windows-release+no_split_sections
+        #      optional: true
+        #
+        #  tags:
+        #    - x86_64-linux
+        #  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
+        #  dependencies: null
+        #  variables:
+        #    LINUX_BINDIST: "ghc-x86_64-linux-deb10.tar.xz"
+        #    WINDOWS_BINDIST: "ghc-x86_64-windows.tar.xz"
+        #  artifacts:
+        #    paths:
+        #      - haddock.html.tar.xz
+        #      - libraries.html.tar.xz
+        #      - users_guide.html.tar.xz
+        #      - index.html
+        #      - "*.pdf"
+        #  script:
+        #    - |
+        #      mv "ghc-x86_64-linux-deb10-numa-slow-validate.tar.xz" "$LINUX_BINDIST" \
+        #        || mv "ghc-x86_64-linux-deb10-validate.tar.xz" "$LINUX_BINDIST" \
+        #        || mv "ghc-x86_64-linux-deb10-release.tar.xz" "$LINUX_BINDIST" \
+        #        || true
+        #      mv "ghc-x86_64-windows-validate.tar.xz" "$WINDOWS_BINDIST" \
+        #        || mv "ghc-x86_64-windows-release+no_split_sections.tar.xz" "$WINDOWS_BINDIST" \
+        #        || true
+        #      if [ ! -f "$LINUX_BINDIST" ]; then
+        #        echo "Error: $LINUX_BINDIST does not exist. Did the Debian 9 job fail?"
+        #        exit 1
+        #      fi
+        #      if [ ! -f "$WINDOWS_BINDIST" ]; then
+        #        echo "Error: $WINDOWS_BINDIST does not exist. Did the 64-bit Windows job fail?"
+        #        exit 1
+        #      fi
+        #    - rm -Rf docs
+        #    - bash -ex distrib/mkDocs/mkDocs $LINUX_BINDIST $WINDOWS_BINDIST
+        #    - ls -lh
+        #    - mv docs/*.tar.xz docs/index.html .
+        #
+        #hackage-doc-tarball:
+        #  stage: packaging
+        #  needs:
+        #    - job: x86_64-linux-fedora33-release-hackage
+        #      optional: true
+        #    - job: nightly-x86_64-linux-fedora33-release-hackage
+        #      optional: true
+        #    - job: release-x86_64-linux-fedora33-release-hackage
+        #      optional: true
+        #    - job: source-tarball
+        #  tags:
+        #    - x86_64-linux
+        #  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
+        #  dependencies: null
+        #  variables:
+        #    # Don't clone the git repo..
+        #    GIT_STRATEGY: none
+        #    # Don't attempt to boot a source tarball
+        #    NO_BOOT: "1"
+        #  artifacts:
+        #    paths:
+        #      - hackage_docs
+        #  before_script:
+        #    - tar -xf ghc-*[0-9]-src.tar.xz
+        #    - tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C ghc*/
+        #  script:
+        #    - cd ghc*/
+        #    - mv .gitlab/upload_ghc_libs.py .
+        #    - .gitlab/ci.sh setup
+        #    - .gitlab/ci.sh configure
+        #    - ./upload_ghc_libs.py prepare --bindist ghc*linux/
+        #    - mv .upload-libs/docs ../hackage_docs
+        #  rules:
+        #    - if: $NIGHTLY
+        #    - if: '$RELEASE_JOB == "yes"'
+        #
+        #source-tarball:
+        #  stage: full-build
+        #  tags:
+        #    - x86_64-linux
+        #  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
+        #  dependencies: []
+        #  artifacts:
+        #    paths:
+        #      - ghc-*.tar.xz
+        #  script:
+        #    - ./boot
+        #    - ./configure
+        #    - ./hadrian/build source-dist
+        #    - mv _build/source-dist/*.xz  .
+        #  rules:
+        #    - if: $NIGHTLY
+        #    - if: '$RELEASE_JOB == "yes"'
+        #    - if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-bootstrap.*/'
+        #
+        #generate-hadrian-bootstrap-sources:
+        #  stage: full-build
+        #  tags:
+        #    - x86_64-linux
+        #  image: "$DOCKER_IMAGE"
+        #  dependencies: []
+        #  parallel: *bootstrap_matrix
+        #  artifacts:
+        #    paths:
+        #      - hadrian-bootstrap-sources-*.tar.gz
+        #  script:
+        #    - bash -c "[ $($GHC --numeric-version) = $GHC_VERSION ] || { echo $GHC_VERSION is not the same as the version of $GHC && exit 1; }"
+        #    - python3 ./hadrian/bootstrap/bootstrap.py -w $GHC fetch -o hadrian-bootstrap-sources-$GHC_VERSION
+        #  rules:
+        #    - if: $NIGHTLY
+        #    - if: '$RELEASE_JOB == "yes"'
+        #    - if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-bootstrap.*/'
+        #
+        #
+        #package-hadrian-bootstrap-sources:
+        #  stage: full-build
+        #  tags:
+        #    - x86_64-linux
+        #  needs: ["generate-hadrian-bootstrap-sources"]
+        #  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
+        #  artifacts:
+        #    paths:
+        #      - hadrian-bootstrap-sources-all.tar.gz
+        #  script:
+        #    - tar -czvf hadrian-bootstrap-sources-all.tar.gz hadrian-bootstrap-sources-*.tar.gz
+        #  rules:
+        #    - if: $NIGHTLY
+        #    - if: '$RELEASE_JOB == "yes"'
+        #    - if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-bootstrap.*/'
+        #
+        #test-bootstrap:
+        #  stage: full-build
+        #  needs: [generate-hadrian-bootstrap-sources, source-tarball]
+        #  tags:
+        #    - x86_64-linux
+        #  image: "$DOCKER_IMAGE"
+        #  parallel: *bootstrap_matrix
+        #  dependencies: null
+        #  script:
+        #    - mkdir test-bootstrap
+        #    - tar -xf ghc-*[0-9]-src.tar.xz -C test-bootstrap
+        #    - tar -xf ghc-*-testsuite.tar.xz -C test-bootstrap
+        #    - cp hadrian-bootstrap-sources-$GHC_VERSION.tar.gz test-bootstrap/ghc-*
+        #    - pushd test-bootstrap/ghc-*
+        #    - python3 ./hadrian/bootstrap/bootstrap.py -w $GHC --bootstrap-sources hadrian-bootstrap-sources-$GHC_VERSION.tar.gz
+        #    - export HADRIAN_PATH="$PWD/_build/bin/hadrian"
+        #    - .gitlab/ci.sh setup
+        #    # Bootstrapping should not depend on HAPPY or ALEX so set them to false
+        #    # so the build fails if they are invoked.
+        #    - export HAPPY=/bin/false; export ALEX=/bin/false
+        #    - .gitlab/ci.sh configure
+        #    - .gitlab/ci.sh build_hadrian
+        #    - .gitlab/ci.sh test_hadrian
+        #    - popd
+        #    - rm -Rf test-bootstrap
+        #  variables:
+        #    # Don't record performance benchmarks
+        #    TEST_ENV: ""
+        #    BIN_DIST_NAME: "ghc-x86_64-deb10-linux"
+        #    BUILD_FLAVOUR: "validate"
+        #    NO_BOOT: "1"
+        #  rules:
+        #    - if: $NIGHTLY
+        #    - if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-bootstrap.*/'
+        #    - if: '$RELEASE_JOB == "yes"'
+        #      when: always
+        #      variables:
+        #        BUILD_FLAVOUR: "release"
+        #
+        #
+        #############################################################
+        ## Testing via head.hackage
+        #############################################################
+        #
+        ## Triggering jobs in the ghc/head.hackage project requires that we have a job
+        ## token for that repository. Furthermore the head.hackage CI job must have
+        ## access to an unprivileged access token with the ability to query the ghc/ghc
+        ## project such that it can find the job ID of the fedora33 job for the current
+        ## pipeline.
+        ##
+        ## hackage-lint: Can be triggered on any MR, normal validate pipeline or nightly build.
+        ##               Runs head.hackage with -dlint and a slow-validate bindist
+        ##
+        ## hackage-label-lint: Triggered on MRs with "user-facing" label, runs the slow-validate
+        ##                     head.hackage build with -dlint.
+        ##
+        ## nightly-hackage-lint: Runs automatically on nightly pipelines with slow-validate + dlint config.
+        ##
+        ## nightly-hackage-perf: Runs automatically on nightly pipelines with release build and eventlogging enabled.
+        ##
+        ## release-hackage-lint: Runs automatically on release pipelines with -dlint on a release bindist.
+        #
+        #.hackage:
+        #  stage: testing
+        #  variables:
+        #    UPSTREAM_PROJECT_PATH: "$CI_PROJECT_PATH"
+        #    UPSTREAM_PROJECT_ID: "$CI_PROJECT_ID"
+        #    UPSTREAM_PIPELINE_ID: "$CI_PIPELINE_ID"
+        #    RELEASE_JOB: "$RELEASE_JOB"
+        #  trigger:
+        #    project: "ghc/head.hackage"
+        #    branch: "upstream-testing"
+        #    strategy: "depend"
+        #
+        #hackage-lint:
+        #  needs:
+        #    - job: x86_64-linux-deb10-numa-slow-validate
+        #      optional: true
+        #      artifacts: false
+        #    - job: nightly-x86_64-linux-deb10-numa-slow-validate
+        #      optional: true
+        #      artifacts: false
+        #  extends: .hackage
+        #  variables:
+        #    SLOW_VALIDATE: 1
+        #    EXTRA_HC_OPTS: "-dlint"
+        #  # Not run for release jobs because there isn't a slow-validate bindist. There is an
+        #  # automatic pipeline for release bindists (see release-hackage-lint)
+        #  rules:
+        #    - if: '$RELEASE_JOB != "yes"'
+        #  when: manual
+        #
+        #hackage-label-lint:
+        #  needs:
+        #    - job: x86_64-linux-deb10-numa-slow-validate
+        #      optional: true
+        #      artifacts: false
+        #  extends: .hackage
+        #  variables:
+        #    SLOW_VALIDATE: 1
+        #    EXTRA_HC_OPTS: "-dlint"
+        #  rules:
+        #    - if: '$CI_MERGE_REQUEST_LABELS =~ /.*user-facing.*/'
+        #
+        ## The head.hackage job is split into two jobs because enabling `-dlint`
+        ## affects the total allocation numbers for the simplifier portion significantly.
+        #nightly-hackage-lint:
+        #  needs:
+        #    - job: nightly-x86_64-linux-deb10-numa-slow-validate
+        #      optional: true
+        #      artifacts: false
+        #  rules:
+        #    - if: $NIGHTLY
+        #      variables:
+        #        NIGHTLY: "$NIGHTLY"
+        #  extends: .hackage
+        #  variables:
+        #    SLOW_VALIDATE: 1
+        #    EXTRA_HC_OPTS: "-dlint"
+        #
+        #nightly-hackage-perf:
+        #  needs:
+        #    - job: nightly-x86_64-linux-fedora33-release
+        #      optional: true
+        #      artifacts: false
+        #  rules:
+        #    - if: $NIGHTLY
+        #      variables:
+        #        NIGHTLY: "$NIGHTLY"
+        #  extends: .hackage
+        #  variables:
+        #    # Generate logs for nightly builds which include timing information.
+        #    EXTRA_HC_OPTS: "-ddump-timings"
+        #    # Ask head.hackage to generate eventlogs
+        #    EVENTLOGGING: 1
+        #
+        #release-hackage-lint:
+        #  needs:
+        #    - job: release-x86_64-linux-fedora33-release
+        #      optional: true
+        #      artifacts: false
+        #  rules:
+        #    - if: '$RELEASE_JOB == "yes"'
+        #  extends: .hackage
+        #  variables:
+        #    # No slow-validate bindist on release pipeline
+        #    EXTRA_HC_OPTS: "-dlint"
+        #
+        #############################################################
+        ## Nofib testing
+        ## (Disabled: See #21859)
+        #############################################################
+        #
+        #perf-nofib:
+        #  # Dependencies used by perf-nofib can't be built when some compiler changes
+        #  # aren't (yet) supported by head.hackage.
+        #  # Hence we allow this job to fail.
+        #  allow_failure: true
+        #  stage: testing
+        #  needs:
+        #    - job: x86_64-linux-fedora33-release
+        #      optional: true
+        #    - job: nightly-x86_64-linux-fedora33-release
+        #      optional: true
+        #    - job: release-x86_64-linux-fedora33-release
+        #      optional: true
+        #  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
+        #  rules:
+        #    - when: never
+        #    - if: $CI_MERGE_REQUEST_ID
+        #    - if: '$CI_COMMIT_BRANCH == "master"'
+        #    - if: '$CI_COMMIT_BRANCH =~ /ghc-[0.9]+\.[0-9]+/'
+        #    - if: '$CI_MERGE_REQUEST_LABELS !~ /.*fast-ci.*/'
+        #  tags:
+        #    - x86_64-linux
+        #  before_script:
+        #    - cd nofib
+        #    - "cabal update --index=$HACKAGE_INDEX_STATE --project-file=cabal.project.head-hackage"
+        #  script:
+        #    - root=$(pwd)/ghc
+        #    - |
+        #      mkdir tmp
+        #      tar -xf ../ghc-x86_64-linux-fedora33-release.tar.xz -C tmp
+        #      pushd tmp/ghc-*/
+        #      ./configure --prefix=$root
+        #      make install
+        #      popd
+        #      rm -Rf tmp
+        #    - export PATH=$root/bin:$PATH
+        #    - cabal install -w "$root/bin/ghc" --lib regex-compat unboxed-ref parallel random-1.2.1 --allow-newer --package-env local.env --project-file=cabal.project.head-hackage
+        #    - export GHC_ENVIRONMENT="$(pwd)/local.env"
+        #    - "make HC=$root/bin/ghc BOOT_HC=$root/bin/ghc boot mode=fast -j$CPUS"
+        #    - "make HC=$root/bin/ghc BOOT_HC=$root/bin/ghc EXTRA_RUNTEST_OPTS='-cachegrind +RTS -V0 -RTS' NoFibRuns=1 mode=fast -j$CPUS 2>&1 | tee nofib.log"
+        #  artifacts:
+        #    expire_in: 12 week
+        #    when: always
+        #    paths:
+        #      - nofib/nofib.log
+        #
+        #############################################################
+        ## Ad-hoc performance testing
+        #############################################################
+        #
+        #perf:
+        #  stage: testing
+        #  needs:
+        #    - job: x86_64-linux-fedora33-release
+        #      optional: true
+        #    - job: nightly-x86_64-linux-fedora33-release
+        #      optional: true
+        #    - job: release-x86_64-linux-fedora33-release
+        #      optional: true
+        #  dependencies: null
+        #  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
+        #  rules:
+        #    - if: $CI_MERGE_REQUEST_ID
+        #    - if: '$CI_COMMIT_BRANCH == "master"'
+        #    - if: '$CI_COMMIT_BRANCH =~ /ghc-[0.9]+\.[0-9]+/'
+        #  tags:
+        #    - x86_64-linux-perf
+        #  script:
+        #    - root=$(pwd)/ghc
+        #    - |
+        #      mkdir tmp
+        #      tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C tmp
+        #      pushd tmp/ghc-*/
+        #      ./configure --prefix=$root
+        #      make install
+        #      popd
+        #      rm -Rf tmp
+        #    - export BOOT_HC=$(which ghc)
+        #    - export HC=$root/bin/ghc
+        #    - .gitlab/ci.sh perf_test
+        #  artifacts:
+        #    expire_in: 2 year
+        #    when: always
+        #    paths:
+        #      - out
+        #  rules:
+        #    - if: '$CI_MERGE_REQUEST_LABELS !~ /.*fast-ci.*/'
+        #
+        #############################################################
+        ## ABI testing
+        #############################################################
+        #
+        #abi-test:
+        #  stage: testing
+        #  needs:
+        #    - job: x86_64-linux-fedora33-release
+        #      optional: true
+        #    - job: nightly-x86_64-linux-fedora33-release
+        #      optional: true
+        #    - job: release-x86_64-linux-fedora33-release
+        #      optional: true
+        #  dependencies: null
+        #  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
+        #  rules:
+        #    - if: $CI_MERGE_REQUEST_ID
+        #    - if: '$CI_COMMIT_BRANCH == "master"'
+        #    - if: '$CI_COMMIT_BRANCH =~ /ghc-[0.9]+\.[0-9]+/'
+        #  tags:
+        #    - x86_64-linux
+        #  script:
+        #    - root=$(pwd)/ghc
+        #    - |
+        #      mkdir tmp
+        #      tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C tmp
+        #      pushd tmp/ghc-*/
+        #      ./configure --prefix=$root
+        #      make install
+        #      popd
+        #      rm -Rf tmp
+        #    - export BOOT_HC=$(which ghc)
+        #    - export HC=$root/bin/ghc
+        #    - .gitlab/ci.sh abi_test
+        #  artifacts:
+        #    paths:
+        #      - out
+        #  rules:
+        #    - if: '$CI_MERGE_REQUEST_LABELS !~ /.*fast-ci.*/'
+        #
+        #
+        #############################################################
+        ## Documentation deployment via GitLab Pages
+        #############################################################
+        #
+        #pages:
+        #  stage: deploy
+        #  needs: [doc-tarball]
+        #  dependencies: null
+        #  image: ghcci/x86_64-linux-deb9:0.2
+        #  # See #18973
+        #  allow_failure: true
+        #  tags:
+        #    - x86_64-linux
+        #  script:
+        #    - mkdir -p public/doc
+        #    # haddock docs are not in the hadrian-produced doc tarballs at the moment
+        #    # - tar -xf haddock.html.tar.xz -C public/doc
+        #    - tar -xf libraries.html.tar.xz -C public/doc
+        #    - tar -xf users_guide.html.tar.xz -C public/doc
+        #    - |
+        #      cat >public/index.html <<EOF
+        #      <!DOCTYPE HTML>
+        #      <meta charset="UTF-8">
+        #      <meta http-equiv="refresh" content="1; url=doc/">
+        #      EOF
+        #    - cp -f index.html public/doc
+        #  rules:
+        #    # N.B. only run this on ghc/ghc since the deployed pages are quite large
+        #    # and we only serve GitLab Pages for ghc/ghc.
+        #    - if: '$CI_COMMIT_BRANCH == "master" && $CI_PROJECT_NAMESPACE == "ghc"'
+        #    - if: '$CI_MERGE_REQUEST_LABELS =~ /.*publish-docs.*/'
+        #
+        #  artifacts:
+        #    paths:
+        #      - public
+
+#############################################################
+# Generation of GHCUp metadata
+#############################################################
+
+project-version:
   stage: packaging
-  needs:
-    - job: x86_64-linux-deb10-numa-slow-validate
-      optional: true
-    - job: nightly-x86_64-linux-deb10-validate
-      optional: true
-    - job: release-x86_64-linux-deb10-release
-      optional: true
-
-    - job: x86_64-windows-validate
-      optional: true
-    - job: nightly-x86_64-windows-validate
-      optional: true
-    - job: release-x86_64-windows-release+no_split_sections
-      optional: true
-
-  tags:
-    - x86_64-linux
   image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
   dependencies: null
-  variables:
-    LINUX_BINDIST: "ghc-x86_64-linux-deb10.tar.xz"
-    WINDOWS_BINDIST: "ghc-x86_64-windows.tar.xz"
-  artifacts:
-    paths:
-      - haddock.html.tar.xz
-      - libraries.html.tar.xz
-      - users_guide.html.tar.xz
-      - index.html
-      - "*.pdf"
-  script:
-    - |
-      mv "ghc-x86_64-linux-deb10-numa-slow-validate.tar.xz" "$LINUX_BINDIST" \
-        || mv "ghc-x86_64-linux-deb10-validate.tar.xz" "$LINUX_BINDIST" \
-        || mv "ghc-x86_64-linux-deb10-release.tar.xz" "$LINUX_BINDIST" \
-        || true
-      mv "ghc-x86_64-windows-validate.tar.xz" "$WINDOWS_BINDIST" \
-        || mv "ghc-x86_64-windows-release+no_split_sections.tar.xz" "$WINDOWS_BINDIST" \
-        || true
-      if [ ! -f "$LINUX_BINDIST" ]; then
-        echo "Error: $LINUX_BINDIST does not exist. Did the Debian 9 job fail?"
-        exit 1
-      fi
-      if [ ! -f "$WINDOWS_BINDIST" ]; then
-        echo "Error: $WINDOWS_BINDIST does not exist. Did the 64-bit Windows job fail?"
-        exit 1
-      fi
-    - rm -Rf docs
-    - bash -ex distrib/mkDocs/mkDocs $LINUX_BINDIST $WINDOWS_BINDIST
-    - ls -lh
-    - mv docs/*.tar.xz docs/index.html .
-
-hackage-doc-tarball:
-  stage: packaging
-  needs:
-    - job: x86_64-linux-fedora33-release-hackage
-      optional: true
-    - job: nightly-x86_64-linux-fedora33-release-hackage
-      optional: true
-    - job: release-x86_64-linux-fedora33-release-hackage
-      optional: true
-    - job: source-tarball
   tags:
     - x86_64-linux
-  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
-  dependencies: null
   variables:
-    # Don't clone the git repo..
-    GIT_STRATEGY: none
-    # Don't attempt to boot a source tarball
-    NO_BOOT: "1"
-  artifacts:
-    paths:
-      - hackage_docs
-  before_script:
-    - tar -xf ghc-*[0-9]-src.tar.xz
-    - tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C ghc*/
+    BUILD_FLAVOUR: default
   script:
-    - cd ghc*/
-    - mv .gitlab/upload_ghc_libs.py .
-    - .gitlab/ci.sh setup
+    # Calculate the project version
     - .gitlab/ci.sh configure
-    - ./upload_ghc_libs.py prepare --bindist ghc*linux/
-    - mv .upload-libs/docs ../hackage_docs
-  rules:
-    - if: $NIGHTLY
-    - if: '$RELEASE_JOB == "yes"'
+    - cp hadrian/bindist/Makefile .
+    - touch config.mk
+    - make show VALUE=ProjectVersion > version.sh
 
-source-tarball:
-  stage: full-build
-  tags:
-    - x86_64-linux
-  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
   dependencies: []
   artifacts:
     paths:
-      - ghc-*.tar.xz
-  script:
-    - ./boot
-    - ./configure
-    - ./hadrian/build source-dist
-    - mv _build/source-dist/*.xz  .
+      - version.sh
   rules:
-    - if: $NIGHTLY
+    - if: '$NIGHTLY'
     - if: '$RELEASE_JOB == "yes"'
-    - if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-bootstrap.*/'
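
The project-version job above exists only to capture the configured version in a form that later jobs can source. A sketch of what the version.sh artifact amounts to (the version number is purely illustrative):

    # version.sh is the output of `make show VALUE=ProjectVersion`, i.e. a
    # shell assignment along the lines of:
    #   ProjectVersion="9.7.20230112"
    # which the .ghcup-metadata jobs below pick up with:
    . ./version.sh
    echo "Generating GHCup metadata for GHC $ProjectVersion"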
 
-generate-hadrian-bootstrap-sources:
-  stage: full-build
+        #  image: "nixos/nix:2.8.0"
+        #  extends: .lint
+        #  variables:
+        #    BUILD_FLAVOUR: default
+        #  before_script:
+        #    - mkdir -p ~/.cabal
+        #    - cp -Rf cabal-cache/* ~/.cabal || true
+        #  script:
+        #    - nix shell --extra-experimental-features nix-command --extra-experimental-features flakes nixpkgs#cabal-install nixpkgs#ghc -c cabal update
+        #    - .gitlab/generate_jobs
+        #        # 1 if .gitlab/generate_jobs changed the output of the generated config
+        #    - nix shell --extra-experimental-features nix-command --extra-experimental-features flakes nixpkgs#git -c git diff --exit-code
+        #  after_script:
+        #    - rm -Rf cabal-cache
+        #    - cp -Rf ~/.cabal cabal-cache
+        #  dependencies: []
+        #  cache:
+        #    key: lint-ci-$CACHE_REV
+        #    paths:
+
+.ghcup-metadata:
+  stage: deploy
+  image: "nixos/nix:2.8.0"
+  dependencies: null
   tags:
     - x86_64-linux
-  image: "$DOCKER_IMAGE"
-  dependencies: []
-  parallel: *bootstrap_matrix
-  artifacts:
-    paths:
-      - hadrian-bootstrap-sources-*.tar.gz
-  script:
-    - bash -c "[ $($GHC --numeric-version) = $GHC_VERSION ] || { echo $GHC_VERSION is not the same as the version of $GHC && exit 1; }"
-    - python3 ./hadrian/bootstrap/bootstrap.py -w $GHC fetch -o hadrian-bootstrap-sources-$GHC_VERSION
-  rules:
-    - if: $NIGHTLY
-    - if: '$RELEASE_JOB == "yes"'
-    - if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-bootstrap.*/'
+  variables:
+    BUILD_FLAVOUR: default
+    GIT_SUBMODULE_STRATEGY: "none"
+  before_script:
+    - ls
+    - cat version.sh
+    # Calculate the project version
+    - . ./version.sh
 
+    # Download existing ghcup metadata
+    - nix shell --extra-experimental-features nix-command --extra-experimental-features flakes nixpkgs#wget -c wget "https://raw.githubusercontent.com/haskell/ghcup-metadata/develop/ghcup-0.0.7.yaml"
 
-package-hadrian-bootstrap-sources:
-  stage: full-build
-  tags:
-    - x86_64-linux
-  needs: ["generate-hadrian-bootstrap-sources"]
-  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
   artifacts:
     paths:
-      - hadrian-bootstrap-sources-all.tar.gz
+      - metadata_test.yaml
+      - version.sh
+
+ghcup-metadata-nightly:
+  extends: .ghcup-metadata
+    #  needs:
+    #    - job: nightly-x86_64-linux-fedora33-release
+    #      artifacts: false
+    #    - job: nightly-x86_64-linux-centos7-validate
+    #      artifacts: false
+    #    - job: nightly-x86_64-darwin-validate
+    #      artifacts: false
+    #    - job: nightly-aarch64-darwin-validate
+    #      artifacts: false
+    #    - job: nightly-x86_64-windows-validate
+    #      artifacts: false
+    #    - job: nightly-x86_64-linux-alpine3_12-int_native-validate+fully_static
+    #      artifacts: false
+    #    - job: nightly-x86_64-linux-deb9-validate
+    #      artifacts: false
+    #    - job: nightly-i386-linux-deb9-validate
+    #      artifacts: false
+    #    - job: nightly-x86_64-linux-deb10-validate
+    #      artifacts: false
+    #    - job: nightly-aarch64-linux-deb10-validate
+    #      artifacts: false
+    #    - job: nightly-x86_64-linux-deb11-validate
+    #      artifacts: false
+    #    - job: source-tarball
+    #      artifacts: false
+  needs:
+    - job: project-version
   script:
-    - tar -czvf hadrian-bootstrap-sources-all.tar.gz hadrian-bootstrap-sources-*.tar.gz
+    - export CI_PIPELINE_ID=61347
+    - export ProjectVersion=9.6.20230111
+    - nix shell --extra-experimental-features nix-command -f .gitlab/rel_eng -c ghcup-metadata --metadata ghcup-0.0.7.yaml --pipeline-id="$CI_PIPELINE_ID" --version="$ProjectVersion" > "metadata_test.yaml"
+    - echo "ProjectVersion=$ProjectVersion" > version.sh
   rules:
     - if: $NIGHTLY
-    - if: '$RELEASE_JOB == "yes"'
-    - if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-bootstrap.*/'
 
-test-bootstrap:
-  stage: full-build
-  needs: [generate-hadrian-bootstrap-sources, source-tarball]
-  tags:
-    - x86_64-linux
-  image: "$DOCKER_IMAGE"
-  parallel: *bootstrap_matrix
-  dependencies: null
+ghcup-metadata-release:
+  extends: .ghcup-metadata
   script:
-    - mkdir test-bootstrap
-    - tar -xf ghc-*[0-9]-src.tar.xz -C test-bootstrap
-    - tar -xf ghc-*-testsuite.tar.xz -C test-bootstrap
-    - cp hadrian-bootstrap-sources-$GHC_VERSION.tar.gz test-bootstrap/ghc-*
-    - pushd test-bootstrap/ghc-*
-    - python3 ./hadrian/bootstrap/bootstrap.py -w $GHC --bootstrap-sources hadrian-bootstrap-sources-$GHC_VERSION.tar.gz
-    - export HADRIAN_PATH="$PWD/_build/bin/hadrian"
-    - .gitlab/ci.sh setup
-    # Bootstrapping should not depend on HAPPY or ALEX so set them to false
-    # so the build fails if they are invoked.
-    - export HAPPY=/bin/false; export ALEX=/bin/false
-    - .gitlab/ci.sh configure
-    - .gitlab/ci.sh build_hadrian
-    - .gitlab/ci.sh test_hadrian
-    - popd
-    - rm -Rf test-bootstrap
-  variables:
-    # Don't record performance benchmarks
-    TEST_ENV: ""
-    BIN_DIST_NAME: "ghc-x86_64-deb10-linux"
-    BUILD_FLAVOUR: "validate"
-    NO_BOOT: "1"
+    - export CI_PIPELINE_ID=61299
+    - export ProjectVersion=9.6.0.20230111
+    - nix shell --extra-experimental-features nix-command -f .gitlab/rel_eng -c ghcup-metadata --release-mode --metadata ghcup-0.0.7.yaml --pipeline-id="$CI_PIPELINE_ID" --version="$ProjectVersion" > "metadata_test.yaml"
+    - echo "ProjectVersion=$ProjectVersion" > version.sh
   rules:
-    - if: $NIGHTLY
-    - if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-bootstrap.*/'
     - if: '$RELEASE_JOB == "yes"'
-      when: always
-      variables:
-        BUILD_FLAVOUR: "release"
-
 
-############################################################
-# Testing via head.hackage
-############################################################
-
-# Triggering jobs in the ghc/head.hackage project requires that we have a job
-# token for that repository. Furthermore the head.hackage CI job must have
-# access to an unprivileged access token with the ability to query the ghc/ghc
-# project such that it can find the job ID of the fedora33 job for the current
-# pipeline.
-#
-# hackage-lint: Can be triggered on any MR, normal validate pipeline or nightly build.
-#               Runs head.hackage with -dlint and a slow-validate bindist
-#
-# hackage-label-lint: Trigged on MRs with "user-facing" label, runs the slow-validate
-#                     head.hackage build with -dlint.
-#
-# nightly-hackage-lint: Runs automatically on nightly pipelines with slow-validate + dlint config.
-#
-# nightly-hackage-perf: Runs automaticaly on nightly pipelines with release build and eventlogging enabled.
-#
-# release-hackage-lint: Runs automatically on release pipelines with -dlint on a release bindist.
-
-.hackage:
-  stage: testing
+.ghcup-metadata-testing:
+  stage: deploy
   variables:
     UPSTREAM_PROJECT_PATH: "$CI_PROJECT_PATH"
     UPSTREAM_PROJECT_ID: "$CI_PROJECT_ID"
     UPSTREAM_PIPELINE_ID: "$CI_PIPELINE_ID"
     RELEASE_JOB: "$RELEASE_JOB"
   trigger:
-    project: "ghc/head.hackage"
+    project: "ghc/ghcup-ci"
     branch: "upstream-testing"
     strategy: "depend"
 
-hackage-lint:
+ghcup-metadata-testing-nightly:
   needs:
-    - job: x86_64-linux-deb10-numa-slow-validate
-      optional: true
-      artifacts: false
-    - job: nightly-x86_64-linux-deb10-numa-slow-validate
-      optional: true
+    - job: ghcup-metadata-nightly
       artifacts: false
-  extends: .hackage
+  extends: .ghcup-metadata-testing
   variables:
-    SLOW_VALIDATE: 1
-    EXTRA_HC_OPTS: "-dlint"
-  # No for release jobs because there isn't a slow-valdate bindist. There is an
-  # automatic pipeline for release bindists (see release-hackage-lint)
+      NIGHTLY: "$NIGHTLY"
+      UPSTREAM_JOB_NAME: "ghcup-metadata-nightly"
   rules:
-    - if: '$RELEASE_JOB != "yes"'
-  when: manual
+    - if: '$NIGHTLY == "1"'
 
-hackage-label-lint:
+ghcup-metadata-testing-release:
   needs:
-    - job: x86_64-linux-deb10-numa-slow-validate
-      optional: true
+    - job: ghcup-metadata-release
       artifacts: false
-  extends: .hackage
+  extends: .ghcup-metadata-testing
   variables:
-    SLOW_VALIDATE: 1
-    EXTRA_HC_OPTS: "-dlint"
-  rules:
-    - if: '$CI_MERGE_REQUEST_LABELS =~ /.*user-facing.*/'
-
-# The head.hackage job is split into two jobs because enabling `-dlint`
-# affects the total allocation numbers for the simplifier portion significantly.
-nightly-hackage-lint:
-  needs:
-    - job: nightly-x86_64-linux-deb10-numa-slow-validate
-      optional: true
-      artifacts: false
-  rules:
-    - if: $NIGHTLY
-      variables:
-        NIGHTLY: "$NIGHTLY"
-  extends: .hackage
-  variables:
-    SLOW_VALIDATE: 1
-    EXTRA_HC_OPTS: "-dlint"
-
-nightly-hackage-perf:
-  needs:
-    - job: nightly-x86_64-linux-fedora33-release
-      optional: true
-      artifacts: false
-  rules:
-    - if: $NIGHTLY
-      variables:
-        NIGHTLY: "$NIGHTLY"
-  extends: .hackage
-  variables:
-    # Generate logs for nightly builds which include timing information.
-    EXTRA_HC_OPTS: "-ddump-timings"
-    # Ask head.hackage to generate eventlogs
-    EVENTLOGGING: 1
-
-release-hackage-lint:
-  needs:
-    - job: release-x86_64-linux-fedora33-release
-      optional: true
-      artifacts: false
+      UPSTREAM_JOB_NAME: "ghcup-metadata-release"
   rules:
     - if: '$RELEASE_JOB == "yes"'
-  extends: .hackage
-  variables:
-    # No slow-validate bindist on release pipeline
-    EXTRA_HC_OPTS: "-dlint"
-
-############################################################
-# Nofib testing
-# (Disabled: See #21859)
-############################################################
-
-perf-nofib:
-  # Dependencies used by perf-nofib can't be built when some compiler changes
-  # aren't (yet) supported by head.hackage.
-  # Hence we allow this job to fail.
-  allow_failure: true
-  stage: testing
-  needs:
-    - job: x86_64-linux-fedora33-release
-      optional: true
-    - job: nightly-x86_64-linux-fedora33-release
-      optional: true
-    - job: release-x86_64-linux-fedora33-release
-      optional: true
-  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
-  rules:
-    - when: never
-    - if: $CI_MERGE_REQUEST_ID
-    - if: '$CI_COMMIT_BRANCH == "master"'
-    - if: '$CI_COMMIT_BRANCH =~ /ghc-[0.9]+\.[0-9]+/'
-    - if: '$CI_MERGE_REQUEST_LABELS !~ /.*fast-ci.*/'
-  tags:
-    - x86_64-linux
-  before_script:
-    - cd nofib
-    - "cabal update --index=$HACKAGE_INDEX_STATE --project-file=cabal.project.head-hackage"
-  script:
-    - root=$(pwd)/ghc
-    - |
-      mkdir tmp
-      tar -xf ../ghc-x86_64-linux-fedora33-release.tar.xz -C tmp
-      pushd tmp/ghc-*/
-      ./configure --prefix=$root
-      make install
-      popd
-      rm -Rf tmp
-    - export PATH=$root/bin:$PATH
-    - cabal install -w "$root/bin/ghc" --lib regex-compat unboxed-ref parallel random-1.2.1 --allow-newer --package-env local.env --project-file=cabal.project.head-hackage
-    - export GHC_ENVIRONMENT="$(pwd)/local.env"
-    - "make HC=$root/bin/ghc BOOT_HC=$root/bin/ghc boot mode=fast -j$CPUS"
-    - "make HC=$root/bin/ghc BOOT_HC=$root/bin/ghc EXTRA_RUNTEST_OPTS='-cachegrind +RTS -V0 -RTS' NoFibRuns=1 mode=fast -j$CPUS 2>&1 | tee nofib.log"
-  artifacts:
-    expire_in: 12 week
-    when: always
-    paths:
-      - nofib/nofib.log
-
-############################################################
-# Ad-hoc performance testing
-############################################################
-
-perf:
-  stage: testing
-  needs:
-    - job: x86_64-linux-fedora33-release
-      optional: true
-    - job: nightly-x86_64-linux-fedora33-release
-      optional: true
-    - job: release-x86_64-linux-fedora33-release
-      optional: true
-  dependencies: null
-  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
-  rules:
-    - if: $CI_MERGE_REQUEST_ID
-    - if: '$CI_COMMIT_BRANCH == "master"'
-    - if: '$CI_COMMIT_BRANCH =~ /ghc-[0.9]+\.[0-9]+/'
-  tags:
-    - x86_64-linux-perf
-  script:
-    - root=$(pwd)/ghc
-    - |
-      mkdir tmp
-      tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C tmp
-      pushd tmp/ghc-*/
-      ./configure --prefix=$root
-      make install
-      popd
-      rm -Rf tmp
-    - export BOOT_HC=$(which ghc)
-    - export HC=$root/bin/ghc
-    - .gitlab/ci.sh perf_test
-  artifacts:
-    expire_in: 2 year
-    when: always
-    paths:
-      - out
-  rules:
-    - if: '$CI_MERGE_REQUEST_LABELS !~ /.*fast-ci.*/'
-
-############################################################
-# ABI testing
-############################################################
-
-abi-test:
-  stage: testing
-  needs:
-    - job: x86_64-linux-fedora33-release
-      optional: true
-    - job: nightly-x86_64-linux-fedora33-release
-      optional: true
-    - job: release-x86_64-linux-fedora33-release
-      optional: true
-  dependencies: null
-  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
-  rules:
-    - if: $CI_MERGE_REQUEST_ID
-    - if: '$CI_COMMIT_BRANCH == "master"'
-    - if: '$CI_COMMIT_BRANCH =~ /ghc-[0.9]+\.[0-9]+/'
-  tags:
-    - x86_64-linux
-  script:
-    - root=$(pwd)/ghc
-    - |
-      mkdir tmp
-      tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C tmp
-      pushd tmp/ghc-*/
-      ./configure --prefix=$root
-      make install
-      popd
-      rm -Rf tmp
-    - export BOOT_HC=$(which ghc)
-    - export HC=$root/bin/ghc
-    - .gitlab/ci.sh abi_test
-  artifacts:
-    paths:
-      - out
-  rules:
-    - if: '$CI_MERGE_REQUEST_LABELS !~ /.*fast-ci.*/'
-
-
-############################################################
-# Documentation deployment via GitLab Pages
-############################################################
-
-pages:
-  stage: deploy
-  needs: [doc-tarball]
-  dependencies: null
-  image: ghcci/x86_64-linux-deb9:0.2
-  # See #18973
-  allow_failure: true
-  tags:
-    - x86_64-linux
-  script:
-    - mkdir -p public/doc
-    # haddock docs are not in the hadrian produce doc tarballs at the moment
-    # - tar -xf haddock.html.tar.xz -C public/doc
-    - tar -xf libraries.html.tar.xz -C public/doc
-    - tar -xf users_guide.html.tar.xz -C public/doc
-    - |
-      cat >public/index.html <<EOF
-      <!DOCTYPE HTML>
-      <meta charset="UTF-8">
-      <meta http-equiv="refresh" content="1; url=doc/">
-      EOF
-    - cp -f index.html public/doc
-  rules:
-    # N.B. only run this on ghc/ghc since the deployed pages are quite large
-    # and we only serve GitLab Pages for ghc/ghc.
-    - if: '$CI_COMMIT_BRANCH == "master" && $CI_PROJECT_NAMESPACE == "ghc"'
-    - if: '$CI_MERGE_REQUEST_LABELS =~ /.*publish-docs.*/'
-
-  artifacts:
-    paths:
-      - public
-
-.x86_64-linux-ubuntu20_04-cross_wasm32-wasi-release:
-  stage: full-build
-  rules:
-    - when: always
-  tags:
-    - x86_64-linux
-  image: registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-ubuntu20_04:$DOCKER_REV
-  before_script:
-    - sudo chown ghc:ghc -R .
-  variables:
-    BIN_DIST_NAME: ghc-x86_64-linux-ubuntu20_04-cross_wasm32-wasi-int_$BIGNUM_BACKEND-release
-    BUILD_FLAVOUR: perf
-    CONFIGURE_ARGS: --with-intree-gmp --with-system-libffi
-    XZ_OPT: "-9"
-    CONF_CC_OPTS_STAGE2: -Wno-int-conversion -Wno-strict-prototypes -mnontrapping-fptoint -msign-ext -mbulk-memory -mmutable-globals -mreference-types
-    CONF_CXX_OPTS_STAGE2: -fno-exceptions -Wno-int-conversion -Wno-strict-prototypes -mnontrapping-fptoint -msign-ext -mbulk-memory -mmutable-globals -mreference-types
-    CONF_GCC_LINKER_OPTS_STAGE2: -Wl,--error-limit=0,--growable-table,--stack-first -Wno-unused-command-line-argument
-    CROSS_EMULATOR: wasmtime
-    CROSS_TARGET: wasm32-wasi
-    HADRIAN_ARGS: --docs=none
-    TEST_ENV: x86_64-linux-ubuntu20_04-cross_wasm32-wasi-int_$BIGNUM_BACKEND-release
-  script:
-    - |
-      pushd libraries/process
-      curl https://patch-diff.githubusercontent.com/raw/haskell/process/pull/240.diff | git apply
-      popd
-      pushd utils/hsc2hs
-      curl https://patch-diff.githubusercontent.com/raw/haskell/hsc2hs/pull/68.diff | git apply
-      popd
-
-      pushd "$(mktemp -d)"
-      curl -L https://gitlab.haskell.org/ghc/ghc-wasm-meta/-/archive/master/ghc-wasm-meta-master.tar.gz | tar xz --strip-components=1
-      PREFIX=/tmp/.ghc-wasm SKIP_GHC=1 ./setup.sh
-      source /tmp/.ghc-wasm/env
-      popd
-
-      .gitlab/ci.sh setup
-      .gitlab/ci.sh configure
-      .gitlab/ci.sh build_hadrian
-      .gitlab/ci.sh test_hadrian
-
-  after_script:
-    - cat ci-timings
-
-  artifacts:
-    expire_in: 1 year
-    paths:
-      - ghc-x86_64-linux-ubuntu20_04-cross_wasm32-wasi-int_$BIGNUM_BACKEND-release.tar.xz
-    when: always
+  when: manual
 
-x86_64-linux-ubuntu20_04-cross_wasm32-wasi-int_gmp-release:
-  extends: .x86_64-linux-ubuntu20_04-cross_wasm32-wasi-release
-  variables:
-    BIGNUM_BACKEND: gmp
 
-x86_64-linux-ubuntu20_04-cross_wasm32-wasi-int_native-release:
-  extends: .x86_64-linux-ubuntu20_04-cross_wasm32-wasi-release
-  variables:
-    BIGNUM_BACKEND: native
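
The `.ghcup-metadata-testing` jobs above only trigger the downstream `ghc/ghcup-ci` project and forward `UPSTREAM_PROJECT_PATH`, `UPSTREAM_PROJECT_ID`, `UPSTREAM_PIPELINE_ID` and `UPSTREAM_JOB_NAME`; the downstream side is not part of this patch. As a purely hypothetical sketch, a job in that project could use those variables with python-gitlab to locate the upstream metadata job:

```
# Hypothetical sketch only: the real ghc/ghcup-ci implementation is not part
# of this patch. Locate the upstream metadata job from the forwarded variables.
import os
import gitlab

gl = gitlab.Gitlab('https://gitlab.haskell.org')  # a token may be needed to read artifacts
project = gl.projects.get(int(os.environ['UPSTREAM_PROJECT_ID']), lazy=True)
pipeline = project.pipelines.get(int(os.environ['UPSTREAM_PIPELINE_ID']))

upstream_name = os.environ['UPSTREAM_JOB_NAME']   # e.g. "ghcup-metadata-nightly"
job = next(j for j in pipeline.jobs.list(all=True) if j.name == upstream_name)
print(f"upstream metadata job: {job.name} ({job.id})")
```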


=====================================
.gitlab/gen_ci.hs
=====================================
@@ -18,6 +18,7 @@ import Data.List (intercalate)
 import Data.Set (Set)
 import qualified Data.Set as S
 import System.Environment
+import Data.Maybe
 
 {-
 Note [Generating the CI pipeline]
@@ -336,6 +337,9 @@ instance (Ord k, Semigroup v) => Monoid (MonoidalMap k v) where
 mminsertWith :: Ord k => (a -> a -> a) -> k -> a -> MonoidalMap k a -> MonoidalMap k a
 mminsertWith f k v (MonoidalMap m) = MonoidalMap (Map.insertWith f k v m)
 
+mmlookup :: Ord k => k -> MonoidalMap k a -> Maybe a
+mmlookup k (MonoidalMap m) = Map.lookup k m
+
 type Variables = MonoidalMap String [String]
 
 (=:) :: String -> String -> Variables
@@ -566,6 +570,7 @@ data Job
         , jobArtifacts :: Artifacts
         , jobCache :: Cache
         , jobRules :: OnOffRules
+        , jobPlatform  :: (Arch, Opsys)
         }
 
 instance ToJSON Job where
@@ -589,9 +594,11 @@ instance ToJSON Job where
     ]
 
 -- | Build a job description from the system description and 'BuildConfig'
-job :: Arch -> Opsys -> BuildConfig -> (String, Job)
-job arch opsys buildConfig = (jobName, Job {..})
+job :: Arch -> Opsys -> BuildConfig -> NamedJob Job
+job arch opsys buildConfig = NamedJob { name = jobName, jobInfo = Job {..} }
   where
+    jobPlatform = (arch, opsys)
+
     jobRules = emptyRules
 
     jobName = testEnv arch opsys buildConfig
@@ -694,19 +701,19 @@ addVariable k v j = j { jobVariables = mminsertWith (++) k [v] (jobVariables j)
 -- Building the standard jobs
 --
 -- | Make a normal validate CI job
-validate :: Arch -> Opsys -> BuildConfig -> (String, Job)
+validate :: Arch -> Opsys -> BuildConfig -> NamedJob Job
 validate arch opsys bc =
   job arch opsys bc
 
 -- | Make a normal nightly CI job
 nightly arch opsys bc =
-  let (n, j) = job arch opsys bc
-  in ("nightly-" ++ n, addJobRule Nightly . keepArtifacts "8 weeks" . highCompression $ j)
+  let NamedJob n j = job arch opsys bc
+  in NamedJob { name = "nightly-" ++ n, jobInfo = addJobRule Nightly . keepArtifacts "8 weeks" . highCompression $ j}
 
 -- | Make a normal release CI job
 release arch opsys bc =
-  let (n, j) = job arch opsys (bc { buildFlavour = Release })
-  in ("release-" ++ n, addJobRule ReleaseOnly . keepArtifacts "1 year" . ignorePerfFailures . highCompression $ j)
+  let NamedJob n j = job arch opsys (bc { buildFlavour = Release })
+  in NamedJob { name = "release-" ++ n, jobInfo = addJobRule ReleaseOnly . keepArtifacts "1 year" . ignorePerfFailures . highCompression $ j}
 
 -- Specific job modification functions
 
@@ -749,17 +756,33 @@ addValidateRule t = modifyValidateJobs (addJobRule t)
 disableValidate :: JobGroup Job -> JobGroup Job
 disableValidate = addValidateRule Disable
 
+data NamedJob a = NamedJob { name :: String, jobInfo :: a } deriving Functor
+
+renameJob :: (String -> String) -> NamedJob a -> NamedJob a
+renameJob f (NamedJob n i) = NamedJob (f n) i
+
+instance ToJSON a => ToJSON (NamedJob a) where
+  toJSON nj = object
+    [ "name" A..= name nj
+    , "jobInfo" A..= jobInfo nj ]
+
 -- Jobs are grouped into either triples or pairs depending on whether the
 -- job is just validate and nightly, or also release.
-data JobGroup a = StandardTriple { v :: (String, a)
-                                 , n :: (String, a)
-                                 , r :: (String, a) }
-                | ValidateOnly   { v :: (String, a)
-                                 , n :: (String, a) } deriving Functor
+data JobGroup a = StandardTriple { v :: NamedJob a
+                                 , n :: NamedJob a
+                                 , r :: NamedJob a }
+                | ValidateOnly   { v :: NamedJob a
+                                 , n :: NamedJob a } deriving Functor
+
+instance ToJSON a => ToJSON (JobGroup a) where
+  toJSON jg = object
+    [ "n" A..= n jg
+    , "r" A..= r jg
+    ]
 
 rename :: (String -> String) -> JobGroup a -> JobGroup a
-rename f (StandardTriple (nv, v) (nn, n) (nr, r)) = StandardTriple (f nv, v) (f nn, n) (f nr, r)
-rename f (ValidateOnly (nv, v) (nn, n)) = ValidateOnly (f nv, v) (f nn, n)
+rename f (StandardTriple nv nn nr) = StandardTriple (renameJob f nv) (renameJob f nn) (renameJob f nr)
+rename f (ValidateOnly nv nn) = ValidateOnly (renameJob f nv) (renameJob f nn)
 
 -- | Construct a 'JobGroup' which consists of a validate, nightly and release build with
 -- a specific config.
@@ -780,13 +803,19 @@ validateBuilds :: Arch -> Opsys -> BuildConfig -> JobGroup Job
 validateBuilds a op bc = ValidateOnly (validate a op bc) (nightly a op bc)
 
 flattenJobGroup :: JobGroup a -> [(String, a)]
-flattenJobGroup (StandardTriple a b c) = [a,b,c]
-flattenJobGroup (ValidateOnly a b) = [a, b]
+flattenJobGroup (StandardTriple a b c) = map flattenNamedJob [a,b,c]
+flattenJobGroup (ValidateOnly a b) = map flattenNamedJob [a, b]
+
+flattenNamedJob :: NamedJob a -> (String, a)
+flattenNamedJob (NamedJob n i) = (n, i)
 
 
 -- | Specification for all the jobs we want to build.
 jobs :: Map String Job
-jobs = Map.fromList $ concatMap flattenJobGroup $
+jobs = Map.fromList $ concatMap flattenJobGroup job_groups
+
+job_groups :: [JobGroup Job]
+job_groups =
      [ disableValidate (standardBuilds Amd64 (Linux Debian10))
      , (standardBuildsWithConfig Amd64 (Linux Debian10) dwarf)
      , (validateBuilds Amd64 (Linux Debian10) nativeInt)
@@ -826,7 +855,6 @@ jobs = Map.fromList $ concatMap flattenJobGroup $
 
   where
     hackage_doc_job = rename (<> "-hackage") . modifyJobs (addVariable "HADRIAN_ARGS" "--haddock-base-url")
-
     tsan_jobs =
       modifyJobs
         ( addVariable "TSAN_OPTIONS" "suppressions=$CI_PROJECT_DIR/rts/.tsan-suppressions"
@@ -835,10 +863,57 @@ jobs = Map.fromList $ concatMap flattenJobGroup $
         . addVariable "HADRIAN_ARGS" "--docs=none") $
       validateBuilds Amd64 (Linux Debian10) tsan
 
+
+mkPlatform :: Arch -> Opsys -> String
+mkPlatform arch opsys = archName arch <> "-" <> opsysName opsys
+
+-- | This map tells us, for a specific arch/opsys combination, which job names the
+-- nightly/release pipelines use. This is used by the ghcup metadata generation so that
+-- things like bindist names etc. are kept in sync.
+--
+-- For cases where several job groups target the same platform, an explicit
+-- whitelist below selects the one to use.
+--
+-- Otherwise:
+--  * Prefer jobs which have a corresponding release pipeline
+--  * Explicitly require tie-breaking for other cases.
+platform_mapping :: Map String (JobGroup BindistInfo)
+platform_mapping = Map.map go $
+  Map.fromListWith combine [ (uncurry mkPlatform (jobPlatform (jobInfo $ v j)), j) | j <- job_groups ]
+  where
+    whitelist = [ "x86_64-linux-alpine3_12-int_native-validate+fully_static"
+                , "x86_64-linux-deb10-validate"
+                , "x86_64-linux-fedora33-release"
+                , "x86_64-windows-validate"
+                ]
+
+    combine a b
+      | name (v a) `elem` whitelist = a -- Explicitly selected
+      | name (v b) `elem` whitelist = b
+      | hasReleaseBuild a, not (hasReleaseBuild b) = a -- Has release build, but other doesn't
+      | hasReleaseBuild b, not (hasReleaseBuild a) = b
+      | otherwise = error (show (name (v a)) ++ show (name (v b)))
+
+    go = fmap (BindistInfo . unwords . fromJust . mmlookup "BIN_DIST_NAME" . jobVariables)
+
+    hasReleaseBuild (StandardTriple{}) = True
+    hasReleaseBuild (ValidateOnly{}) = False
+
+data BindistInfo = BindistInfo { bindistName :: String }
+
+instance ToJSON BindistInfo where
+  toJSON (BindistInfo n) = object [ "bindistName" A..= n ]
+
+
 main = do
-  as <- getArgs
+  ass <- getArgs
+  case ass of
+    ("gitlab":as) -> write_result as jobs
+    ("metadata":as) -> write_result as platform_mapping
+    _ -> error "gen_ci.hs <gitlab|metadata> [file.json]"
+
+write_result as obj =
   (case as of
     [] -> B.putStrLn
     (fp:_) -> B.writeFile fp)
-    (A.encode jobs)
+    (A.encode obj)
 


=====================================
.gitlab/generate_jobs
=====================================
@@ -3,7 +3,12 @@
 
 cd "$(dirname "${BASH_SOURCE[0]}")"
 tmp=$(mktemp)
-./gen_ci.hs $tmp
+./gen_ci.hs gitlab $tmp
 rm -f jobs.yaml
 echo "### THIS IS A GENERATED FILE, DO NOT MODIFY DIRECTLY" > jobs.yaml
 cat $tmp | jq | tee -a jobs.yaml
+
+tmp=$(mktemp)
+./gen_ci.hs metadata $tmp
+rm -f jobs-metadata.json
+cat $tmp | jq | tee -a jobs-metadata.json


=====================================
.gitlab/jobs-metadata.json
=====================================
@@ -0,0 +1,184 @@
+{
+  "aarch64-darwin": {
+    "n": {
+      "jobInfo": {
+        "bindistName": "ghc-aarch64-darwin-validate"
+      },
+      "name": "nightly-aarch64-darwin-validate"
+    },
+    "r": {
+      "jobInfo": {
+        "bindistName": "ghc-aarch64-darwin-release"
+      },
+      "name": "release-aarch64-darwin-release"
+    }
+  },
+  "aarch64-linux-deb10": {
+    "n": {
+      "jobInfo": {
+        "bindistName": "ghc-aarch64-linux-deb10-validate"
+      },
+      "name": "nightly-aarch64-linux-deb10-validate"
+    },
+    "r": {
+      "jobInfo": {
+        "bindistName": "ghc-aarch64-linux-deb10-release+no_split_sections"
+      },
+      "name": "release-aarch64-linux-deb10-release+no_split_sections"
+    }
+  },
+  "i386-linux-deb9": {
+    "n": {
+      "jobInfo": {
+        "bindistName": "ghc-i386-linux-deb9-validate"
+      },
+      "name": "nightly-i386-linux-deb9-validate"
+    },
+    "r": {
+      "jobInfo": {
+        "bindistName": "ghc-i386-linux-deb9-release+no_split_sections"
+      },
+      "name": "release-i386-linux-deb9-release+no_split_sections"
+    }
+  },
+  "x86_64-darwin": {
+    "n": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-darwin-validate"
+      },
+      "name": "nightly-x86_64-darwin-validate"
+    },
+    "r": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-darwin-release"
+      },
+      "name": "release-x86_64-darwin-release"
+    }
+  },
+  "x86_64-freebsd13": {
+    "n": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-freebsd13-validate"
+      },
+      "name": "nightly-x86_64-freebsd13-validate"
+    },
+    "r": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-freebsd13-release"
+      },
+      "name": "release-x86_64-freebsd13-release"
+    }
+  },
+  "x86_64-linux-alpine3_12": {
+    "n": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-linux-alpine3_12-int_native-validate+fully_static"
+      },
+      "name": "nightly-x86_64-linux-alpine3_12-int_native-validate+fully_static"
+    },
+    "r": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-linux-alpine3_12-int_native-release+fully_static"
+      },
+      "name": "release-x86_64-linux-alpine3_12-int_native-release+fully_static"
+    }
+  },
+  "x86_64-linux-centos7": {
+    "n": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-linux-centos7-validate"
+      },
+      "name": "nightly-x86_64-linux-centos7-validate"
+    },
+    "r": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-linux-centos7-release+no_split_sections"
+      },
+      "name": "release-x86_64-linux-centos7-release+no_split_sections"
+    }
+  },
+  "x86_64-linux-deb10": {
+    "n": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-linux-deb10-validate"
+      },
+      "name": "nightly-x86_64-linux-deb10-validate"
+    },
+    "r": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-linux-deb10-release"
+      },
+      "name": "release-x86_64-linux-deb10-release"
+    }
+  },
+  "x86_64-linux-deb11": {
+    "n": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-linux-deb11-validate"
+      },
+      "name": "nightly-x86_64-linux-deb11-validate"
+    },
+    "r": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-linux-deb11-release"
+      },
+      "name": "release-x86_64-linux-deb11-release"
+    }
+  },
+  "x86_64-linux-deb9": {
+    "n": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-linux-deb9-validate"
+      },
+      "name": "nightly-x86_64-linux-deb9-validate"
+    },
+    "r": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-linux-deb9-release+no_split_sections"
+      },
+      "name": "release-x86_64-linux-deb9-release+no_split_sections"
+    }
+  },
+  "x86_64-linux-fedora33": {
+    "n": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-linux-fedora33-release"
+      },
+      "name": "nightly-x86_64-linux-fedora33-release"
+    },
+    "r": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-linux-fedora33-release"
+      },
+      "name": "release-x86_64-linux-fedora33-release"
+    }
+  },
+  "x86_64-linux-ubuntu20_04": {
+    "n": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-linux-ubuntu20_04-validate"
+      },
+      "name": "nightly-x86_64-linux-ubuntu20_04-validate"
+    },
+    "r": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-linux-ubuntu20_04-release"
+      },
+      "name": "release-x86_64-linux-ubuntu20_04-release"
+    }
+  },
+  "x86_64-windows": {
+    "n": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-windows-validate"
+      },
+      "name": "nightly-x86_64-windows-validate"
+    },
+    "r": {
+      "jobInfo": {
+        "bindistName": "ghc-x86_64-windows-release+no_split_sections"
+      },
+      "name": "release-x86_64-windows-release+no_split_sections"
+    }
+  }
+}
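
`jobs-metadata.json` is keyed by platform, and each entry carries an `n` (nightly) and `r` (release) record naming the job and its bindist, mirroring `platform_mapping` in `gen_ci.hs` above. A minimal sketch (not part of the patch) of how the mapping is meant to be consumed, much like `mk_from_platform` in the script below:

```
# Minimal sketch (not part of the patch): look up which job and bindist serve
# a platform on a given pipeline type, as mk_from_platform below does.
import json

with open('.gitlab/jobs-metadata.json') as f:
    job_mapping = json.load(f)

pipeline_type = 'r'  # 'n' = nightly pipeline, 'r' = release pipeline
info = job_mapping['x86_64-linux-fedora33'][pipeline_type]
print(info['name'])                    # release-x86_64-linux-fedora33-release
print(info['jobInfo']['bindistName'])  # ghc-x86_64-linux-fedora33-release
```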


=====================================
.gitlab/mk_ghcup_metadata.py
=====================================
@@ -0,0 +1,283 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i python3 -p curl  "python3.withPackages (ps:[ps.pyyaml ps.python-gitlab ])"
+
+"""
+A tool for generating metadata suitable for GHCUp
+
+There are two ways to prepare metadata:
+
+* From a nightly/release pipeline, pointing at the job artifacts produced by
+  that pipeline
+* In release mode (`--release-mode`), pointing at the final bindist locations
+  under downloads.haskell.org
+
+"""
+
+from subprocess import run, check_call
+from getpass import getpass
+import shutil
+from pathlib import Path
+from typing import NamedTuple, Callable, List, Dict, Optional
+import tempfile
+import re
+import pickle
+import os
+import yaml
+import gitlab
+from urllib.request import urlopen
+import hashlib
+import sys
+import json
+import urllib.parse
+import fetch_gitlab_artifacts
+
+def eprint(*args, **kwargs):
+    print(*args, file=sys.stderr, **kwargs)
+
+
+WORK_DIR = Path('.upload-libs')
+WORK_DIR.mkdir(exist_ok=True)
+OUT_DIR = WORK_DIR / 'docs'
+OUT_DIR.mkdir(exist_ok=True)
+
+ghcup_metadata_url = "https://raw.githubusercontent.com/haskell/ghcup-metadata/develop/ghcup-0.0.7.yaml"
+
+gl = gitlab.Gitlab('https://gitlab.haskell.org', per_page=100, private_token="glpat-9cshB5PFxNXs-o7LdgKB")
+
+
+metadata_file = ".gitlab/jobs-metadata.json"
+
+release_base = "https://downloads.haskell.org/~ghc/{version}/{bindistName}"
+
+eprint(f"Reading job metadata from {metadata_file}.")
+with open(metadata_file, 'r') as f:
+  job_mapping = json.load(f)
+
+eprint(f"Supported platforms: {job_mapping.keys()}")
+#gl.enable_debug()
+
+
+# An Artifact precisely specifies a job and what the bindist to download from it is called.
+class Artifact(NamedTuple):
+    job_name: str
+    name: str
+    subdir: str
+
+# Platform spec provides a specification which is agnostic to Job
+class PlatformSpec(NamedTuple):
+    name: str
+    subdir: str
+
+
+source_artifact = Artifact('source-tarball', 'ghc-{version}-src.tar.xz', 'ghc-{version}' )
+
+def debian(arch, n):
+    return linux_platform("{arch}-linux-deb{n}".format(arch=arch, n=n))
+
+def darwin(arch):
+    return PlatformSpec ( '{arch}-darwin'.format(arch=arch)
+                        , 'ghc-{version}-x86_64-unknown-darwin' )
+
+windowsArtifact = PlatformSpec ( 'x86_64-windows'
+                               , 'ghc-{version}-x86_64-unknown-mingw' )
+
+def centos(n):
+    return linux_platform("x86_64-linux-centos{n}".format(n=n))
+
+def fedora(n):
+    return linux_platform("x86_64-linux-fedora{n}".format(n=n))
+
+def alpine(n):
+    return linux_platform("x86_64-linux-alpine{n}".format(n=n))
+
+def linux_platform(opsys):
+    return PlatformSpec( opsys, 'ghc-{version}-x86_64-unknown-linux' )
+
+
+base_url = 'https://gitlab.haskell.org/ghc/ghc/-/jobs/{job_id}/artifacts/raw/{artifact_name}'
+
+
+hash_cache = {}
+
+def download_and_hash(url):
+    if url in hash_cache: return hash_cache[url]
+    eprint ("Opening {}".format(url))
+    response = urlopen(url)
+    sz = response.headers['content-length']
+    hasher = hashlib.sha256()
+    CHUNK = 2**22
+    for n,text in enumerate(iter(lambda: response.read(CHUNK), b'')):
+        if not text: break
+        eprint("{:.2f}% {} / {} of {}".format (((n + 1) * CHUNK) / int(sz) * 100, (n + 1) * CHUNK, sz, url))
+        hasher.update(text)
+    digest = hasher.hexdigest()
+    hash_cache[url] = digest
+    return digest
+
+# Copied from fetch_gitlab.py for now
+#def job_triple(job_name):
+#    bindists = {
+#        'release-x86_64-windows-release': 'x86_64-unknown-mingw32',
+#        'release-x86_64-windows-int_native-release': 'x86_64-unknown-mingw32-int_native',
+#        'release-x86_64-ubuntu20_04-release': 'x86_64-ubuntu20_04-linux',
+#        'release-x86_64-linux-fedora33-release+debug_info': 'x86_64-fedora33-linux-dwarf',
+#        'release-x86_64-linux-fedora33-release': 'x86_64-fedora33-linux',
+#        'release-x86_64-linux-fedora27-release': 'x86_64-fedora27-linux',
+#        'release-x86_64-linux-deb11-release': 'x86_64-deb11-linux',
+#        'release-x86_64-linux-deb10-release+debug_info': 'x86_64-deb10-linux-dwarf',
+#        'release-x86_64-linux-deb10-release': 'x86_64-deb10-linux',
+#        'release-x86_64-linux-deb9-release': 'x86_64-deb9-linux',
+#        'release-x86_64-linux-centos7-release': 'x86_64-centos7-linux',
+#        'release-x86_64-linux-alpine3_12-release+fully_static': 'x86_64-alpine3_12-linux-static',
+#        'release-x86_64-linux-alpine3_12-int_native-release+fully_static': 'x86_64-alpine3_12-linux-static-int_native',
+#        'release-x86_64-darwin-release': 'x86_64-apple-darwin',
+#        'release-i386-linux-deb9-release': 'i386-deb9-linux',
+#        'release-armv7-linux-deb10-release': 'armv7-deb10-linux',
+#        'release-aarch64-linux-deb10-release': 'aarch64-deb10-linux',
+#        'release-aarch64-darwin-release': 'aarch64-apple-darwin',
+#
+#        'source-tarball': 'src',
+#        'package-hadrian-bootstrap-sources': 'hadrian-bootstrap-sources',
+#        'doc-tarball': 'docs',
+#        'hackage-doc-tarball': 'hackage_docs',
+#    }
+#
+#    # Some bindists use the +no_split_sections transformer due to upstream
+#    # toolchain bugs.
+#    bindists.update({
+#        f'{k}+no_split_sections': v
+#        for k,v in bindists.items()
+#    })
+#
+#    return bindists[job_name]
+
+
+def mk_one_metadata(release_mode, version, job_map, artifact):
+    job_id = job_map[artifact.job_name].id
+
+    url = base_url.format(job_id=job_id, artifact_name=urllib.parse.quote_plus(artifact.name.format(version=version)))
+
+
+    if release_mode:
+        final_url = release_base.format( version=version
+                                       , bindistName=urllib.parse.quote_plus(f"{fetch_gitlab_artifacts.job_triple(artifact.job_name)}.tar.xz"))
+    else:
+        final_url = url
+
+    eprint(f"Making metadata for: {artifact}")
+    eprint(f"Bindist URL: {url}")
+    eprint(f"Download URL: {final_url}")
+
+    # Download and hash from the release pipeline; the artifact must not change during upload anyway.
+    h = download_and_hash(url)
+
+    res = { "dlUri": final_url, "dlSubdir": artifact.subdir.format(version=version), "dlHash" : h }
+    eprint(res)
+    return res
+
+# Turns a platform into an Artifact respecting pipeline_type
+def mk_from_platform(pipeline_type, platform):
+    info = job_mapping[platform.name][pipeline_type]
+    eprint(f"From {platform.name} / {pipeline_type} selecting {info['name']}")
+    return Artifact(info['name'] , f"{info['jobInfo']['bindistName']}.tar.xz", platform.subdir)
+
+
+
+def mk_new_yaml(release_mode, version, pipeline_type, job_map):
+    def mk(platform):
+        eprint("\n=== " + platform.name + " " + ('=' * (75 - len(platform.name))))
+        return mk_one_metadata(release_mode, version, job_map, mk_from_platform(pipeline_type, platform))
+
+    # Here are all the bindists we can distribute
+    centos7 = mk(centos(7))
+    fedora33 = mk(fedora(33))
+    darwin_x86 = mk(darwin("x86_64"))
+    darwin_arm64 = mk(darwin("aarch64"))
+    windows = mk(windowsArtifact)
+    alpine3_12 = mk(alpine("3_12"))
+    deb9 = mk(debian("x86_64", 9))
+    deb10 = mk(debian("x86_64", 10))
+    deb11 = mk(debian("x86_64", 11))
+    deb10_arm64 = mk(debian("aarch64", 10))
+    deb9_i386 = mk(debian("i386", 9))
+
+
+    source = mk_one_metadata(release_mode, version, job_map, source_artifact)
+
+    a64 = { "Linux_Debian": { "< 10": deb9
+                           , "(>= 10 && < 11)": deb10
+                           , ">= 11": deb11
+                           , "unknown_versioning": deb11 }
+          , "Linux_Ubuntu" : { "unknown_versioning": deb10
+                             , "( >= 16 && < 19 )": deb9
+                             }
+          , "Linux_Mint"   : { "< 20": deb9
+                             , ">= 20": deb10 }
+          , "Linux_CentOS"  : { "( >= 7 && < 8 )" : centos7
+                              , "unknown_versioning" : centos7  }
+          , "Linux_Fedora"  : { ">= 33": fedora33
+                              , "unknown_versioning": centos7 }
+          , "Linux_RedHat"  : { "unknown_versioning": centos7 }
+          , "Linux_UnknownLinux" : { "unknown_versioning": fedora33 }
+          , "Darwin" : { "unknown_versioning" : darwin_x86 }
+          , "Windows" : { "unknown_versioning" :  windows }
+          , "Linux_Alpine" : { "unknown_versioning": alpine3_12 }
+
+          }
+
+    a32 = { "Linux_Debian": { "<10": deb9_i386, "unknown_versioning": deb9_i386 }
+          , "Linux_Ubuntu": { "unknown_versioning": deb9_i386 }
+          , "Linux_Mint" : { "unknown_versioning": deb9_i386 }
+          , "Linux_UnknownLinux" : { "unknown_versioning": deb9_i386 }
+          }
+
+    arm64 = { "Linux_UnknownLinux": { "unknown_versioning": deb10_arm64 }
+            , "Darwin": { "unknown_versioning": darwin_arm64 }
+            }
+
+
+    return { "viTags": ["Latest", "TODO_base_version"]
+        , "viChangeLog": "https://downloads.haskell.org/~ghc/9.4.4/docs/users_guide/9.4.4-notes.html"
+        , "viSourceDL": source
+        , "viPostRemove": "*ghc-post-remove"
+        , "viArch": { "A_64": a64
+                    , "A_32": a32
+                    , "A_ARM64": arm64
+                    }
+        }
+
+
+def main() -> None:
+    import argparse
+
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument('--metadata', required=True, type=Path, help='Path to GHCUp metadata')
+    parser.add_argument('--pipeline-id', required=True, type=int, help='Which pipeline to generate metadata for')
+    parser.add_argument('--release-mode', action='store_true', help='Generate metadata which points to downloads folder')
+    parser.add_argument('--version', required=True, type=str, help='Version of the GHC compiler')
+    args = parser.parse_args()
+
+    project = gl.projects.get(1, lazy=True)
+    pipeline = project.pipelines.get(args.pipeline_id)
+    jobs = pipeline.jobs.list()
+    job_map = { job.name: job for job in jobs }
+    # Bit of a hacky way to determine what pipeline we are dealing with but
+    # the aarch64-darwin job should stay stable for a long time.
+    if 'nightly-aarch64-darwin-validate' in job_map:
+        pipeline_type = 'n'
+        if args.release_mode:
+            raise Exception("Incompatible arguments: nightly pipeline but using --release-mode")
+
+    elif 'release-aarch64-darwin-release' in job_map:
+        pipeline_type = 'r'
+    else:
+        raise Exception("Not a nightly nor release pipeline")
+    eprint(f"Pipeline Type: {pipeline_type}")
+
+    with open(args.metadata, 'r') as file:
+        ghcup_metadata = yaml.safe_load(file)
+        ghcup_metadata['ghcupDownloads']['GHC'][args.version] = mk_new_yaml(args.release_mode, args.version, pipeline_type, job_map)
+        print(yaml.dump(ghcup_metadata))
+
+
+if __name__ == '__main__':
+    main()
+
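
Each entry built by `mk_one_metadata` has the `dlUri`/`dlSubdir`/`dlHash` shape GHCup expects, and `mk_new_yaml` arranges those entries by architecture and distribution under `viArch` before the result is merged into `ghcupDownloads.GHC.<version>`. For orientation, a sketch of that structure with placeholder URLs and hashes (most platforms omitted):

```
# Illustrative only: the rough shape of the value mk_new_yaml above returns.
# URLs and hashes are placeholders, and viChangeLog/viPostRemove plus most
# platform entries are omitted.
example_version_entry = {
    "viTags": ["Latest", "TODO_base_version"],
    "viSourceDL": {
        "dlUri": "https://.../ghc-9.4.4-src.tar.xz",      # placeholder
        "dlSubdir": "ghc-9.4.4",
        "dlHash": "<sha256>",
    },
    "viArch": {
        "A_64": {
            "Linux_Fedora": {
                ">= 33": {
                    "dlUri": "https://.../fedora33-bindist.tar.xz",  # placeholder
                    "dlSubdir": "ghc-9.4.4-x86_64-unknown-linux",
                    "dlHash": "<sha256>",
                },
            },
        },
    },
}
```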


=====================================
.gitlab/rel_eng/default.nix
=====================================
@@ -0,0 +1,56 @@
+let sources = import ./nix/sources.nix; in
+
+{ nixpkgs ? (import sources.nixpkgs {}) }:
+
+with nixpkgs;
+let
+  fetch-gitlab-artifacts = nixpkgs.callPackage ./fetch-gitlab-artifacts {};
+  mk-ghcup-metadata = nixpkgs.callPackage ./mk-ghcup-metadata { fetch-gitlab=fetch-gitlab-artifacts;};
+
+
+  bindistPrepEnv = pkgs.buildFHSUserEnv {
+    name = "enter-fhs";
+    targetPkgs = pkgs: with pkgs; [
+      # all
+      gcc binutils gnumake gmp ncurses5 git elfutils
+      # source-release.sh
+      xorg.lndir curl python3 which automake autoconf m4 file
+      haskell.compiler.ghc8107 haskellPackages.happy haskellPackages.alex
+    ];
+    runScript = "$SHELL -x";
+  };
+
+  scripts = stdenv.mkDerivation {
+    name = "rel-eng-scripts";
+    nativeBuildInputs = [ makeWrapper ];
+    preferLocalBuild = true;
+    buildCommand = ''
+      mkdir -p $out/bin
+
+      makeWrapper ${./upload.sh} $out/bin/upload.sh \
+        --prefix PATH : ${moreutils}/bin \
+        --prefix PATH : ${lftp}/bin \
+        --prefix PATH : ${lzip}/bin \
+        --prefix PATH : ${zip}/bin \
+        --prefix PATH : ${s3cmd}/bin \
+        --prefix PATH : ${gnupg}/bin \
+        --prefix PATH : ${pinentry}/bin \
+        --prefix PATH : ${parallel}/bin \
+        --prefix PATH : ${python3}/bin \
+        --set ENTER_FHS_ENV ${bindistPrepEnv}/bin/enter-fhs \
+        --set BASH ${bash}/bin/bash
+
+      makeWrapper ${./upload_ghc_libs.py} $out/bin/upload-ghc-libs
+    '';
+  };
+
+in
+  symlinkJoin {
+    name = "ghc-rel-eng";
+    preferLocalBuild = true;
+    paths = [
+      scripts
+      fetch-gitlab-artifacts
+      mk-ghcup-metadata
+    ];
+  }


=====================================
.gitlab/rel_eng/fetch-gitlab-artifacts/.gitignore
=====================================
@@ -0,0 +1,3 @@
+result
+fetch-gitlab
+out


=====================================
.gitlab/rel_eng/fetch-gitlab-artifacts/README.mkd
=====================================
@@ -0,0 +1,23 @@
+# fetch-gitlab-artifacts
+
+This script is used to fetch and rename GHC binary distributions from GitLab
+Pipelines for upload to `downloads.haskell.org`.
+
+## Workflow
+
+1. Configure a `python-gitlab` profile for <https://gitlab.haskell.org/>:
+   ```
+   $ cat > $HOME/.python-gitlab.cfg <<EOF
+   [haskell]
+   url = https://gitlab.haskell.org/
+   private_token = $PRIVATE_GITLAB_TOKEN
+   ssl_verify = true
+   api_version = 4
+   EOF
+   ```
+1. Push a release tag to ghc/ghc
+1. Wait until the CI pipeline completes
+1. Run `fetch-gitlab -p $PIPELINE_ID -r $RELEASE_NAME` where `$PIPELINE_ID` is
+   the ID of the GitLab release pipeline and `$RELEASE_NAME` is the name of the
+   GHC release (e.g. `8.8.1` or `8.8.1-alpha1`)
+1. The binary distributions will be placed in the `out` directory.
\ No newline at end of file
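
For reference, the same fetch can also be driven from Python rather than the `fetch-gitlab` console script; a minimal sketch using `fetch_artifacts` from `fetch_gitlab.py` (the release name and pipeline id below are placeholders):

```
# Minimal sketch: drive the artifact fetch from Python. The release name and
# pipeline id are placeholders; 'haskell' is the profile configured above.
from pathlib import Path
import gitlab
import fetch_gitlab

gl = gitlab.Gitlab.from_config('haskell')
fetch_gitlab.fetch_artifacts('9.4.4',            # release name
                             123456,             # release pipeline id
                             dest_dir=Path('out'),
                             gl=gl)
```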


=====================================
.gitlab/rel_eng/fetch-gitlab-artifacts/default.nix
=====================================
@@ -0,0 +1,13 @@
+{ nix-gitignore, python3Packages, unzip }:
+
+let
+  fetch-gitlab = { buildPythonPackage, python-gitlab, unzip }:
+    buildPythonPackage {
+      pname = "fetch-gitlab";
+      version = "0.0.1";
+      src = nix-gitignore.gitignoreSource [] ./.;
+      propagatedBuildInputs = [ python3Packages.python-gitlab unzip ];
+      preferLocalBuild = true;
+    };
+in 
+python3Packages.callPackage fetch-gitlab { inherit unzip; }


=====================================
.gitlab/rel_eng/fetch-gitlab-artifacts/fetch_gitlab.py
=====================================
@@ -0,0 +1,145 @@
+import logging
+from pathlib import Path
+import subprocess
+import gitlab
+import json
+
+logging.basicConfig(level=logging.INFO)
+
+def strip_prefix(s, prefix):
+    if s.startswith(prefix):
+        return s[len(prefix):]
+    else:
+        return None
+
+def job_triple(job_name):
+    bindists = {
+        'release-x86_64-windows-release': 'x86_64-unknown-mingw32',
+        'release-x86_64-windows-int_native-release': 'x86_64-unknown-mingw32-int_native',
+        'release-x86_64-ubuntu20_04-release': 'x86_64-ubuntu20_04-linux',
+        'release-x86_64-linux-fedora33-release+debug_info': 'x86_64-fedora33-linux-dwarf',
+        'release-x86_64-linux-fedora33-release': 'x86_64-fedora33-linux',
+        'release-x86_64-linux-fedora27-release': 'x86_64-fedora27-linux',
+        'release-x86_64-linux-deb11-release': 'x86_64-deb11-linux',
+        'release-x86_64-linux-deb10-release+debug_info': 'x86_64-deb10-linux-dwarf',
+        'release-x86_64-linux-deb10-release': 'x86_64-deb10-linux',
+        'release-x86_64-linux-deb9-release': 'x86_64-deb9-linux',
+        'release-x86_64-linux-centos7-release': 'x86_64-centos7-linux',
+        'release-x86_64-linux-alpine3_12-release+fully_static': 'x86_64-alpine3_12-linux-static',
+        'release-x86_64-linux-alpine3_12-int_native-release+fully_static': 'x86_64-alpine3_12-linux-static-int_native',
+        'release-x86_64-darwin-release': 'x86_64-apple-darwin',
+        'release-i386-linux-deb9-release': 'i386-deb9-linux',
+        'release-armv7-linux-deb10-release': 'armv7-deb10-linux',
+        'release-aarch64-linux-deb10-release': 'aarch64-deb10-linux',
+        'release-aarch64-darwin-release': 'aarch64-apple-darwin',
+
+        'source-tarball': 'src',
+        'package-hadrian-bootstrap-sources': 'hadrian-bootstrap-sources',
+        'doc-tarball': 'docs',
+        'hackage-doc-tarball': 'hackage_docs',
+    }
+
+    # Some bindists use the +no_split_sections transformer due to upstream
+    # toolchain bugs.
+    bindists.update({
+        f'{k}+no_split_sections': v
+        for k,v in bindists.items()
+    })
+
+    if job_name in bindists:
+        return bindists[job_name]
+    else:
+        #return strip_prefix(job.name, 'validate-')
+        return None
+
+def fetch_artifacts(release: str, pipeline_id: int,
+                    dest_dir: Path, gl: gitlab.Gitlab):
+    dest_dir.mkdir(exist_ok=True)
+    # Write the pipeline id into output directory
+    with open(f"{dest_dir}/metadata.json", 'w') as out: json.dump({ "pipeline_id": pipeline_id }, out)
+
+    proj = gl.projects.get('ghc/ghc')
+    pipeline = proj.pipelines.get(pipeline_id)
+    tmpdir = Path("fetch-gitlab")
+    tmpdir.mkdir(exist_ok=True)
+    for pipeline_job in pipeline.jobs.list(all=True):
+        if len(pipeline_job.artifacts) == 0:
+            logging.info(f'job {pipeline_job.name} ({pipeline_job.id}) has no artifacts')
+            continue
+
+        job = proj.jobs.get(pipeline_job.id)
+        triple = job_triple(job.name)
+        if triple is None:
+            logging.info(f'ignoring {job.name}')
+            continue
+
+        #artifactZips = [ artifact
+        #                 for artifact in job.artifacts
+        #                 if artifact['filename'] == 'artifacts.zip' ]
+        try:
+            destdir = tmpdir / job.name
+            zip_name = Path(f"{tmpdir}/{job.name}.zip")
+            if not zip_name.exists() or zip_name.stat().st_size == 0:
+                logging.info(f'downloading archive {zip_name} for job {job.name} (job {job.id})...')
+                with open(zip_name, 'wb') as f:
+                    job.artifacts(streamed=True, action=f.write)
+
+            if zip_name.stat().st_size == 0:
+                logging.info(f'artifact archive for job {job.name} (job {job.id}) is empty')
+                continue
+
+
+            subprocess.run(['unzip', '-bo', zip_name, '-d', destdir])
+            bindist_files = list(destdir.glob('ghc*.tar.xz'))
+
+            if job.name == 'source-tarball':
+                for f in bindist_files:
+                    dest = dest_dir / f.name
+                    logging.info(f'extracted {job.name} to {dest}')
+                    f.replace(dest)
+            elif job.name == 'package-hadrian-bootstrap-sources':
+                all_bootstrap_sources = destdir / 'hadrian-bootstrap-sources-all.tar.gz'
+                dest = dest_dir / 'hadrian-bootstrap-sources'
+                dest.mkdir()
+                subprocess.run(['tar', '-xf', all_bootstrap_sources, '-C', dest])
+                logging.info(f'extracted {job.name}/{all_bootstrap_sources} to {dest}')
+            elif job.name == 'doc-tarball':
+                dest = dest_dir / 'docs'
+                dest.mkdir()
+                doc_files = list(destdir.glob('*.tar.xz'))
+                for f in doc_files:
+                    subprocess.run(['tar', '-xf', f, '-C', dest])
+                    logging.info(f'extracted docs {f} to {dest}')
+                index_path = destdir / 'index.html'
+                index_path.replace(dest / 'index.html')
+            elif job.name == 'hackage-doc-tarball':
+                dest = dest_dir / 'hackage_docs'
+                logging.info(f'moved hackage_docs to {dest}')
+                (destdir / 'hackage_docs').replace(dest)
+            else:
+                dest = dest_dir / f'ghc-{release}-{triple}.tar.xz'
+                if dest.exists():
+                    logging.info(f'bindist {dest} already exists')
+                    continue
+                if len(bindist_files) == 0:
+                    logging.warning('Bindist does not exist')
+                    continue
+                bindist = bindist_files[0]
+                logging.info(f'extracted {job.name} to {dest}')
+                bindist.replace(dest)
+        except Exception as e:
+            logging.error(f'Error fetching job {job.name}: {e}')
+            pass
+
+def main():
+    import argparse
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--pipeline', '-p', required=True, type=int, help="pipeline id")
+    parser.add_argument('--release', '-r', required=True, type=str, help="release name")
+    parser.add_argument('--output', '-o', type=Path, default=Path.cwd(), help="output directory")
+    parser.add_argument('--profile', '-P', default='haskell',
+                        help='python-gitlab.cfg profile name')
+    args = parser.parse_args()
+    gl = gitlab.Gitlab.from_config(args.profile)
+    fetch_artifacts(args.release, args.pipeline,
+                    dest_dir=args.output, gl=gl)
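
The `job_triple` table also acts as a filter: pipeline jobs without an entry are skipped, and the triple determines the final bindist file name under the output directory. A small sketch (not part of the patch) of that mapping:

```
# Small sketch (not part of the patch): how job names translate into the
# renamed bindist file names produced by fetch_artifacts.
import fetch_gitlab

release = '9.4.4'  # placeholder release name
for job_name in ['release-x86_64-linux-fedora33-release',
                 'release-aarch64-linux-deb10-release+no_split_sections',
                 'nightly-x86_64-linux-deb10-validate']:
    triple = fetch_gitlab.job_triple(job_name)
    if triple is None:
        print(f'{job_name}: ignored')
    else:
        print(f'{job_name}: ghc-{release}-{triple}.tar.xz')
```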


=====================================
.gitlab/rel_eng/fetch-gitlab-artifacts/setup.py
=====================================
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+
+from distutils.core import setup
+
+setup(name='fetch-gitlab',
+      author='Ben Gamari',
+      author_email='ben at smart-cactus.org',
+      py_modules=['fetch_gitlab'],
+      entry_points={
+          'console_scripts': [
+              'fetch-gitlab=fetch_gitlab:main',
+          ]
+      }
+     )


=====================================
.gitlab/rel_eng/mk-ghcup-metadata/.gitignore
=====================================
@@ -0,0 +1,3 @@
+result
+fetch-gitlab
+out


=====================================
.gitlab/rel_eng/mk-ghcup-metadata/README.mkd
=====================================
@@ -0,0 +1,23 @@
+# mk-ghcup-metadata
+
+This script is used to generate GHCup metadata describing the GHC binary
+distributions produced by a nightly or release pipeline. It reads the
+platform/job mapping from `.gitlab/jobs-metadata.json`.
+
+## Workflow
+
+1. Wait until the CI pipeline completes
+1. Run `ghcup-metadata --metadata ghcup-0.0.7.yaml --pipeline-id $PIPELINE_ID --version $VERSION`
+   where `$PIPELINE_ID` is the ID of the GitLab nightly or release pipeline and
+   `$VERSION` is the GHC version being described. Pass `--release-mode` to make
+   the download URLs point at `downloads.haskell.org` rather than at the
+   pipeline's artifacts.
+1. The updated GHCup metadata is printed to stdout.
\ No newline at end of file


=====================================
.gitlab/rel_eng/mk-ghcup-metadata/default.nix
=====================================
@@ -0,0 +1,13 @@
+{ nix-gitignore, python3Packages, fetch-gitlab }:
+
+let
+  ghcup-metadata = { buildPythonPackage, python-gitlab, pyyaml }:
+    buildPythonPackage {
+      pname = "ghcup-metadata";
+      version = "0.0.1";
+      src = nix-gitignore.gitignoreSource [] ./.;
+      propagatedBuildInputs = [fetch-gitlab python-gitlab pyyaml ];
+      preferLocalBuild = true;
+    };
+in
+python3Packages.callPackage ghcup-metadata { }


=====================================
.gitlab/rel_eng/mk-ghcup-metadata/mk_ghcup_metadata.py
=====================================
@@ -0,0 +1,245 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i python3 -p curl  "python3.withPackages (ps:[ps.pyyaml ps.python-gitlab ])"
+
+"""
+A tool for generating metadata suitable for GHCUp
+
+There are two ways to prepare metadata:
+
+* From a nightly/release pipeline, pointing at the job artifacts produced by
+  that pipeline
+* In release mode (`--release-mode`), pointing at the final bindist locations
+  under downloads.haskell.org
+
+"""
+
+from subprocess import run, check_call
+from getpass import getpass
+import shutil
+from pathlib import Path
+from typing import NamedTuple, Callable, List, Dict, Optional
+import tempfile
+import re
+import pickle
+import os
+import yaml
+import gitlab
+from urllib.request import urlopen
+import hashlib
+import sys
+import json
+import urllib.parse
+import fetch_gitlab
+
+def eprint(*args, **kwargs):
+    print(*args, file=sys.stderr, **kwargs)
+
+
+WORK_DIR = Path('.upload-libs')
+WORK_DIR.mkdir(exist_ok=True)
+OUT_DIR = WORK_DIR / 'docs'
+OUT_DIR.mkdir(exist_ok=True)
+
+ghcup_metadata_url = "https://raw.githubusercontent.com/haskell/ghcup-metadata/develop/ghcup-0.0.7.yaml"
+
+gl = gitlab.Gitlab('https://gitlab.haskell.org', per_page=100, private_token="glpat-9cshB5PFxNXs-o7LdgKB")
+
+
+metadata_file = ".gitlab/jobs-metadata.json"
+
+release_base = "https://downloads.haskell.org/~ghc/{version}/{bindistName}"
+
+eprint(f"Reading job metadata from {metadata_file}.")
+with open(metadata_file, 'r') as f:
+  job_mapping = json.load(f)
+
+eprint(f"Supported platforms: {job_mapping.keys()}")
+#gl.enable_debug()
+
+
+# An Artifact precisely specifies a job and what the bindist to download from it is called.
+class Artifact(NamedTuple):
+    job_name: str
+    name: str
+    subdir: str
+
+# Platform spec provides a specification which is agnostic to Job
+class PlatformSpec(NamedTuple):
+    name: str
+    subdir: str
+
+
+source_artifact = Artifact('source-tarball', 'ghc-{version}-src.tar.xz', 'ghc-{version}' )
+
+def debian(arch, n):
+    return linux_platform("{arch}-linux-deb{n}".format(arch=arch, n=n))
+
+def darwin(arch):
+    return PlatformSpec ( '{arch}-darwin'.format(arch=arch)
+                        , 'ghc-{version}-x86_64-unknown-darwin' )
+
+windowsArtifact = PlatformSpec ( 'x86_64-windows'
+                               , 'ghc-{version}-x86_64-unknown-mingw' )
+
+def centos(n):
+    return linux_platform("x86_64-linux-centos{n}".format(n=n))
+
+def fedora(n):
+    return linux_platform("x86_64-linux-fedora{n}".format(n=n))
+
+def alpine(n):
+    return linux_platform("x86_64-linux-alpine{n}".format(n=n))
+
+def linux_platform(opsys):
+    return PlatformSpec( opsys, 'ghc-{version}-x86_64-unknown-linux' )
+
+
+base_url = 'https://gitlab.haskell.org/ghc/ghc/-/jobs/{job_id}/artifacts/raw/{artifact_name}'
+
+
+hash_cache = {}
+
+def download_and_hash(url):
+    if url in hash_cache: return hash_cache[url]
+    eprint ("Opening {}".format(url))
+    response = urlopen(url)
+    sz = response.headers['content-length']
+    hasher = hashlib.sha256()
+    CHUNK = 2**22
+    for n,text in enumerate(iter(lambda: response.read(CHUNK), b'')):
+        if not text: break
+        eprint("{:.2f}% {} / {} of {}".format (((n + 1) * CHUNK) / int(sz) * 100, (n + 1) * CHUNK, sz, url))
+        hasher.update(text)
+    digest = hasher.hexdigest()
+    hash_cache[url] = digest
+    return digest
+
+def mk_one_metadata(release_mode, version, job_map, artifact):
+    job_id = job_map[artifact.job_name].id
+
+    url = base_url.format(job_id=job_id, artifact_name=urllib.parse.quote_plus(artifact.name.format(version=version)))
+
+
+    if release_mode:
+        final_url = release_base.format( version=version
+                                       , bindistName=urllib.parse.quote_plus(f"{fetch_gitlab.job_triple(artifact.job_name)}.tar.xz"))
+    else:
+        final_url = url
+
+    eprint(f"Making metadata for: {artifact}")
+    eprint(f"Bindist URL: {url}")
+    eprint(f"Download URL: {final_url}")
+
+    # Download and hash from the release pipeline; the artifact must not change during upload anyway.
+    h = download_and_hash(url)
+
+    res = { "dlUri": final_url, "dlSubdir": artifact.subdir.format(version=version), "dlHash" : h }
+    eprint(res)
+    return res
+
+# Turns a platform into an Artifact respecting pipeline_type
+def mk_from_platform(pipeline_type, platform):
+    info = job_mapping[platform.name][pipeline_type]
+    eprint(f"From {platform.name} / {pipeline_type} selecting {info['name']}")
+    return Artifact(info['name'] , f"{info['jobInfo']['bindistName']}.tar.xz", platform.subdir)
+
+
+
+def mk_new_yaml(release_mode, version, pipeline_type, job_map):
+    def mk(platform):
+        eprint("\n=== " + platform.name + " " + ('=' * (75 - len(platform.name))))
+        return mk_one_metadata(release_mode, version, job_map, mk_from_platform(pipeline_type, platform))
+
+    # Here are all the bindists we can distribute
+    centos7 = mk(centos(7))
+    fedora33 = mk(fedora(33))
+    darwin_x86 = mk(darwin("x86_64"))
+    darwin_arm64 = mk(darwin("aarch64"))
+    windows = mk(windowsArtifact)
+    alpine3_12 = mk(alpine("3_12"))
+    deb9 = mk(debian("x86_64", 9))
+    deb10 = mk(debian("x86_64", 10))
+    deb11 = mk(debian("x86_64", 11))
+    deb10_arm64 = mk(debian("aarch64", 10))
+    deb9_i386 = mk(debian("i386", 9))
+
+
+    source = mk_one_metadata(release_mode, version, job_map, source_artifact)
+
+    a64 = { "Linux_Debian": { "< 10": deb9
+                           , "(>= 10 && < 11)": deb10
+                           , ">= 11": deb11
+                           , "unknown_versioning": deb11 }
+          , "Linux_Ubuntu" : { "unknown_versioning": deb10
+                             , "( >= 16 && < 19 )": deb9
+                             }
+          , "Linux_Mint"   : { "< 20": deb9
+                             , ">= 20": deb10 }
+          , "Linux_CentOS"  : { "( >= 7 && < 8 )" : centos7
+                              , "unknown_versioning" : centos7  }
+          , "Linux_Fedora"  : { ">= 33": fedora33
+                              , "unknown_versioning": centos7 }
+          , "Linux_RedHat"  : { "unknown_versioning": centos7 }
+          , "Linux_UnknownLinux" : { "unknown_versioning": fedora33 }
+          , "Darwin" : { "unknown_versioning" : darwin_x86 }
+          , "Windows" : { "unknown_versioning" :  windows }
+          , "Linux_Alpine" : { "unknown_versioning": alpine3_12 }
+
+          }
+
+    a32 = { "Linux_Debian": { "<10": deb9_i386, "unknown_versioning": deb9_i386 }
+          , "Linux_Ubuntu": { "unknown_versioning": deb9_i386 }
+          , "Linux_Mint" : { "unknown_versioning": deb9_i386 }
+          , "Linux_UnknownLinux" : { "unknown_versioning": deb9_i386 }
+          }
+
+    arm64 = { "Linux_UnknownLinux": { "unknown_versioning": deb10_arm64 }
+            , "Darwin": { "unknown_versioning": darwin_arm64 }
+            }
+
+
+    return { "viTags": ["Latest", "TODO_base_version"]
+        , "viChangeLog": "https://downloads.haskell.org/~ghc/9.4.4/docs/users_guide/9.4.4-notes.html"
+        , "viSourceDL": source
+        , "viPostRemove": "*ghc-post-remove"
+        , "viArch": { "A_64": a64
+                    , "A_32": a32
+                    , "A_ARM64": arm64
+                    }
+        }
+
+
+def main() -> None:
+    import argparse
+
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument('--metadata', required=True, type=Path, help='Path to GHCUp metadata')
+    parser.add_argument('--pipeline-id', required=True, type=int, help='Which pipeline to generate metadata for')
+    parser.add_argument('--release-mode', action='store_true', help='Generate metadata which points to downloads folder')
+    parser.add_argument('--version', required=True, type=str, help='Version of the GHC compiler')
+    args = parser.parse_args()
+
+    project = gl.projects.get(1, lazy=True)
+    pipeline = project.pipelines.get(args.pipeline_id)
+    jobs = pipeline.jobs.list()
+    job_map = { job.name: job for job in jobs }
+    # Bit of a hacky way to determine what pipeline we are dealing with but
+    # the aarch64-darwin job should stay stable for a long time.
+    if 'nightly-aarch64-darwin-validate' in job_map:
+        pipeline_type = 'n'
+        if args.release_mode:
+            raise Exception("Incompatible arguments: nightly pipeline but using --release-mode")
+
+    elif 'release-aarch64-darwin-release' in job_map:
+        pipeline_type = 'r'
+    else:
+        raise Exception("Not a nightly nor release pipeline")
+    eprint(f"Pipeline Type: {pipeline_type}")
+
+    with open(args.metadata, 'r') as file:
+        ghcup_metadata = yaml.safe_load(file)
+        ghcup_metadata['ghcupDownloads']['GHC'][args.version] = mk_new_yaml(args.release_mode, args.version, pipeline_type, job_map)
+        print(yaml.dump(ghcup_metadata))
+
+
+if __name__ == '__main__':
+    main()
+


=====================================
.gitlab/rel_eng/mk-ghcup-metadata/setup.py
=====================================
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+
+from distutils.core import setup
+
+setup(name='ghcup-metadata',
+      author='Matthew Pickering',
+      author_email='matthew at well-typed.com',
+      py_modules=['mk_ghcup_metadata'],
+      entry_points={
+          'console_scripts': [
+              'ghcup-metadata=mk_ghcup_metadata:main',
+          ]
+      }
+     )


=====================================
.gitlab/rel_eng/nix/sources.json
=====================================
@@ -0,0 +1,68 @@
+{
+    "binutils-gdb": {
+        "branch": "master",
+        "repo": "https://sourceware.org/git/binutils-gdb.git",
+        "rev": "49c843e6d2d0577200e7c1d2d02855f21a3a9dde",
+        "type": "git"
+    },
+    "gdb-walkers": {
+        "branch": "master",
+        "description": "Bring mdb walkers to gdb, also add other helpful commands.",
+        "homepage": "",
+        "owner": "hardenedapple",
+        "repo": "gdb-walkers",
+        "rev": "c0701c4c87852bd09e21ca313c48dd4a649cfd0d",
+        "sha256": "1sd61a90lg8bkddl8lp15qady1wvbjmhjgm0d3lb813nwimlka9y",
+        "type": "tarball",
+        "url": "https://github.com/hardenedapple/gdb-walkers/archive/c0701c4c87852bd09e21ca313c48dd4a649cfd0d.tar.gz",
+        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+    },
+    "niv": {
+        "branch": "master",
+        "description": "Easy dependency management for Nix projects",
+        "homepage": "https://github.com/nmattia/niv",
+        "owner": "nmattia",
+        "repo": "niv",
+        "rev": "82e5cd1ad3c387863f0545d7591512e76ab0fc41",
+        "sha256": "090l219mzc0gi33i3psgph6s2pwsc8qy4lyrqjdj4qzkvmaj65a7",
+        "type": "tarball",
+        "url": "https://github.com/nmattia/niv/archive/82e5cd1ad3c387863f0545d7591512e76ab0fc41.tar.gz",
+        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+    },
+    "nixpkgs": {
+        "branch": "nixos-22.11",
+        "description": "Nix Packages collection",
+        "homepage": "",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "2d10e73416ec1449ef74aeac7faf2cf8c556ff5a",
+        "sha256": "00s89np0sqr3jxxp5h9nrpqy30fy4vsrmis6mmryrrmjqh09lpfv",
+        "type": "tarball",
+        "url": "https://github.com/NixOS/nixpkgs/archive/2d10e73416ec1449ef74aeac7faf2cf8c556ff5a.tar.gz",
+        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+    },
+    "processor-trace": {
+        "branch": "master",
+        "description": "libipt - an Intel(R) Processor Trace decoder library",
+        "homepage": "",
+        "owner": "01org",
+        "repo": "processor-trace",
+        "rev": "c848a85c3104e2f5780741f85de5c9e65476ece2",
+        "sha256": "1ml8g6pm2brlcqp90yvgc780xf64d6k2km7fiqs88wvhlwsl7vzf",
+        "type": "tarball",
+        "url": "https://github.com/01org/processor-trace/archive/c848a85c3104e2f5780741f85de5c9e65476ece2.tar.gz",
+        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+    },
+    "rr": {
+        "branch": "master",
+        "description": "Record and Replay Framework",
+        "homepage": "http://rr-project.org/",
+        "owner": "rr-debugger",
+        "repo": "rr",
+        "rev": "e77b5f8ca4b360daffd31cf72cb6b093fa9e0b62",
+        "sha256": "sha256:1gxphqcv1yw2ffmjp0d2cv0mpccr00pf9jhf44rq57jqdsvlfn2c",
+        "type": "tarball",
+        "url": "https://github.com/rr-debugger/rr/archive/3f87444659d1f063397fabc7791ed3b13b15c798.tar.gz",
+        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+    }
+}


=====================================
.gitlab/rel_eng/nix/sources.nix
=====================================
@@ -0,0 +1,194 @@
+# This file has been generated by Niv.
+
+let
+
+  #
+  # The fetchers. fetch_<type> fetches specs of type <type>.
+  #
+
+  fetch_file = pkgs: name: spec:
+    let
+      name' = sanitizeName name + "-src";
+    in
+      if spec.builtin or true then
+        builtins_fetchurl { inherit (spec) url sha256; name = name'; }
+      else
+        pkgs.fetchurl { inherit (spec) url sha256; name = name'; };
+
+  fetch_tarball = pkgs: name: spec:
+    let
+      name' = sanitizeName name + "-src";
+    in
+      if spec.builtin or true then
+        builtins_fetchTarball { name = name'; inherit (spec) url sha256; }
+      else
+        pkgs.fetchzip { name = name'; inherit (spec) url sha256; };
+
+  fetch_git = name: spec:
+    let
+      ref =
+        if spec ? ref then spec.ref else
+          if spec ? branch then "refs/heads/${spec.branch}" else
+            if spec ? tag then "refs/tags/${spec.tag}" else
+              abort "In git source '${name}': Please specify `ref`, `tag` or `branch`!";
+      submodules = if spec ? submodules then spec.submodules else false;
+      submoduleArg =
+        let
+          nixSupportsSubmodules = builtins.compareVersions builtins.nixVersion "2.4" >= 0;
+          emptyArgWithWarning =
+            if submodules == true
+            then
+              builtins.trace
+                (
+                  "The niv input \"${name}\" uses submodules "
+                  + "but your nix's (${builtins.nixVersion}) builtins.fetchGit "
+                  + "does not support them"
+                )
+                {}
+            else {};
+        in
+          if nixSupportsSubmodules
+          then { inherit submodules; }
+          else emptyArgWithWarning;
+    in
+      builtins.fetchGit
+        ({ url = spec.repo; inherit (spec) rev; inherit ref; } // submoduleArg);
+
+  fetch_local = spec: spec.path;
+
+  fetch_builtin-tarball = name: throw
+    ''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`.
+        $ niv modify ${name} -a type=tarball -a builtin=true'';
+
+  fetch_builtin-url = name: throw
+    ''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`.
+        $ niv modify ${name} -a type=file -a builtin=true'';
+
+  #
+  # Various helpers
+  #
+
+  # https://github.com/NixOS/nixpkgs/pull/83241/files#diff-c6f540a4f3bfa4b0e8b6bafd4cd54e8bR695
+  sanitizeName = name:
+    (
+      concatMapStrings (s: if builtins.isList s then "-" else s)
+        (
+          builtins.split "[^[:alnum:]+._?=-]+"
+            ((x: builtins.elemAt (builtins.match "\\.*(.*)" x) 0) name)
+        )
+    );
+
+  # The set of packages used when specs are fetched using non-builtins.
+  mkPkgs = sources: system:
+    let
+      sourcesNixpkgs =
+        import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) { inherit system; };
+      hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
+      hasThisAsNixpkgsPath = <nixpkgs> == ./.;
+    in
+      if builtins.hasAttr "nixpkgs" sources
+      then sourcesNixpkgs
+      else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
+        import <nixpkgs> {}
+      else
+        abort
+          ''
+            Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
+            add a package called "nixpkgs" to your sources.json.
+          '';
+
+  # The actual fetching function.
+  fetch = pkgs: name: spec:
+
+    if ! builtins.hasAttr "type" spec then
+      abort "ERROR: niv spec ${name} does not have a 'type' attribute"
+    else if spec.type == "file" then fetch_file pkgs name spec
+    else if spec.type == "tarball" then fetch_tarball pkgs name spec
+    else if spec.type == "git" then fetch_git name spec
+    else if spec.type == "local" then fetch_local spec
+    else if spec.type == "builtin-tarball" then fetch_builtin-tarball name
+    else if spec.type == "builtin-url" then fetch_builtin-url name
+    else
+      abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
+
+  # If the environment variable NIV_OVERRIDE_${name} is set, then use
+  # the path directly as opposed to the fetched source.
+  replace = name: drv:
+    let
+      saneName = stringAsChars (c: if isNull (builtins.match "[a-zA-Z0-9]" c) then "_" else c) name;
+      ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}";
+    in
+      if ersatz == "" then drv else
+        # this turns the string into an actual Nix path (for both absolute and
+        # relative paths)
+        if builtins.substring 0 1 ersatz == "/" then /. + ersatz else /. + builtins.getEnv "PWD" + "/${ersatz}";
+
+  # Ports of functions for older nix versions
+
+  # a Nix version of mapAttrs if the built-in doesn't exist
+  mapAttrs = builtins.mapAttrs or (
+    f: set: with builtins;
+    listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
+  );
+
+  # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295
+  range = first: last: if first > last then [] else builtins.genList (n: first + n) (last - first + 1);
+
+  # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257
+  stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1));
+
+  # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269
+  stringAsChars = f: s: concatStrings (map f (stringToCharacters s));
+  concatMapStrings = f: list: concatStrings (map f list);
+  concatStrings = builtins.concatStringsSep "";
+
+  # https://github.com/NixOS/nixpkgs/blob/8a9f58a375c401b96da862d969f66429def1d118/lib/attrsets.nix#L331
+  optionalAttrs = cond: as: if cond then as else {};
+
+  # fetchTarball version that is compatible between all the versions of Nix
+  builtins_fetchTarball = { url, name ? null, sha256 }@attrs:
+    let
+      inherit (builtins) lessThan nixVersion fetchTarball;
+    in
+      if lessThan nixVersion "1.12" then
+        fetchTarball ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
+      else
+        fetchTarball attrs;
+
+  # fetchurl version that is compatible between all the versions of Nix
+  builtins_fetchurl = { url, name ? null, sha256 }@attrs:
+    let
+      inherit (builtins) lessThan nixVersion fetchurl;
+    in
+      if lessThan nixVersion "1.12" then
+        fetchurl ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
+      else
+        fetchurl attrs;
+
+  # Create the final "sources" from the config
+  mkSources = config:
+    mapAttrs (
+      name: spec:
+        if builtins.hasAttr "outPath" spec
+        then abort
+          "The values in sources.json should not have an 'outPath' attribute"
+        else
+          spec // { outPath = replace name (fetch config.pkgs name spec); }
+    ) config.sources;
+
+  # The "config" used by the fetchers
+  mkConfig =
+    { sourcesFile ? if builtins.pathExists ./sources.json then ./sources.json else null
+    , sources ? if isNull sourcesFile then {} else builtins.fromJSON (builtins.readFile sourcesFile)
+    , system ? builtins.currentSystem
+    , pkgs ? mkPkgs sources system
+    }: rec {
+      # The sources, i.e. the attribute set of spec name to spec
+      inherit sources;
+
+      # The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
+      inherit pkgs;
+    };
+
+in
+mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }
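
As a quick sanity check that sources.json and sources.nix agree, the pins can be
queried without fetching anything. A sketch, assuming Nix is installed and the
command is run from .gitlab/rel_eng; the NIV_OVERRIDE_* variable is the override
hook defined in the replace helper above:

    # Print the pinned nixpkgs branch and revision (no download happens)
    nix-instantiate --eval --strict -E \
        'let sources = import ./nix/sources.nix; in { inherit (sources.nixpkgs) branch rev; }'

    # Point the nixpkgs source at a local checkout instead of the pinned tarball
    export NIV_OVERRIDE_nixpkgs=/path/to/local/nixpkgs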


=====================================
.gitlab/rel_eng/upload.sh
=====================================
@@ -0,0 +1,250 @@
+#!/usr/bin/env bash
+
+set -e
+
+# This is a script for preparing and uploading a release of GHC.
+#
+# Usage,
+#   1. Update $ver
+#   2. Set $SIGNING_KEY to your key id (prefixed with '=')
+#   3. Create a directory and place the source and binary tarballs there
+#   4. Run this script from that directory
+#
+# You can also invoke the script with an argument to perform only
+# a subset of the usual release,
+#
+#   upload.sh recompress             produce lzip, bzip2 and zip tarballs from the xz tarballs
+#
+#   upload.sh gen_hashes             generate signed hashes of the release
+#                                    tarballs
+#
+#   upload.sh prepare_docs           (deprecated) prepare the documentation directory
+#                                    (this should be unnecessary as the script which
+#                                     fetches artifacts should create this folder from
+#                                     the doc-tarball job)
+#
+#   upload.sh upload_docs            upload documentation to hackage from the hackage_docs folder
+#
+#   upload.sh upload                 upload the tarballs and documentation
+#                                    to downloads.haskell.org
+#
+# Prerequisites: moreutils
+
+if [ -z "$SIGNING_KEY" ]; then
+    SIGNING_KEY="=Benjamin Gamari <ben at well-typed.com>"
+fi
+
+
+# Infer release name from directory name
+if [ -z "$rel_name" ]; then
+    rel_name="$(basename $(pwd))"
+fi
+
+# Infer version from tarball names
+if [ -z "$ver" ]; then
+    ver="$(ls ghc-*.tar.* | sed -ne 's/ghc-\([0-9]\+\.[0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?\).\+/\1/p' | head -n1)"
+    if [ -z "$ver" ]; then echo "Failed to infer \$ver"; exit 1; fi
+fi
+
+host="gitlab-storage.haskell.org"
+
+usage() {
+    echo "Usage: [rel_name=<name>] ver=7.10.3-rc2 $0 <action>"
+    echo
+    echo "where,"
+    echo "  ver                gives the version number (e.g. the name of the tarballs, in the case of"
+    echo "                     a release candidate something like 7.10.3.20150820, otherwise just 7.10.3)"
+    echo "  rel_name           gives the release name (e.g. in the case of a release candidate 7.10.3-rc2"
+    echo "                     otherwise just 7.10.3)"
+    echo "and <action> is one of,"
+    echo "  [nothing]          do everything below"
+    echo "  recompress         produce lzip and gzip tarballs from xz tarballs"
+    echo "  gen_hashes         generated hashes of the release tarballs"
+    echo "  sign               sign hashes of the release tarballs"
+    echo "  prepare_docs       prepare the documentation directory"
+    echo "  upload_docs        upload documentation downloads.haskell.org"
+    echo "  upload             upload the tarballs and documentation to downloads.haskell.org"
+    echo "  purge_all          purge entire release from the CDN"
+    echo "  purge_file file    purge a given file from the CDN"
+    echo "  verify             verify the signatures in this directory"
+    echo
+}
+
+if [ -z "$ver" ]; then
+    usage
+    exit 1
+fi
+if [ -z "$rel_name" ]; then
+    rel_name="$ver"
+fi
+
+# returns the set of files that must have hashes generated.
+function hash_files() {
+    echo $(find -maxdepth 1 \
+         -iname '*.xz' \
+      -o -iname '*.lz' \
+      -o -iname '*.bz2' \
+      -o -iname '*.zip' \
+    )
+    echo $(find -maxdepth 1 -iname '*.patch')
+}
+
+function gen_hashes() {
+    echo -n "Hashing..."
+    sha1sum $(hash_files) >| SHA1SUMS &
+    sha256sum $(hash_files) >| SHA256SUMS &
+    wait
+    echo "done"
+}
+
+function sign() {
+    # Kill DISPLAY lest pinentry won't work
+    DISPLAY=
+    eval "$(gpg-agent --daemon --sh --pinentry-program $(which pinentry))"
+    for i in $(hash_files) SHA1SUMS SHA256SUMS; do
+        if [ -e $i -a -e $i.sig -a $i.sig -nt $i ]; then
+            echo "Skipping signing of $i"
+            continue
+        elif [ -e $i.sig ] && gpg2 --verify $i.sig; then
+            # Don't resign if current signature is valid
+            touch $i.sig
+            continue
+        fi
+        echo "Signing $i"
+        rm -f $i.sig
+        gpg2 --use-agent --detach-sign --local-user="$SIGNING_KEY" $i
+    done
+}
+
+function verify() {
+    if [ $(find -iname '*.sig' | wc -l) -eq 0 ]; then
+        echo "No signatures to verify"
+        return
+    fi
+
+    for i in *.sig; do
+        echo
+        echo Verifying $i
+        gpg2 --verify $i $(basename $i .sig)
+    done
+}
+
+function upload() {
+    verify
+    chmod ugo+r,o-w -R .
+    dir=$(echo $rel_name | sed s/-release//)
+    lftp -c " \
+	    open -u ghc: sftp://$host && \
+	    mirror -P20 -c --reverse --exclude=fetch-gitlab --exclude=out . ghc/$dir && \
+	    wait all;"
+    chmod ugo-w $(ls *.xz *.bz2 *.zip)
+}
+
+function purge_all() {
+    # Purge CDN cache; compute $dir here so this also works when the action is
+    # invoked directly rather than after upload().
+    dir=$(echo $rel_name | sed s/-release//)
+    curl -X PURGE http://downloads.haskell.org/ghc/
+    curl -X PURGE http://downloads.haskell.org/~ghc/
+    curl -X PURGE http://downloads.haskell.org/ghc/$dir
+    curl -X PURGE http://downloads.haskell.org/ghc/$dir/
+    curl -X PURGE http://downloads.haskell.org/~ghc/$dir
+    curl -X PURGE http://downloads.haskell.org/~ghc/$dir/
+    for i in *; do
+        purge_file $i
+    done
+}
+
+function purge_file() {
+    # Take the file to purge as an argument so that "upload.sh purge_file <file>"
+    # works standalone; purge_all passes each file in turn.
+    local i="$1"
+    curl -X PURGE http://downloads.haskell.org/~ghc/$rel_name/$i
+    curl -X PURGE http://downloads.haskell.org/~ghc/$rel_name/$i/
+    curl -X PURGE http://downloads.haskell.org/~ghc/$rel_name/$i/docs/
+    curl -X PURGE http://downloads.haskell.org/ghc/$rel_name/$i
+    curl -X PURGE http://downloads.haskell.org/ghc/$rel_name/$i/
+    curl -X PURGE http://downloads.haskell.org/ghc/$rel_name/$i/docs/
+}
+
+function prepare_docs() {
+    echo "THIS COMMAND IS DEPRECATED, THE DOCS FOLDER SHOULD BE PREPARED BY THE FETCH SCRIPT"
+    local tmp
+    rm -Rf docs
+    if [ -z "$GHC_TREE" ]; then
+        tmp="$(mktemp -d)"
+        tar -xf "ghc-$ver-src.tar.xz" -C "$tmp"
+        GHC_TREE="$tmp/ghc-$ver"
+    fi
+    mkdocs="$GHC_TREE/distrib/mkDocs/mkDocs"
+    if [ ! -e "$mkdocs" ]; then
+        echo "Couldn't find GHC mkDocs at $mkdocs."
+        echo "Perhaps you need to override GHC_TREE?"
+        rm -Rf "$tmp"
+        exit 1
+    fi
+    windows_bindist="$(ls ghc-$ver-x86_64-unknown-mingw32.tar.xz | head -n1)"
+    linux_bindist="$(ls ghc-$ver-x86_64-deb9-linux.tar.xz | head -n1)"
+    echo "Windows bindist: $windows_bindist"
+    echo "Linux bindist: $linux_bindist"
+    $ENTER_FHS_ENV $mkdocs $linux_bindist $windows_bindist
+    if [ -d "$tmp" ]; then rm -Rf "$tmp"; fi
+
+    mkdir -p docs/html
+    tar -Jxf "$linux_bindist"
+    cp -R "ghc-$ver/docs/users_guide/build-html/users_guide docs/html/users_guide"
+    #cp -R ghc-$ver/utils/haddock/doc/haddock docs/html/haddock
+    rm -R "ghc-$ver"
+
+    tar -Jxf docs/libraries.html.tar.xz -C docs/html
+    mv docs/index.html docs/html
+}
+
+function recompress() {
+    combine <(basename -s .xz *.xz) not <(basename -s .lz *.lz) | \
+        parallel 'echo "Recompressing {}.xz to {}.lz"; unxz -c {}.xz | lzip - -o {}.lz'
+
+    for darwin_bindist in $(ls ghc-*-darwin.tar.xz); do
+        local dest="$(basename $darwin_bindist .xz).bz2"
+        if [[ ! -f "$dest" ]]; then
+            echo "Recompressing Darwin bindist to bzip2..."
+            unxz -c "$darwin_bindist" | bzip2 > "$dest"
+        fi
+    done
+
+    for windows_bindist in $(ls ghc-*-mingw32*.tar.xz); do
+      local tmp="$(mktemp -d tmp.XXX)"
+      local dest="$(realpath $(basename $windows_bindist .tar.xz).zip)"
+      echo $dest
+      if [[ ! -f "$dest" ]]; then
+          echo "Recompressing Windows bindist to zip..."
+          tar -C "$tmp" -xf "$windows_bindist"
+          ls $tmp
+          (cd "$tmp"; zip -9 -r "$dest" *)
+      fi
+      rm -R "$tmp"
+    done
+}
+
+function upload_docs() {
+    local tmp="$(mktemp -d)"
+    tar -xf ghc-$ver-src.tar.xz -C "$tmp"
+    GHC_TREE="$tmp/ghc-$ver"
+    local args=$@
+    if [[ -n "$PUBLISH" ]]; then
+        echo "Publishing to Hackage..."
+        args+=( "--publish" )
+    fi
+    "$GHC_TREE/.gitlab/upload_ghc_libs.py" upload --docs=hackage_docs ${args[@]}
+}
+
+if [ "x$1" == "x" ]; then
+    recompress
+    gen_hashes
+    sign
+    if [ ! -d docs ]; then
+        prepare_docs || ( rm -R docs; exit 1 )
+    fi
+    if [ -d hackage_docs ]; then
+      upload_docs
+    fi
+    upload
+    purge_all
+else
+    $@
+fi
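
For orientation, a hedged sketch of how this script is meant to be driven,
following the usage notes at the top of the file (the key, version and paths
below are placeholders, not real release values):

    # From a directory containing the source and binary tarballs fetched from CI
    export SIGNING_KEY="=Jane Doe <jane at example.org>"

    # Run a single step; ver/rel_name are inferred but can be overridden
    ver=9.6.1 ~/ghc/.gitlab/rel_eng/upload.sh gen_hashes
    ver=9.6.1 ~/ghc/.gitlab/rel_eng/upload.sh sign

    # Or run the whole sequence: recompress, gen_hashes, sign, docs, upload, purge_all
    ver=9.6.1 ~/ghc/.gitlab/rel_eng/upload.sh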


=====================================
.gitlab/upload_ghc_libs.py → .gitlab/rel_eng/upload_ghc_libs.py
=====================================




-- 
View it on GitLab: https://gitlab.haskell.org/ghc/ghc/-/compare/ba05a470cbea1b17beb11d511a60ab133c7184e8...db5cf3c97ab6c237de4c4b448aff391e5bae9952
You're receiving this email because of your account on gitlab.haskell.org.



