[Git][ghc/ghc][ghc-8.10] 13 commits: gitlab-ci: Backport CI rework from master
Ben Gamari
gitlab at gitlab.haskell.org
Fri Mar 20 19:20:26 UTC 2020
Ben Gamari pushed to branch ghc-8.10 at Glasgow Haskell Compiler / GHC
Commits:
de890c82 by Ben Gamari at 2020-03-18T13:53:53Z
gitlab-ci: Backport CI rework from master
- - - - -
671ac3f6 by Ben Gamari at 2020-03-18T15:57:52Z
users-guide: Fix :default: fields
- - - - -
00d25137 by Ben Gamari at 2020-03-18T17:42:15Z
release notes: Fix undefined references
- - - - -
a5caf1a2 by Ben Gamari at 2020-03-18T17:43:11Z
rts: Expose interface for configuring EventLogWriters
This exposes a set of interfaces from the GHC API for configuring
EventLogWriters. These can be used by consumers like
[ghc-eventlog-socket](https://github.com/bgamari/ghc-eventlog-socket).
(cherry picked from commit e43e6ece1418f84e50d572772394ab639a083e79)
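
As a rough sketch of how such a consumer might use the exposed interface
(all names below, including the file-descriptor target, are illustrative
and not taken from ghc-eventlog-socket):

    #include "Rts.h"        /* brings in rts/EventLogWriter.h */
    #include <stdbool.h>
    #include <unistd.h>

    static int log_fd = 2;  /* a socket or pipe in a real consumer */

    /* writeEventLog callback; must return true on success. */
    static bool write_to_fd(void *eventlog, size_t eventlog_size) {
        return write(log_fd, eventlog, eventlog_size) == (ssize_t) eventlog_size;
    }

    static const EventLogWriter fd_writer = {
        .writeEventLog = write_to_fd,
        /* the init/flush/stop hooks may be left NULL */
    };

    static bool start_logging(void) {
        /* fails if another writer has already been configured */
        return startEventLogging(&fd_writer);
    }
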
- - - - -
5295fd5a by Ben Gamari at 2020-03-18T17:46:52Z
users-guide: Fix unknown link targets
- - - - -
544e24e1 by Ben Gamari at 2020-03-18T19:35:34Z
docs/compare-flags: Don't use python f-strings
- - - - -
24966161 by Ben Gamari at 2020-03-18T19:35:38Z
compare-flags: Don't rely on encoding flag of subprocess.check_output
Apparently it isn't supported by some slightly older Python versions.
- - - - -
0fd7e009 by Ben Gamari at 2020-03-18T19:35:42Z
compare-flags: Fix output
- - - - -
cd4990ca by Ben Gamari at 2020-03-18T22:15:45Z
Drop compare-flags
- - - - -
06ee06fb by Ben Gamari at 2020-03-19T01:32:17Z
Add test-metrics.sh from master
- - - - -
b9fb1ac4 by Ben Gamari at 2020-03-19T13:36:05Z
Backport get-win32-tarballs configure changes
- - - - -
8eb82c89 by Ben Gamari at 2020-03-20T03:45:02Z
Bump process submodule
- - - - -
06889a6f by Ben Gamari at 2020-03-20T03:45:28Z
gitlab-ci: Allow armv7 to fail
- - - - -
23 changed files:
- .gitlab-ci.yml
- + .gitlab/ci.sh
- − .gitlab/prepare-system.sh
- + .gitlab/test-metrics.sh
- − .gitlab/win32-init.sh
- configure.ac
- docs/users_guide/8.10.1-notes.rst
- − docs/users_guide/compare-flags.py
- docs/users_guide/runtime_control.rst
- docs/users_guide/using-warnings.rst
- hadrian/src/Rules/Documentation.hs
- includes/rts/EventLogWriter.h
- libraries/process
- + mk/get-win32-tarballs.py
- − mk/get-win32-tarballs.sh
- rts/Trace.c
- rts/eventlog/EventLog.c
- rts/eventlog/EventLog.h
- rts/eventlog/EventLogWriter.c
- + testsuite/tests/rts/InitEventLogging.hs
- + testsuite/tests/rts/InitEventLogging.stdout
- + testsuite/tests/rts/InitEventLogging_c.c
- testsuite/tests/rts/all.T
Changes:
=====================================
.gitlab-ci.yml
=====================================
@@ -5,17 +5,16 @@ variables:
DOCKER_REV: 408eff66aef6ca2b44446c694c5a56d6ca0460cc
# Sequential version number capturing the versions of all tools fetched by
- # .gitlab/win32-init.sh.
+ # .gitlab/ci.sh.
WINDOWS_TOOLCHAIN_VERSION: 1
# Disable shallow clones; they break our linting rules
GIT_DEPTH: 0
-before_script:
- - git submodule sync --recursive
- - git submodule update --init --recursive
- - git checkout .gitmodules
- - "git fetch https://gitlab.haskell.org/ghc/ghc-performance-notes.git refs/notes/perf:refs/notes/perf || true"
+ # Overridden by individual jobs
+ CONFIGURE_ARGS: ""
+
+ GIT_SUBMODULE_STRATEGY: "recursive"
stages:
- lint # Source linting
@@ -36,7 +35,18 @@ stages:
- tags
- web
+.nightly: &nightly
+ only:
+ variables:
+ - $NIGHTLY
+ artifacts:
+ when: always
+ expire_in: 8 weeks
+
.release: &release
+ variables:
+ BUILD_FLAVOUR: "perf"
+ FLAVOUR: "perf"
artifacts:
when: always
expire_in: 1 year
@@ -125,8 +135,7 @@ typecheck-testsuite:
- base="$(git merge-base FETCH_HEAD $CI_COMMIT_SHA)"
- "echo Linting submodule changes between $base..$CI_COMMIT_SHA"
- git submodule foreach git remote update
- # TODO: Fix submodule linter
- - submodchecker . $(git rev-list $base..$CI_COMMIT_SHA) || true
+ - submodchecker . $(git rev-list $base..$CI_COMMIT_SHA)
dependencies: []
tags:
- lint
@@ -170,11 +179,7 @@ lint-submods-branch:
tags:
- lint
script:
- - |
- grep TBA libraries/*/changelog.md && (
- echo "Error: Found \"TBA\"s in changelogs."
- exit 1
- ) || exit 0
+ - bash .gitlab/linters/check-changelogs.sh
lint-changelogs:
extends: .lint-changelogs
@@ -200,25 +205,10 @@ lint-release-changelogs:
variables:
FLAVOUR: "validate"
script:
- - cabal update
- - git clean -xdf && git submodule foreach git clean -xdf
- - .gitlab/prepare-system.sh
- - if [[ -d ./cabal-cache ]]; then cp -R ./.cabal-cache ~/.cabal-cache; fi
- - ./boot
- - ./configure $CONFIGURE_ARGS
- - hadrian/build.cabal.sh --flavour=$FLAVOUR -j`mk/detect-cpu-count.sh` --docs=no-sphinx binary-dist
- - mv _build/bindist/ghc*.tar.xz ghc.tar.xz
- - export TOP=$(pwd)
- - cd _build/bindist/ghc-*/ && ./configure --prefix=$TOP/_build/install && make install && cd ../../../
- - |
- # Prepare to push git notes.
- export METRICS_FILE=$CI_PROJECT_DIR/performance-metrics.tsv
- git config user.email "ben+ghc-ci at smart-cactus.org"
- git config user.name "GHC GitLab CI"
- - hadrian/build.cabal.sh --flavour=$FLAVOUR -j`mk/detect-cpu-count.sh` --docs=no-sphinx test --summary-junit=./junit.xml --test-compiler=$TOP/_build/install/bin/ghc || (.gitlab/push-test-metrics.sh && false)
- - |
- # Push git notes.
- .gitlab/push-test-metrics.sh
+ - .gitlab/ci.sh setup
+ - .gitlab/ci.sh configure
+ - .gitlab/ci.sh build_hadrian
+ - .gitlab/ci.sh test_hadrian
cache:
key: hadrian
paths:
@@ -243,6 +233,8 @@ lint-release-changelogs:
- git submodule update --init --recursive
- git checkout .gitmodules
- "git fetch https://gitlab.haskell.org/ghc/ghc-performance-notes.git refs/notes/perf:refs/notes/perf || true"
+ after_script:
+ - .gitlab/ci.sh clean
tags:
- x86_64-linux
@@ -275,7 +267,7 @@ hadrian-ghc-in-ghci:
- cabal update
- cd hadrian; cabal new-build --project-file=ci.project; cd ..
- git clean -xdf && git submodule foreach git clean -xdf
- - .gitlab/prepare-system.sh
+ - .gitlab/ci.sh setup
- if [[ -d ./cabal-cache ]]; then cp -R ./.cabal-cache ~/.cabal-cache; fi
- ./boot
- ./configure $CONFIGURE_ARGS
@@ -294,27 +286,12 @@ hadrian-ghc-in-ghci:
<<: *only-default
variables:
TEST_TYPE: test
- before_script:
- - git clean -xdf && git submodule foreach git clean -xdf
+ MAKE_ARGS: "-Werror"
script:
- - ./boot
- - ./configure $CONFIGURE_ARGS
- - |
- THREADS=`mk/detect-cpu-count.sh`
- make V=0 -j$THREADS WERROR=-Werror
- - make binary-dist-prep TAR_COMP_OPTS="-1"
- - make test_bindist TEST_PREP=YES
- - |
- # Prepare to push git notes.
- METRICS_FILE=$CI_PROJECT_DIR/performance-metrics.tsv
- git config user.email "ben+ghc-ci at smart-cactus.org"
- git config user.name "GHC GitLab CI"
- - |
- THREADS=`mk/detect-cpu-count.sh`
- make $TEST_TYPE THREADS=$THREADS JUNIT_FILE=../../junit.xml METRICS_FILE=$METRICS_FILE || (METRICS_FILE=$METRICS_FILE .gitlab/push-test-metrics.sh && false)
- - |
- # Push git notes.
- METRICS_FILE=$METRICS_FILE .gitlab/push-test-metrics.sh
+ - .gitlab/ci.sh setup
+ - .gitlab/ci.sh configure
+ - .gitlab/ci.sh build_make
+ - .gitlab/ci.sh test_make
dependencies: []
artifacts:
reports:
@@ -325,6 +302,79 @@ hadrian-ghc-in-ghci:
- junit.xml
- performance-metrics.tsv
+#################################
+# x86_64-freebsd
+#################################
+
+.build-x86_64-freebsd:
+ extends: .validate
+ tags:
+ - x86_64-freebsd
+ allow_failure: true
+ variables:
+ # N.B. we use iconv from ports as I see linker errors when we attempt
+ # to use the "native" iconv embedded in libc as suggested by the
+ # porting guide [1].
+ # [1] https://www.freebsd.org/doc/en/books/porters-handbook/using-iconv.html
+ CONFIGURE_ARGS: "--with-gmp-includes=/usr/local/include --with-gmp-libraries=/usr/local/lib --with-iconv-includes=/usr/local/include --with-iconv-libraries=/usr/local/lib"
+ GHC_VERSION: 8.6.3
+ CABAL_INSTALL_VERSION: 3.0.0.0
+ BIN_DIST_PREP_TAR_COMP: "ghc-x86_64-portbld-freebsd.tar.xz"
+ TEST_ENV: "x86_64-freebsd"
+ BUILD_FLAVOUR: "validate"
+ after_script:
+ - cp -Rf $HOME/.cabal cabal-cache
+ - .gitlab/ci.sh clean
+ artifacts:
+ when: always
+ expire_in: 2 week
+ cache:
+ key: "freebsd-$GHC_VERSION"
+ paths:
+ - cabal-cache
+ - toolchain
+
+# Disabled due to lack of builder capacity
+.validate-x86_64-freebsd:
+ extends: .build-x86_64-freebsd
+ stage: full-build
+
+nightly-x86_64-freebsd:
+ <<: *nightly
+ extends: .build-x86_64-freebsd
+ stage: full-build
+
+.build-x86_64-freebsd-hadrian:
+ extends: .validate-hadrian
+ stage: full-build
+ tags:
+ - x86_64-freebsd
+ allow_failure: true
+ variables:
+ CONFIGURE_ARGS: "--with-gmp-includes=/usr/local/include --with-gmp-libraries=/usr/local/lib --with-iconv-includes=/usr/local/include --with-iconv-libraries=/usr/local/lib"
+ HADRIAN_ARGS: "--docs=no-sphinx"
+ GHC_VERSION: 8.6.3
+ CABAL_INSTALL_VERSION: 3.0.0.0
+ BIN_DIST_PREP_TAR_COMP: "ghc-x86_64-portbld-freebsd.tar.xz"
+ TEST_ENV: "x86_64-freebsd-hadrian"
+ FLAVOUR: "validate"
+ after_script:
+ - cp -Rf $HOME/.cabal cabal-cache
+ - .gitlab/ci.sh clean
+ artifacts:
+ when: always
+ expire_in: 2 week
+ cache:
+ key: "freebsd-$GHC_VERSION"
+ paths:
+ - cabal-cache
+ - toolchain
+
+# Disabled due to lack of builder capacity
+.validate-x86_64-freebsd-hadrian:
+ extends: .build-x86_64-freebsd-hadrian
+ stage: full-build
+
#################################
# x86_64-darwin
#################################
@@ -335,28 +385,19 @@ validate-x86_64-darwin:
tags:
- x86_64-darwin
variables:
- GHC_VERSION: 8.8.3
- CABAL_INSTALL_VERSION: 2.4.1.0
+ GHC_VERSION: 8.6.5
+ CABAL_INSTALL_VERSION: 3.0.0.0
BIN_DIST_PREP_TAR_COMP: "ghc-x86_64-apple-darwin.tar.xz"
MACOSX_DEPLOYMENT_TARGET: "10.7"
# Only Sierra and onwards supports clock_gettime. See #12858
ac_cv_func_clock_gettime: "no"
- # Only Mojave and onwards supports utimensat. See #17895
- ac_cv_func_utimensat: "no"
LANG: "en_US.UTF-8"
- CONFIGURE_ARGS: --with-intree-gmp
+ CONFIGURE_ARGS: "--with-intree-gmp"
TEST_ENV: "x86_64-darwin"
- before_script:
- - git clean -xdf && git submodule foreach git clean -xdf
- - git submodule sync --recursive
- - git submodule update --init --recursive
- - git checkout .gitmodules
- - "git fetch https://gitlab.haskell.org/ghc/ghc-performance-notes.git refs/notes/perf:refs/notes/perf || true"
-
- - bash .gitlab/darwin-init.sh
- - PATH="`pwd`/toolchain/bin:$PATH"
+ BUILD_FLAVOUR: "perf"
after_script:
- cp -Rf $HOME/.cabal cabal-cache
+ - .gitlab/ci.sh clean
artifacts:
when: always
expire_in: 2 week
@@ -373,33 +414,21 @@ validate-x86_64-darwin:
tags:
- x86_64-darwin
variables:
- GHC_VERSION: 8.8.3
+ GHC_VERSION: 8.6.3
MACOSX_DEPLOYMENT_TARGET: "10.7"
ac_cv_func_clock_gettime: "no"
LANG: "en_US.UTF-8"
CONFIGURE_ARGS: --with-intree-gmp
TEST_ENV: "x86_64-darwin-hadrian"
FLAVOUR: "validate"
- before_script:
- - git clean -xdf && git submodule foreach git clean -xdf
- - git submodule sync --recursive
- - git submodule update --init --recursive
- - git checkout .gitmodules
- - "git fetch https://gitlab.haskell.org/ghc/ghc-performance-notes.git refs/notes/perf:refs/notes/perf || true"
-
- - bash .gitlab/darwin-init.sh
- - PATH="`pwd`/toolchain/bin:$PATH"
script:
- - cabal update
- - ./boot
- - ./configure $CONFIGURE_ARGS
- - hadrian/build.cabal.sh --flavour=$FLAVOUR -j`mk/detect-cpu-count.sh` --docs=no-sphinx binary-dist
- - mv _build/bindist/ghc*.tar.xz ghc.tar.xz
- - export TOP=$(pwd)
- - cd _build/bindist/ghc-*/ && ./configure --prefix=$TOP/_build/install && make install && cd ../../../
- - hadrian/build.cabal.sh --flavour=$FLAVOUR -j`mk/detect-cpu-count.sh` --docs=no-sphinx test --summary-junit=./junit.xml --test-compiler=$TOP/_build/install/bin/ghc
+ - .gitlab/ci.sh setup
+ - .gitlab/ci.sh configure
+ - .gitlab/ci.sh build_hadrian
+ - .gitlab/ci.sh test_hadrian
after_script:
- cp -Rf $HOME/.cabal cabal-cache
+ - .gitlab/ci.sh clean
artifacts:
when: always
expire_in: 2 week
@@ -413,19 +442,15 @@ validate-x86_64-darwin:
extends: .validate
tags:
- x86_64-linux
+ variables:
+ BUILD_FLAVOUR: "perf"
before_script:
- - git clean -xdf && git submodule foreach git clean -xdf
- - git submodule sync --recursive
- - git submodule update --init --recursive
- - git checkout .gitmodules
- - "git fetch https://gitlab.haskell.org/ghc/ghc-performance-notes.git refs/notes/perf:refs/notes/perf || true"
# Build hyperlinked sources for documentation when building releases
- |
if [[ -n "$CI_COMMIT_TAG" ]]; then
- echo "EXTRA_HADDOCK_OPTS += --hyperlinked-source --quickjump" >> mk/build.mk
+ HADDOCK_HYPERLINKED_SOURCES=1
fi
- - .gitlab/prepare-system.sh
# workaround for docker permissions
- sudo chown ghc:ghc -R .
after_script:
@@ -460,14 +485,10 @@ validate-aarch64-linux-deb9:
expire_in: 2 week
nightly-aarch64-linux-deb9:
+ <<: *nightly
extends: .build-aarch64-linux-deb9
- artifacts:
- expire_in: 2 year
variables:
TEST_TYPE: slowtest
- only:
- variables:
- - $NIGHTLY
#################################
# armv7-linux-deb9
@@ -477,7 +498,6 @@ nightly-aarch64-linux-deb9:
extends: .validate-linux
stage: full-build
image: "registry.gitlab.haskell.org/ghc/ci-images/armv7-linux-deb9:$DOCKER_REV"
- allow_failure: true
variables:
TEST_ENV: "armv7-linux-deb9"
BIN_DIST_PREP_TAR_COMP: "ghc-armv7-linux-deb9.tar.xz"
@@ -489,19 +509,16 @@ nightly-aarch64-linux-deb9:
validate-armv7-linux-deb9:
extends: .build-armv7-linux-deb9
+ allow_failure: true
artifacts:
when: always
expire_in: 2 week
nightly-armv7-linux-deb9:
+ <<: *nightly
extends: .build-armv7-linux-deb9
- artifacts:
- expire_in: 2 year
variables:
TEST_TYPE: slowtest
- only:
- variables:
- - $NIGHTLY
#################################
# i386-linux-deb9
@@ -524,15 +541,10 @@ validate-i386-linux-deb9:
expire_in: 2 week
nightly-i386-linux-deb9:
+ <<: *nightly
extends: .build-i386-linux-deb9
variables:
TEST_TYPE: slowtest
- artifacts:
- when: always
- expire_in: 2 week
- only:
- variables:
- - $NIGHTLY
#################################
# x86_64-linux-deb9
@@ -561,20 +573,16 @@ release-x86_64-linux-deb9:
stage: full-build
nightly-x86_64-linux-deb9:
+ <<: *nightly
extends: .build-x86_64-linux-deb9
stage: full-build
- artifacts:
- expire_in: 2 year
variables:
TEST_TYPE: slowtest
- only:
- variables:
- - $NIGHTLY
# N.B. Has DEBUG assertions enabled in stage2
validate-x86_64-linux-deb9-debug:
extends: .build-x86_64-linux-deb9
- stage: build
+ stage: full-build
variables:
BUILD_FLAVOUR: validate
# Ensure that stage2 also has DEBUG enabled
@@ -583,7 +591,7 @@ validate-x86_64-linux-deb9-debug:
BUILD_SPHINX_PDF: "YES"
TEST_TYPE: slowtest
TEST_ENV: "x86_64-linux-deb9-debug"
- BIN_DIST_PREP_COMP: "ghc-x86_64-deb9-linux-debug.tar.xz"
+ BIN_DIST_PREP_TAR_COMP: "ghc-x86_64-deb9-linux-debug.tar.xz"
artifacts:
when: always
expire_in: 2 week
@@ -597,39 +605,34 @@ validate-x86_64-linux-deb9-debug:
TEST_ENV: "x86_64-linux-deb9-llvm"
nightly-x86_64-linux-deb9-llvm:
+ <<: *nightly
extends: .build-x86_64-linux-deb9
stage: full-build
variables:
BUILD_FLAVOUR: perf-llvm
TEST_ENV: "x86_64-linux-deb9-llvm"
- only:
- variables:
- - $NIGHTLY
validate-x86_64-linux-deb9-integer-simple:
extends: .build-x86_64-linux-deb9
stage: full-build
variables:
+ BUILD_FLAVOUR: validate
INTEGER_LIBRARY: integer-simple
- TEST_ENV: "x86_64-linux-deb9-integer-simple"
+ TEST_ENV: "x86_64-linux-deb9-integer-simple-validate"
BIN_DIST_PREP_TAR_COMP: "ghc-x86_64-deb9-linux-integer-simple.tar.xz"
nightly-x86_64-linux-deb9-integer-simple:
+ <<: *nightly
extends: .build-x86_64-linux-deb9
stage: full-build
variables:
INTEGER_LIBRARY: integer-simple
TEST_ENV: "x86_64-linux-deb9-integer-simple"
TEST_TYPE: slowtest
- artifacts:
- expire_in: 2 year
- only:
- variables:
- - $NIGHTLY
validate-x86_64-linux-deb9-dwarf:
extends: .build-x86_64-linux-deb9
- stage: build
+ stage: full-build
variables:
CONFIGURE_ARGS: "--enable-dwarf-unwind"
BUILD_FLAVOUR: dwarf
@@ -656,14 +659,10 @@ validate-x86_64-linux-deb9-dwarf:
stage: full-build
nightly-x86_64-linux-deb10:
+ <<: *nightly
extends: .build-x86_64-linux-deb10
- artifacts:
- expire_in: 2 weeks
variables:
TEST_TYPE: slowtest
- only:
- variables:
- - $NIGHTLY
release-x86_64-linux-deb10:
<<: *release
@@ -698,19 +697,21 @@ release-x86_64-linux-deb8:
# x86_64-linux-alpine
#################################
-.build-x86_64-linux-alpine:
- extends: .validate-linux
+.build-x86_64-linux-alpine-hadrian:
+ extends: .validate-linux-hadrian
stage: full-build
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-alpine:$DOCKER_REV"
# There are currently a few failing tests
allow_failure: true
variables:
- BUILD_SPHINX_PDF: "NO"
TEST_ENV: "x86_64-linux-alpine"
BIN_DIST_PREP_TAR_COMP: "ghc-x86_64-alpine-linux.tar.xz"
# Can't use ld.gold due to #13958.
CONFIGURE_ARGS: "--disable-ld-override"
- INTEGER_LIBRARY: "integer-simple"
+ HADRIAN_ARGS: "--docs=no-sphinx"
+ # encoding004 due to lack of locale support
+ # T10458 due to the fact that the dynamic linker tries to reload libAS
+ BROKEN_TESTS: "encoding004 T10458"
cache:
key: linux-x86_64-alpine
artifacts:
@@ -719,13 +720,11 @@ release-x86_64-linux-deb8:
release-x86_64-linux-alpine:
<<: *release
- extends: .build-x86_64-linux-alpine
+ extends: .build-x86_64-linux-alpine-hadrian
nightly-x86_64-linux-alpine:
- extends: .build-x86_64-linux-alpine
- only:
- variables:
- - $NIGHTLY
+ <<: *nightly
+ extends: .build-x86_64-linux-alpine-hadrian
#################################
# x86_64-linux-centos7
@@ -775,58 +774,49 @@ validate-x86_64-linux-fedora27:
.build-windows:
<<: *only-default
+ # For the reasons given in #17777 this build isn't reliable.
+ allow_failure: true
before_script:
- git clean -xdf
- - git submodule foreach git clean -xdf
-
- # Use a local temporary directory to ensure that concurrent builds don't
- # interfere with one another
- - |
- mkdir tmp
- set TMP=%cd%\tmp
- set TEMP=%cd%\tmp
- - set PATH=C:\msys64\usr\bin;%PATH%
- - git submodule sync --recursive
- - git submodule update --init --recursive
- - git checkout .gitmodules
- - "git fetch https://gitlab.haskell.org/ghc/ghc-performance-notes.git refs/notes/perf:refs/notes/perf || true"
- - bash .gitlab/win32-init.sh
+ # Setup toolchain
+ - bash .gitlab/ci.sh setup
after_script:
- - rd /s /q tmp
- - robocopy /np /nfl /ndl /e "%APPDATA%\cabal" cabal-cache
- - bash -c 'make clean || true'
+ - |
+ Copy-Item -Recurse -Path $Env:APPDATA\cabal -Destination cabal-cache
+ - bash .gitlab/ci.sh clean
dependencies: []
variables:
- FORCE_SYMLINKS: 1
+ #FORCE_SYMLINKS: 1
LANG: "en_US.UTF-8"
SPHINXBUILD: "/mingw64/bin/sphinx-build.exe"
+ CABAL_INSTALL_VERSION: 3.0.0.0
+ GHC_VERSION: "8.8.3"
cache:
paths:
- cabal-cache
- - ghc-8.6.5
+ - toolchain
- ghc-tarballs
.build-windows-hadrian:
extends: .build-windows
stage: full-build
variables:
- GHC_VERSION: "8.8.3"
FLAVOUR: "validate"
+ # skipping perf tests for now since we build a quick-flavoured GHC,
+ # which might result in some broken perf tests?
+ HADRIAN_ARGS: "--docs=no-sphinx --skip-perf"
+
# due to #16574 this currently fails
allow_failure: true
+
script:
- - |
- python boot
- bash -c './configure --enable-tarballs-autodownload GHC=`pwd`/toolchain/bin/ghc HAPPY=`pwd`/toolchain/bin/happy ALEX=`pwd`/toolchain/bin/alex'
- - bash -c "PATH=`pwd`/toolchain/bin:$PATH hadrian/build.cabal.sh --flavour=$FLAVOUR -j`mk/detect-cpu-count.sh` --flavour=Quick --docs=no-sphinx binary-dist"
- - mv _build/bindist/ghc*.tar.xz ghc.tar.xz
- - bash -c "export TOP=$(pwd); cd _build/bindist/ghc-*/ && PATH=$TOP/toolchain/bin:$PATH ./configure --prefix=$TOP/_build/install && make install && cd ../../../"
- - bash -c "export TOP=$(pwd); PATH=$TOP/toolchain/bin:$PATH hadrian/build.cabal.sh --flavour=$FLAVOUR -j`mk/detect-cpu-count.sh` --flavour=quick test --summary-junit=./junit.xml --skip-perf --test-compiler=$TOP/_build/install/bin/ghc"
- # skipping perf tests for now since we build a quick-flavoured GHC,
- # which might result in some broken perf tests?
+ - bash .gitlab/ci.sh configure
+ - bash .gitlab/ci.sh build_hadrian
+ - bash .gitlab/ci.sh test_hadrian
tags:
- - x86_64-windows
+ - new-x86_64-windows
+ - test
artifacts:
reports:
junit: junit.xml
@@ -845,34 +835,27 @@ validate-x86_64-windows-hadrian:
key: "x86_64-windows-hadrian-$WINDOWS_TOOLCHAIN_VERSION"
nightly-i386-windows-hadrian:
+ <<: *nightly
extends: .build-windows-hadrian
variables:
MSYSTEM: MINGW32
TEST_ENV: "i386-windows-hadrian"
- only:
- variables:
- - $NIGHTLY
cache:
key: "i386-windows-hadrian-$WINDOWS_TOOLCHAIN_VERSION"
.build-windows-make:
extends: .build-windows
stage: full-build
- allow_failure: true
variables:
BUILD_FLAVOUR: "quick"
- GHC_VERSION: "8.8.3"
BIN_DIST_PREP_TAR_COMP: "ghc-x86_64-mingw32.tar.xz"
script:
- - |
- python boot
- bash -c './configure --enable-tarballs-autodownload GHC=`pwd`/toolchain/bin/ghc HAPPY=`pwd`/toolchain/bin/happy ALEX=`pwd`/toolchain/bin/alex $CONFIGURE_ARGS'
- - bash -c "PATH=`pwd`/toolchain/bin:$PATH make -j`mk/detect-cpu-count.sh`"
- - bash -c "PATH=`pwd`/toolchain/bin:$PATH make binary-dist-prep TAR_COMP_OPTS=-1"
- - bash -c "PATH=`pwd`/toolchain/bin:$PATH make test_bindist TEST_PREP=YES"
- - bash -c 'make V=0 test PYTHON=/mingw64/bin/python3 THREADS=`mk/detect-cpu-count.sh` JUNIT_FILE=../../junit.xml'
+ - bash .gitlab/ci.sh configure
+ - bash .gitlab/ci.sh build_make
+ - bash .gitlab/ci.sh test_make
tags:
- - x86_64-windows
+ - new-x86_64-windows
+ - test
artifacts:
when: always
expire_in: 2 week
@@ -880,77 +863,69 @@ nightly-i386-windows-hadrian:
junit: junit.xml
paths:
# N.B. variable interpolation apparently doesn't work on Windows so
- # this can't be $BIN_DIST_TAR_COMP
+ # this can't be $BIN_DIST_PREP_TAR_COMP
- "ghc-x86_64-mingw32.tar.xz"
- junit.xml
-validate-x86_64-windows:
+.build-x86_64-windows-make:
extends: .build-windows-make
variables:
MSYSTEM: MINGW64
- CONFIGURE_ARGS: "--target=x86_64-unknown-mingw32"
TEST_ENV: "x86_64-windows"
cache:
key: "x86_64-windows-$WINDOWS_TOOLCHAIN_VERSION"
+validate-x86_64-windows:
+ extends: .build-x86_64-windows-make
+
nightly-x86_64-windows:
- extends: .build-windows-make
+ <<: *nightly
+ extends: .build-x86_64-windows-make
stage: full-build
variables:
BUILD_FLAVOUR: "validate"
- MSYSTEM: MINGW64
- CONFIGURE_ARGS: "--target=x86_64-unknown-mingw32"
- only:
- variables:
- - $NIGHTLY
- cache:
- key: "x86_64-windows-$WINDOWS_TOOLCHAIN_VERSION"
# Normal Windows validate builds are profiled; that won't do for releases.
release-x86_64-windows:
<<: *release
extends: validate-x86_64-windows
variables:
- MSYSTEM: MINGW64
BUILD_FLAVOUR: "perf"
- CONFIGURE_ARGS: "--target=x86_64-unknown-mingw32"
- TEST_ENV: "x86_64-windows"
-
+ #
release-x86_64-windows-integer-simple:
<<: *release
extends: validate-x86_64-windows
variables:
INTEGER_LIBRARY: integer-simple
BUILD_FLAVOUR: "perf"
- CONFIGURE_ARGS: "--target=x86_64-unknown-mingw32"
- TEST_ENV: "x86_64-windows"
-release-i386-windows:
- <<: *release
+
+.build-i386-windows-make:
extends: .build-windows-make
variables:
MSYSTEM: MINGW32
- BUILD_FLAVOUR: "perf"
- CONFIGURE_ARGS: "--target=i386-unknown-mingw32"
# Due to #15934
BUILD_PROF_LIBS: "NO"
TEST_ENV: "i386-windows"
+ # Due to #17736
+ allow_failure: true
cache:
key: "i386-windows-$WINDOWS_TOOLCHAIN_VERSION"
-nightly-i386-windows:
- extends: .build-windows-make
- only:
- variables:
- - $NIGHTLY
+validate-i386-windows:
+ extends: .build-i386-windows-make
variables:
- MSYSTEM: MINGW32
- CONFIGURE_ARGS: "--target=i386-unknown-mingw32"
- # Due to #15934
- BUILD_PROF_LIBS: "NO"
- TEST_ENV: "i386-windows"
- cache:
- key: "i386-windows-$WINDOWS_TOOLCHAIN_VERSION"
+ BUILD_FLAVOUR: "perf"
+
+release-i386-windows:
+ <<: *release
+ extends: .build-i386-windows-make
+ variables:
+ BUILD_FLAVOUR: "perf"
+
+nightly-i386-windows:
+ <<: *nightly
+ extends: .build-i386-windows-make
############################################################
# Cleanup
@@ -1006,7 +981,7 @@ doc-tarball:
- validate-x86_64-linux-deb9-debug
- validate-x86_64-windows
variables:
- LINUX_BINDIST: "ghc-x86_64-deb9-linux.tar.xz"
+ LINUX_BINDIST: "ghc-x86_64-deb9-linux-debug.tar.xz"
WINDOWS_BINDIST: "ghc-x86_64-mingw32.tar.xz"
# Due to Windows allow_failure
allow_failure: true
@@ -1046,7 +1021,7 @@ source-tarball:
- ghc-*.tar.xz
- version
script:
- - mk/get-win32-tarballs.sh download all
+ - python3 mk/get-win32-tarballs.py download all
- ./boot
- ./configure
- make sdist
@@ -1089,10 +1064,8 @@ hackage-label:
- $CI_MERGE_REQUEST_LABELS =~ /.*user-facing.*/
nightly-hackage:
+ <<: *nightly
extends: .hackage
- only:
- variables:
- - $NIGHTLY
############################################################
# Nofib testing
=====================================
.gitlab/ci.sh
=====================================
@@ -0,0 +1,453 @@
+#!/usr/bin/env bash
+# shellcheck disable=SC2230
+
+# This is the primary driver of the GitLab CI infrastructure.
+
+set -e -o pipefail
+
+# Configuration:
+hackage_index_state="@1579718451"
+
+# Colors
+BLACK="0;30"
+GRAY="1;30"
+RED="0;31"
+LT_RED="1;31"
+BROWN="0;33"
+LT_BROWN="1;33"
+GREEN="0;32"
+LT_GREEN="1;32"
+BLUE="0;34"
+LT_BLUE="1;34"
+PURPLE="0;35"
+LT_PURPLE="1;35"
+CYAN="0;36"
+LT_CYAN="1;36"
+WHITE="1;37"
+LT_GRAY="0;37"
+
+# GitLab Pipelines log section delimiters
+# https://gitlab.com/gitlab-org/gitlab-foss/issues/14664
+start_section() {
+ name="$1"
+ echo -e "section_start:$(date +%s):$name\015\033[0K"
+}
+
+end_section() {
+ name="$1"
+ echo -e "section_end:$(date +%s):$name\015\033[0K"
+}
+
+echo_color() {
+ local color="$1"
+ local msg="$2"
+ echo -e "\033[${color}m${msg}\033[0m"
+}
+
+error() { echo_color "${RED}" "$1"; }
+warn() { echo_color "${LT_BROWN}" "$1"; }
+info() { echo_color "${LT_BLUE}" "$1"; }
+
+fail() { error "error: $1"; exit 1; }
+
+function run() {
+ info "Running $*..."
+ "$@" || ( error "$* failed"; return 1; )
+}
+
+TOP="$(pwd)"
+
+function mingw_init() {
+ case "$MSYSTEM" in
+ MINGW32)
+ triple="i386-unknown-mingw32"
+ boot_triple="i386-unknown-mingw32" # triple of bootstrap GHC
+ ;;
+ MINGW64)
+ triple="x86_64-unknown-mingw32"
+ boot_triple="x86_64-unknown-mingw32" # triple of bootstrap GHC
+ ;;
+ *)
+ fail "win32-init: Unknown MSYSTEM $MSYSTEM"
+ ;;
+ esac
+
+ # Bring mingw toolchain into PATH.
+ # This is extracted from /etc/profile since this script inexplicably fails to
+ # run under gitlab-runner.
+ # shellcheck disable=SC1091
+ source /etc/msystem
+ MINGW_MOUNT_POINT="${MINGW_PREFIX}"
+ PATH="$MINGW_MOUNT_POINT/bin:$PATH"
+
+ # We always use mingw64 Python to avoid path length issues like #17483.
+ export PYTHON="/mingw64/bin/python3"
+}
+
+# This will contain GHC's local native toolchain
+toolchain="$TOP/toolchain"
+mkdir -p "$toolchain/bin"
+PATH="$toolchain/bin:$PATH"
+
+export METRICS_FILE="$CI_PROJECT_DIR/performance-metrics.tsv"
+
+cores="$(mk/detect-cpu-count.sh)"
+
+# Use a local temporary directory to ensure that concurrent builds don't
+# interfere with one another
+mkdir -p "$TOP/tmp"
+export TMP="$TOP/tmp"
+export TEMP="$TOP/tmp"
+
+function darwin_setup() {
+ # It looks like we already have python2 here and just installing python3
+ # does not work.
+ brew upgrade python
+ brew install ghc cabal-install ncurses gmp
+
+ pip3 install sphinx
+ # PDF documentation disabled as MacTeX apparently doesn't include xelatex.
+ #brew cask install mactex
+}
+
+function show_tool() {
+ local tool="$1"
+ info "$tool = ${!tool}"
+ ${!tool} --version
+}
+
+function set_toolchain_paths() {
+ needs_toolchain=1
+ case "$(uname)" in
+ Linux) needs_toolchain="" ;;
+ *) ;;
+ esac
+
+ if [[ -n "$needs_toolchain" ]]; then
+ # These are populated by setup_toolchain
+ GHC="$toolchain/bin/ghc$exe"
+ CABAL="$toolchain/bin/cabal$exe"
+ HAPPY="$toolchain/bin/happy$exe"
+ ALEX="$toolchain/bin/alex$exe"
+ else
+ GHC="$(which ghc)"
+ CABAL="/usr/local/bin/cabal"
+ HAPPY="$HOME/.cabal/bin/happy"
+ ALEX="$HOME/.cabal/bin/alex"
+ fi
+ export GHC
+ export CABAL
+ export HAPPY
+ export ALEX
+
+ # FIXME: Temporarily use ghc from ports
+ case "$(uname)" in
+ FreeBSD) GHC="/usr/local/bin/ghc" ;;
+ *) ;;
+ esac
+}
+
+# Extract GHC toolchain
+function setup() {
+ if [ -d "$TOP/cabal-cache" ]; then
+ info "Extracting cabal cache..."
+ mkdir -p "$cabal_dir"
+ cp -Rf cabal-cache/* "$cabal_dir"
+ fi
+
+ if [[ -n "$needs_toolchain" ]]; then
+ setup_toolchain
+ fi
+ case "$(uname)" in
+ Darwin) darwin_setup ;;
+ *) ;;
+ esac
+
+ # Make sure that git works
+ git config user.email "ghc-ci at gitlab-haskell.org"
+ git config user.name "GHC GitLab CI"
+
+ info "====================================================="
+ info "Toolchain versions"
+ info "====================================================="
+ show_tool GHC
+ show_tool CABAL
+ show_tool HAPPY
+ show_tool ALEX
+}
+
+function fetch_ghc() {
+ local v="$GHC_VERSION"
+ if [[ -z "$v" ]]; then
+ fail "GHC_VERSION is not set"
+ fi
+
+ if [ ! -e "$GHC" ]; then
+ start_section "fetch GHC"
+ url="https://downloads.haskell.org/~ghc/${GHC_VERSION}/ghc-${GHC_VERSION}-${boot_triple}.tar.xz"
+ info "Fetching GHC binary distribution from $url..."
+ curl "$url" > ghc.tar.xz || fail "failed to fetch GHC binary distribution"
+ tar -xJf ghc.tar.xz || fail "failed to extract GHC binary distribution"
+ case "$(uname)" in
+ MSYS_*|MINGW*)
+ cp -r "ghc-${GHC_VERSION}"/* "$toolchain"
+ ;;
+ *)
+ pushd "ghc-${GHC_VERSION}"
+ ./configure --prefix="$toolchain"
+ "$MAKE" install
+ popd
+ ;;
+ esac
+ rm -Rf "ghc-${GHC_VERSION}" ghc.tar.xz
+ end_section "fetch GHC"
+ fi
+
+}
+
+function fetch_cabal() {
+ local v="$CABAL_INSTALL_VERSION"
+ if [[ -z "$v" ]]; then
+ fail "CABAL_INSTALL_VERSION is not set"
+ fi
+
+ if [ ! -e "$CABAL" ]; then
+ start_section "fetch GHC"
+ case "$(uname)" in
+ # N.B. Windows uses zip whereas all others use .tar.xz
+ MSYS_*|MINGW*)
+ case "$MSYSTEM" in
+ MINGW32) cabal_arch="i386" ;;
+ MINGW64) cabal_arch="x86_64" ;;
+ *) fail "unknown MSYSTEM $MSYSTEM" ;;
+ esac
+ url="https://downloads.haskell.org/~cabal/cabal-install-$v/cabal-install-$v-$cabal_arch-unknown-mingw32.zip"
+ info "Fetching cabal binary distribution from $url..."
+ curl "$url" > "$TMP/cabal.zip"
+ unzip "$TMP/cabal.zip"
+ mv cabal.exe "$CABAL"
+ ;;
+ *)
+ local base_url="https://downloads.haskell.org/~cabal/cabal-install-$v/"
+ case "$(uname)" in
+ Darwin) cabal_url="$base_url/cabal-install-$v-x86_64-apple-darwin17.7.0.tar.xz" ;;
+ FreeBSD)
+ #cabal_url="$base_url/cabal-install-$v-x86_64-portbld-freebsd.tar.xz" ;;
+ cabal_url="http://home.smart-cactus.org/~ben/ghc/cabal-install-3.0.0.0-x86_64-portbld-freebsd.tar.xz" ;;
+ *) fail "don't know where to fetch cabal-install for $(uname)"
+ esac
+ echo "Fetching cabal-install from $cabal_url"
+ curl "$cabal_url" > cabal.tar.xz
+ tar -xJf cabal.tar.xz
+ mv cabal "$toolchain/bin"
+ ;;
+ esac
+ end_section "fetch GHC"
+ fi
+}
+
+# For non-Docker platforms we prepare the bootstrap toolchain
+# here. For Docker platforms this is done in the Docker image
+# build.
+function setup_toolchain() {
+ fetch_ghc
+ fetch_cabal
+ cabal_install="$CABAL v2-install --index-state=$hackage_index_state --installdir=$toolchain/bin"
+ # Avoid symlinks on Windows
+ case "$(uname)" in
+ MSYS_*|MINGW*) cabal_install="$cabal_install --install-method=copy" ;;
+ *) ;;
+ esac
+
+ if [ ! -e "$HAPPY" ]; then
+ info "Building happy..."
+ cabal update
+ $cabal_install happy
+ fi
+
+ if [ ! -e "$ALEX" ]; then
+ info "Building alex..."
+ cabal update
+ $cabal_install alex
+ fi
+}
+
+function cleanup_submodules() {
+ start_section "clean submodules"
+ info "Cleaning submodules..."
+ # On Windows submodules can inexplicably get into funky states where git
+ # believes that the submodule is initialized yet its associated repository
+ # is not valid. Avoid failing in this case with the following insanity.
+ git submodule sync --recursive || git submodule deinit --force --all
+ git submodule update --init --recursive
+ git submodule foreach git clean -xdf
+ end_section "clean submodules"
+}
+
+function prepare_build_mk() {
+ if [[ -z "$BUILD_FLAVOUR" ]]; then fail "BUILD_FLAVOUR is not set"; fi
+ if [[ -z ${BUILD_SPHINX_HTML:-} ]]; then BUILD_SPHINX_HTML=YES; fi
+ if [[ -z ${BUILD_SPHINX_PDF:-} ]]; then BUILD_SPHINX_PDF=YES; fi
+ if [[ -z ${INTEGER_LIBRARY:-} ]]; then INTEGER_LIBRARY=integer-gmp; fi
+
+ cat > mk/build.mk <<EOF
+V=1
+HADDOCK_DOCS=YES
+LATEX_DOCS=YES
+HSCOLOUR_SRCS=YES
+BUILD_SPHINX_HTML=$BUILD_SPHINX_HTML
+BUILD_SPHINX_PDF=$BUILD_SPHINX_PDF
+BeConservative=YES
+INTEGER_LIBRARY=$INTEGER_LIBRARY
+XZ_CMD=$XZ
+
+BuildFlavour=$BUILD_FLAVOUR
+ifneq "\$(BuildFlavour)" ""
+include mk/flavours/\$(BuildFlavour).mk
+endif
+GhcLibHcOpts+=-haddock
+EOF
+
+ if [ -n "$HADDOCK_HYPERLINKED_SOURCES" ]; then
+ echo "EXTRA_HADDOCK_OPTS += --hyperlinked-source --quickjump" >> mk/build.mk
+ fi
+
+ case "$(uname)" in
+ Darwin) echo "libraries/integer-gmp_CONFIGURE_OPTS += --configure-option=--with-intree-gmp" >> mk/build.mk ;;
+ *) ;;
+ esac
+
+ info "build.mk is:"
+ cat mk/build.mk
+}
+
+function configure() {
+ start_section "booting"
+ run python3 boot
+ end_section "booting"
+
+ local target_args=""
+ if [[ -n "$triple" ]]; then
+ target_args="--target=$triple"
+ fi
+
+ start_section "configuring"
+ run ./configure \
+ --enable-tarballs-autodownload \
+ $target_args \
+ $CONFIGURE_ARGS \
+ GHC="$GHC" \
+ HAPPY="$HAPPY" \
+ ALEX="$ALEX" \
+ || ( cat config.log; fail "configure failed" )
+ end_section "configuring"
+}
+
+function build_make() {
+ prepare_build_mk
+ if [[ -z "$BIN_DIST_PREP_TAR_COMP" ]]; then
+ fail "BIN_DIST_PREP_TAR_COMP is not set"
+ fi
+
+ echo "include mk/flavours/${BUILD_FLAVOUR}.mk" > mk/build.mk
+ echo 'GhcLibHcOpts+=-haddock' >> mk/build.mk
+ run "$MAKE" -j"$cores" $MAKE_ARGS
+ run "$MAKE" -j"$cores" binary-dist-prep TAR_COMP_OPTS=-1
+ ls -lh "$BIN_DIST_PREP_TAR_COMP"
+}
+
+function fetch_perf_notes() {
+ info "Fetching perf notes..."
+ "$TOP/.gitlab/test-metrics.sh" pull
+}
+
+function push_perf_notes() {
+ info "Pushing perf notes..."
+ "$TOP/.gitlab/test-metrics.sh" push
+}
+
+function test_make() {
+ run "$MAKE" test_bindist TEST_PREP=YES
+ run "$MAKE" V=0 test \
+ THREADS="$cores" \
+ JUNIT_FILE=../../junit.xml
+}
+
+function build_hadrian() {
+ if [ -z "$FLAVOUR" ]; then
+ fail "FLAVOUR not set"
+ fi
+
+ run_hadrian binary-dist
+
+ mv _build/bindist/ghc*.tar.xz ghc.tar.xz
+}
+
+function test_hadrian() {
+ cd _build/bindist/ghc-*/
+ run ./configure --prefix="$TOP"/_build/install
+ run "$MAKE" install
+ cd ../../../
+
+ run_hadrian \
+ test \
+ --summary-junit=./junit.xml \
+ --test-compiler="$TOP"/_build/install/bin/ghc
+}
+
+function clean() {
+ rm -R tmp
+ run "$MAKE" --quiet clean || true
+ run rm -Rf _build
+}
+
+function run_hadrian() {
+ run hadrian/build.cabal.sh \
+ --flavour="$FLAVOUR" \
+ -j"$cores" \
+ $HADRIAN_ARGS \
+ $@
+}
+
+# A convenience function to allow debugging in the CI environment.
+function shell() {
+ local cmd=$@
+ if [ -z "$cmd" ]; then
+ cmd="bash -i"
+ fi
+ run $cmd
+}
+
+# Determine Cabal data directory
+case "$(uname)" in
+ MSYS_*|MINGW*) exe=".exe"; cabal_dir="$APPDATA/cabal" ;;
+ *) cabal_dir="$HOME/.cabal"; exe="" ;;
+esac
+
+# Platform-specific environment initialization
+MAKE="make"
+case "$(uname)" in
+ MSYS_*|MINGW*) mingw_init ;;
+ Darwin) boot_triple="x86_64-apple-darwin" ;;
+ FreeBSD)
+ boot_triple="x86_64-portbld-freebsd"
+ MAKE="gmake"
+ ;;
+ Linux) ;;
+ *) fail "uname $(uname) is not supported" ;;
+esac
+
+set_toolchain_paths
+
+case $1 in
+ setup) setup && cleanup_submodules ;;
+ configure) configure ;;
+ build_make) build_make ;;
+ test_make) fetch_perf_notes; test_make; push_perf_notes ;;
+ build_hadrian) build_hadrian ;;
+ test_hadrian) fetch_perf_notes; test_hadrian; push_perf_notes ;;
+ run_hadrian) run_hadrian $@ ;;
+ clean) clean ;;
+ shell) shell $@ ;;
+ *) fail "unknown mode $1" ;;
+esac
=====================================
.gitlab/prepare-system.sh deleted
=====================================
@@ -1,99 +0,0 @@
-#!/usr/bin/env bash
-# vim: sw=2 et
-set -euo pipefail
-
-fail() {
- echo "ERROR: $*" >&2
- exit 1
-}
-
-hackage_index_state="@1522046735"
-
-if [[ -z ${BUILD_SPHINX_HTML:-} ]]; then BUILD_SPHINX_HTML=YES; fi
-if [[ -z ${BUILD_SPHINX_PDF:-} ]]; then BUILD_SPHINX_PDF=YES; fi
-if [[ -z ${INTEGER_LIBRARY:-} ]]; then INTEGER_LIBRARY=integer-gmp; fi
-if [[ -z ${BUILD_FLAVOUR:-} ]]; then BUILD_FLAVOUR=perf; fi
-
-if [[ -z ${XZ:-} ]]; then
- if which pxz; then
- XZ="pxz"
- elif which xz; then
- # Check whether --threads is supported
- if echo "hello" | xz --threads=$CORES >/dev/null; then
- XZ="xz --threads=$CORES"
- else
- XZ="xz"
- fi
- else
- echo "error: neither pxz nor xz were found"
- exit 1
- fi
-fi
-echo "Using $XZ for compression..."
-
-
-cat > mk/build.mk <<EOF
-V=1
-HADDOCK_DOCS=YES
-LATEX_DOCS=YES
-HSCOLOUR_SRCS=YES
-BUILD_SPHINX_HTML=$BUILD_SPHINX_HTML
-BUILD_SPHINX_PDF=$BUILD_SPHINX_PDF
-BeConservative=YES
-INTEGER_LIBRARY=$INTEGER_LIBRARY
-XZ_CMD=$XZ
-EOF
-
-cat <<EOF >> mk/build.mk
-BuildFlavour=$BUILD_FLAVOUR
-ifneq "\$(BuildFlavour)" ""
-include mk/flavours/\$(BuildFlavour).mk
-endif
-GhcLibHcOpts+=-haddock
-EOF
-
-case "$(uname)" in
- Linux)
- if [[ -n ${TARGET:-} ]]; then
- if [[ $TARGET = FreeBSD ]]; then
- # cross-compiling to FreeBSD
- echo 'HADDOCK_DOCS = NO' >> mk/build.mk
- echo 'WERROR=' >> mk/build.mk
- # https://circleci.com/docs/2.0/env-vars/#interpolating-environment-variables-to-set-other-environment-variables
- echo 'export PATH=/opt/ghc/bin:$PATH' >> $BASH_ENV
- else
- fail "TARGET=$target not supported"
- fi
- fi
- ;;
-
- Darwin)
- if [[ -n ${TARGET:-} ]]; then
- fail "uname=$(uname) not supported for cross-compilation"
- fi
- # It looks like we already have python2 here and just installing python3
- # does not work.
- brew upgrade python
- brew install ghc cabal-install ncurses gmp
-
- pip3 install sphinx
- # PDF documentation disabled as MacTeX apparently doesn't include xelatex.
- #brew cask install mactex
-
- cabal update
- cabal install --reinstall alex happy haddock hscolour --index-state=$hackage_index_state
- # put them on the $PATH, don't fail if already installed
- ln -s $HOME/.cabal/bin/alex /usr/local/bin/alex || true
- ln -s $HOME/.cabal/bin/happy /usr/local/bin/happy || true
- ln -s $HOME/.cabal/bin/HsColour /usr/local/bin/HsColour || true
- echo "libraries/integer-gmp_CONFIGURE_OPTS += --configure-option=--with-intree-gmp" >> mk/build.mk
- ;;
- *)
- fail "uname=$(uname) not supported"
-esac
-
-echo "================================================="
-echo "Build.mk:"
-echo ""
-cat mk/build.mk
-echo "================================================="
=====================================
.gitlab/test-metrics.sh
=====================================
@@ -0,0 +1,89 @@
+#!/usr/bin/env bash
+# vim: sw=2 et
+set -euo pipefail
+
+NOTES_ORIGIN="https://gitlab.haskell.org/ghc/ghc-performance-notes.git"
+NOTES_ORIGIN_PUSH="git at gitlab.haskell.org:ghc/ghc-performance-notes.git"
+REF="perf"
+
+run() {
+ echo "$@"
+ $@
+}
+
+fail() {
+ echo "ERROR: $*" >&2
+ exit 1
+}
+
+function pull() {
+ local ref="refs/notes/$REF"
+ run git fetch -f $NOTES_ORIGIN $ref:$ref
+ echo "perf notes ref $ref is $(git rev-parse $ref)"
+}
+
+function setup_ssh() {
+ # Add gitlab as a known host.
+ mkdir -p ~/.ssh
+ echo "|1|+AUrMGS1elvPeLNt+NHGa5+c6pU=|4XvfRsQftO1OgZD4c0JJ7oNaii8= ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDXilA5l4kOZPx0nM6xDATF+t4fS6te0eYPDwBI/jLWD9cJVtCnsrwMl5ar+/NfmcD0jnCYztUiVHuXyTaWPJYSQpwltfpTeqpo9/z/0MxkPtSl1uMP2cLbDiqA01OWveChktOXwU6hRQ+7MmO+dNRS/iXrRmYrGv/p1W811QgLBLS9fefEdF25n+0dP71L7Ov7riOawlDmd0C11FraE/R8HX6gs6lbXta1kisdxGyKojYSiCtobUaJxRoatMfUP0a9rwTAyl8tf56LgB+igjMky879VAbL7eQ/AmfHYPrSGJ/YlWP6Jj23Dnos5nOVlWL/rVTs9Y/NakLpPwMs75KTC0Pd74hdf2e3folDdAi2kLrQgO2SI6so7rOYZ+mFkCM751QdDVy4DzjmDvSgSIVf9SV7RQf7e7unE7pSZ/ILupZqz9KhR1MOwVO+ePa5qJMNSdC204PIsRWkIO5KP0QLl507NI9Ri84+aODoHD7gDIWNhU08J2P8/E6r0wcC8uWaxh+HaOjI9BkHjqRYsrgfn54BAuO9kw1cDvyi3c8n7VFlNtvQP15lANwim3gr9upV+r95KEPJCgZMYWJBDPIVtp4GdYxCfXxWj5oMXbA5pf0tNixwNJjAsY7I6RN2htHbuySH36JybOZk+gCj6mQkxpCT/tKaUn14hBJWLq7Q+Q==" >> ~/.ssh/known_hosts
+ echo "|1|JZkdAPJmpX6SzGeqhmQLfMWLGQA=|4vTELroOlbFxbCr0WX+PK9EcpD0= ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJknufU+I6A5Nm58lmse4/o11Ai2UzYbYe7782J1+kRk" >> ~/.ssh/known_hosts
+
+ # Setup ssh keys.
+ eval `ssh-agent`
+ echo "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDJPR1vrZgeGTXmgJw2PsJfMjf22LcDnVVwt3l0rwTZ+8Q2J0bHaYxMRKBco1sON6LGcZepw0Hy76RQ87v057pTz18SXvnfE7U/B6v9qBk0ILJz+4BOX9sEhxu2XmScp/wMxkG9IoyruMlsxXzd1sz09o+rzzx24U2Rp27PRm08vG0oipve6BWLbYEqYrE4/nCufqOJmGd56fju7OTU0lTpEkGDEDWGMxutaX2CbTbDju7qy07Ld8BjSc9aHfvuQaslUbj3ex3EF8EXahURzGpHQn/UFFzVGMokFumiJCAagHQb7cj6jOkKseZLaysbA/mTBQsOzjWiRmkN23bQf1wF ben+ghc-ci at smart-cactus.org" > ~/.ssh/perf_rsa.pub
+ touch ~/.ssh/perf_rsa
+ chmod 0600 ~/.ssh/perf_rsa
+ echo "$PERF_NOTE_KEY" >> ~/.ssh/perf_rsa
+ ssh-add ~/.ssh/perf_rsa
+}
+
+# Reset the git notes and append the metrics file to the notes, then push and return the result.
+# This is favoured over a git notes merge as it avoids potential data loss/duplication from the merge strategy.
+function reset_append_note_push {
+ pull || true
+ run git notes --ref=$REF append -F $METRICS_FILE HEAD
+ run git push $NOTES_ORIGIN_PUSH refs/notes/$REF
+}
+
+function push() {
+ # Check that private key is available (Set on all GitLab protected branches).
+ if [ -z ${PERF_NOTE_KEY+"$PERF_NOTE_KEY"} ]
+ then
+ echo "Not pushing performance git notes: PERF_NOTE_KEY is not set."
+ exit 0
+ fi
+
+ # TEST_ENV must be set.
+ if [ -z ${TEST_ENV+"$TEST_ENV"} ]
+ then
+ fail "Not pushing performance git notes: TEST_ENV must be set."
+ fi
+
+ # Assert that the METRICS_FILE exists and can be read.
+ if [ -z ${METRICS_FILE+"$METRICS_FILE"} ]
+ then
+ fail "\$METRICS_FILE not set."
+ fi
+ if ! [ -r $METRICS_FILE ]
+ then
+ fail "Metrics file not found: $METRICS_FILE"
+ fi
+
+ setup_ssh
+
+ # Push the metrics file as a git note. This may fail if another task pushes a note first. In that case
+ # the latest note is fetched and appended.
+ MAX_RETRY=20
+ until reset_append_note_push || [ $MAX_RETRY -le 0 ]
+ do
+ ((MAX_RETRY--))
+ echo ""
+ echo "Failed to push git notes. Fetching, appending, and retrying... $MAX_RETRY retries left."
+ done
+}
+
+case $1 in
+ push) push ;;
+ pull) pull ;;
+ *) fail "Invalid mode $1" ;;
+esac
=====================================
.gitlab/win32-init.sh deleted
=====================================
@@ -1,67 +0,0 @@
-#!/bin/bash
-
-set -e
-
-toolchain=`pwd`/toolchain
-PATH="$toolchain/bin:/mingw64/bin:$PATH"
-
-if [ -d "`pwd`/cabal-cache" ]; then
- cp -Rf cabal-cache $APPDATA/cabal
-fi
-
-if [ ! -e $toolchain/bin/ghc ]; then
- case $MSYSTEM in
- MINGW32)
- triple="i386-unknown-mingw32"
- ;;
- MINGW64)
- triple="x86_64-unknown-mingw32"
- ;;
- *)
- echo "win32-init: Unknown MSYSTEM $MSYSTEM"
- exit 1
- ;;
- esac
- curl https://downloads.haskell.org/~ghc/$GHC_VERSION/ghc-$GHC_VERSION-$triple.tar.xz | tar -xJ
- mv ghc-$GHC_VERSION toolchain
-fi
-
-if [ ! -e $toolchain/bin/cabal ]; then
- url="https://downloads.haskell.org/~cabal/cabal-install-2.4.1.0/cabal-install-2.4.1.0-x86_64-unknown-mingw32.zip"
- curl $url > /tmp/cabal.zip
- unzip /tmp/cabal.zip
- mv cabal.exe $toolchain/bin
-fi
-
-if [ ! -e $toolchain/bin/happy ]; then
- cabal update
- cabal install happy
- cp $APPDATA/cabal/bin/happy $toolchain/bin
-fi
-
-if [ ! -e $toolchain/bin/alex ]; then
- cabal update
- cabal install alex
- cp $APPDATA/cabal/bin/alex $toolchain/bin
-fi
-
-if [[ -z ${INTEGER_LIBRARY:-} ]]; then INTEGER_LIBRARY=integer-gmp; fi
-cat > mk/build.mk <<EOF
-include mk/flavours/${BUILD_FLAVOUR}.mk
-
-V=1
-HADDOCK_DOCS=YES
-LATEX_DOCS=YES
-HSCOLOUR_SRCS=YES
-BUILD_SPHINX_HTML=YES
-BUILD_SPHINX_PDF=NO
-BeConservative=YES
-INTEGER_LIBRARY=$INTEGER_LIBRARY
-GhcLibHcOpts+=-haddock
-EOF
-
-echo "================================================="
-echo "Build.mk:"
-echo ""
-cat mk/build.mk
-echo "================================================="
=====================================
configure.ac
=====================================
@@ -347,7 +347,7 @@ set_up_tarballs() {
else
action="download"
fi
- mk/get-win32-tarballs.sh $action $HostArch > missing-win32-tarballs
+ $PYTHON mk/get-win32-tarballs.py $action $mingw_arch > missing-win32-tarballs
case $? in
0)
rm missing-win32-tarballs
@@ -359,7 +359,7 @@ set_up_tarballs() {
echo
echo " * run configure with the --enable-tarballs-autodownload option"
echo
- echo " * run mk/get-win32-tarballs.sh download ${HostArch}"
+ echo " * run mk/get-win32-tarballs.py download $mingw_arch"
echo
echo " * manually download the files listed in ./missing-win32-tarballs and place"
echo " them in the ghc-tarballs directory."
=====================================
docs/users_guide/8.10.1-notes.rst
=====================================
@@ -176,7 +176,7 @@ Language
good story for graceful degradation in these situations. These situations
should occur much less frequently now and degradation happens much more
smoothly, while still producing useful, sound results (see
- :ghc-flag:`-fmax-pmcheck-models`).
+ :ghc-flag:`-fmax-pmcheck-models=⟨n⟩`).
Compiler
~~~~~~~~
@@ -230,8 +230,8 @@ Compiler
and much more. See the :ref:`user guide <dynflags_plugins>` for
more details as well as an example.
-- Deprecated flag :ghc-flag:`-fmax-pmcheck-iterations` in favor of
- :ghc-flag:`-fmax-pmcheck-models`, which uses a completely different mechanism.
+- Deprecated flag ``-fmax-pmcheck-iterations`` in favor of
+ :ghc-flag:`-fmax-pmcheck-models=⟨n⟩`, which uses a completely different mechanism.
- GHC now writes ``.o`` files atomically, resulting in reduced chances
of truncated files when a build is cancelled or the computer crashes.
=====================================
docs/users_guide/compare-flags.py deleted
=====================================
@@ -1,91 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-"""
-Linter to verify that all flags reported by GHC's --show-options mode
-are documented in the user's guide.
-"""
-
-import sys
-import subprocess
-from typing import Set
-from pathlib import Path
-
-# A list of known-undocumented flags. This should be considered to be a to-do
-# list of flags that need to be documented.
-EXPECTED_UNDOCUMENTED_PATH = \
- Path(__file__).parent / 'expected-undocumented-flags.txt'
-
-EXPECTED_UNDOCUMENTED = \
- {line for line in open(EXPECTED_UNDOCUMENTED_PATH).read().split()}
-
-def expected_undocumented(flag: str) -> bool:
- if flag in EXPECTED_UNDOCUMENTED:
- return True
- if flag.startswith('-Werror'):
- return True
- if flag.startswith('-Wno-') \
- or flag.startswith('-dno') \
- or flag.startswith('-fno') \
- or flag.startswith('-XNo'):
- return True
- if flag.startswith('-Wwarn=') \
- or flag.startswith('-Wno-warn='):
- return True
-
- return False
-
-def read_documented_flags(doc_flags) -> Set[str]:
- # Map characters that mark the end of a flag
- # to whitespace.
- trans = str.maketrans({
- '=': ' ',
- '[': ' ',
- '⟨': ' ',
- })
- return {line.translate(trans).split()[0]
- for line in doc_flags.read().split('\n')
- if line != ''}
-
-def read_ghc_flags(ghc_path: str) -> Set[str]:
- ghc_output = subprocess.check_output([ghc_path, '--show-options'],
- encoding='UTF-8')
- return {flag
- for flag in ghc_output.split('\n')
- if not expected_undocumented(flag)
- if flag != ''}
-
-def main() -> None:
- import argparse
- parser = argparse.ArgumentParser()
- parser.add_argument('--ghc', type=argparse.FileType('r'),
- help='path of GHC executable')
- parser.add_argument('--doc-flags', type=argparse.FileType('r'),
- help='path of ghc-flags.txt output from Sphinx')
- args = parser.parse_args()
-
- doc_flags = read_documented_flags(args.doc_flags)
- ghc_flags = read_ghc_flags(args.ghc.name)
-
- failed = False
-
- undocumented = ghc_flags - doc_flags
- if len(undocumented) > 0:
- print(f'Found {len(undocumented)} flags not documented in the users guide:')
- print('\n'.join(f' {flag}' for flag in sorted(undocumented)))
- print()
- failed = True
-
- now_documented = EXPECTED_UNDOCUMENTED.intersection(doc_flags)
- if len(now_documented) > 0:
- print(f'Found flags that are documented yet listed in {EXPECTED_UNDOCUMENTED_PATH}:')
- print('\n'.join(f' {flag}' for flag in sorted(now_documented)))
- print()
- failed = True
-
- if failed:
- sys.exit(1)
-
-
-if __name__ == '__main__':
- main()
=====================================
docs/users_guide/runtime_control.rst
=====================================
@@ -174,6 +174,8 @@ e.g., on stack overflow. The hooks for these are as follows:
The message printed if ``malloc`` fails.
+.. _event_log_output_api:
+
Event log output
################
@@ -190,7 +192,7 @@ Furthermore GHC lets you specify the way event log data (see :rts-flag:`-l
.. c:member:: bool writeEventLog(void *eventlog, size_t eventlog_size)
- Hands buffered event log data to your event log writer.
+ Hands buffered event log data to your event log writer. Return true on success.
Required for a custom :c:type:`EventLogWriter`.
.. c:member:: void flushEventLog(void)
@@ -202,6 +204,35 @@ Furthermore GHC lets you specify the way event log data (see :rts-flag:`-l
Called when event logging is about to stop. This can be ``NULL``.
+To use an :c:type:`EventLogWriter` the RTS API provides the following functions:
+
+.. c:function:: EventLogStatus eventLogStatus(void)
+
+ Query whether the current runtime system supports the eventlog (e.g. whether
+ the current executable was linked with :ghc-flag:`-eventlog`) and, if it
+ is supported, whether it is currently logging.
+
+.. c:function:: bool startEventLogging(const EventLogWriter *writer)
+
+ Start logging events to the given :c:type:`EventLogWriter`. Returns true on
+ success or false if another writer has already been configured.
+
+.. c:function:: void endEventLogging()
+
+ Tear down the active :c:type:`EventLogWriter`.
+
+where the ``enum`` :c:type:`EventLogStatus` is:
+
+.. c:type:: EventLogStatus
+
+ * ``EVENTLOG_NOT_SUPPORTED``: The runtime system wasn't compiled with
+ eventlog support.
+ * ``EVENTLOG_NOT_CONFIGURED``: An :c:type:`EventLogWriter` has not yet been
+ configured.
+ * ``EVENTLOG_RUNNING``: An :c:type:`EventLogWriter` has been configured and
+ is running.
+
+
.. _rts-options-misc:
Miscellaneous RTS options
@@ -226,7 +257,7 @@ Miscellaneous RTS options
catch unhandled exceptions using the Windows exception handling mechanism.
This option is primarily useful for when you are using the Haskell code as a
DLL, and don't want the RTS to ungracefully terminate your application on
- erros such as segfaults.
+ errors such as segfaults.
.. rts-flag:: --generate-crash-dumps
@@ -351,8 +382,8 @@ performance.
collections. Under this collection strategy oldest-generation garbage
collection can proceed concurrently with mutation.
- Note that :rts-flag:`--nonmoving-gc` cannot be used with ``-G1`` nor
- :rts-flag:`-c`.
+ Note that :rts-flag:`--nonmoving-gc` cannot be used with ``-G1``,
+ :rts-flag:`profiling <-hc>` nor :rts-flag:`-c`.
.. rts-flag:: -xn
@@ -632,6 +663,26 @@ performance.
This is an experimental feature, please let us know if it causes
problems and/or could benefit from further tuning.
+.. rts-flag:: -Iw ⟨seconds⟩
+
+ :default: 0 seconds
+
+ .. index::
+ single: idle GC
+
+ By default, if idle GC is enabled in the threaded runtime, a major
+ GC will be performed every time the process goes idle for a
+ sufficiently long duration (see :rts-flag:`-I ⟨seconds⟩`). For
+ large server processes accepting regular but infrequent requests
+ (e.g., once per second), an expensive, major GC may run after
+ every request. As an alternative to shutting off idle GC entirely
+ (with ``-I0``), a minimum wait time between idle GCs can be
+ specified with this flag. For example, ``-Iw60`` will ensure that
+ an idle GC runs at most once per minute.
+
+ This is an experimental feature, please let us know if it causes
+ problems and/or could benefit from further tuning.
+
.. rts-flag:: -ki ⟨size⟩
:default: 1k
@@ -821,10 +872,10 @@ performance.
By default, the flag will cause a warning to be emitted to stderr
when the sync time exceeds the specified time. This behaviour can
- be overriden, however: the ``longGCSync()`` hook is called when
+ be overridden, however: the ``longGCSync()`` hook is called when
the sync time is exceeded during the sync period, and the
``longGCSyncEnd()`` hook at the end. Both of these hooks can be
- overriden in the ``RtsConfig`` when the runtime is started with
+ overridden in the ``RtsConfig`` when the runtime is started with
``hs_init_ghc()``. The default implementations of these hooks
(``LongGcSync()`` and ``LongGCSyncEnd()`` respectively) print
warnings to stderr.
@@ -1096,7 +1147,7 @@ When the program is linked with the :ghc-flag:`-eventlog` option
logs a default set of events, suitable for use with tools like ThreadScope.
Per default the events are written to :file:`{program}.eventlog` though
- the mechanism for writing event log data can be overriden with a custom
+ the mechanism for writing event log data can be overridden with a custom
`EventLogWriter`.
For some special use cases you may want more control over which
@@ -1291,7 +1342,7 @@ recommended for everyday use!
.. rts-flag:: -Z
- Turn *off* "update-frame squeezing" at garbage-collection time.
+ Turn *off* update frame squeezing on context switch.
(There's no particularly good reason to turn it off, except to
ensure the accuracy of certain data collected regarding thunk entry
counts.)
=====================================
docs/users_guide/using-warnings.rst
=====================================
@@ -221,9 +221,10 @@ of ``-W(no-)*``.
encountered on the command line.
:type: dynamic
:reverse: -Wno-unrecognised-warning-flags
- :default: on
:category:
+ :default: on
+
Enables warnings when the compiler encounters a ``-W...`` flag that is not
recognised.
@@ -253,9 +254,10 @@ of ``-W(no-)*``.
:ghc-flag:`-fdefer-typed-holes`.
:type: dynamic
:reverse: -Wno-typed-holes
- :default: on
:category:
+ :default: on
+
Determines whether the compiler reports typed holes warnings. Has no
effect unless typed holes errors are deferred until runtime. See
:ref:`typed-holes` and :ref:`defer-type-errors`
=====================================
hadrian/src/Rules/Documentation.hs
=====================================
@@ -16,7 +16,6 @@ import Context
import Expression (getContextData, interpretInContext, (?), package)
import Flavour
import Oracles.ModuleFiles
-import Oracles.Setting (topDirectory)
import Packages
import Settings
import Target
@@ -111,11 +110,6 @@ documentationRules = do
need $ map (root -/-) targets
- when (SphinxPDFs `Set.member` doctargets)
- $ checkUserGuideFlags $ pdfRoot -/- "users_guide" -/- "ghc-flags.txt"
- when (SphinxHTML `Set.member` doctargets)
- $ checkUserGuideFlags $ root -/- htmlRoot -/- "users_guide" -/- "ghc-flags.txt"
-
where archiveTarget "libraries" = Haddocks
archiveTarget _ = SphinxHTML
@@ -129,17 +123,6 @@ checkSphinxWarnings out = do
when ("reference target not found" `isInfixOf` log)
$ fail "Undefined reference targets found in Sphinx log."
--- | Check that all GHC flags are documented in the users guide.
-checkUserGuideFlags :: FilePath -> Action ()
-checkUserGuideFlags documentedFlagList = do
- scriptPath <- (</> "docs/users_guide/compare-flags.py") <$> topDirectory
- ghcPath <- (</>) <$> topDirectory <*> programPath (vanillaContext Stage1 ghc)
- runBuilder Python
- [ scriptPath
- , "--doc-flags", documentedFlagList
- , "--ghc", ghcPath
- ] [documentedFlagList] []
-
------------------------------------- HTML -------------------------------------
=====================================
includes/rts/EventLogWriter.h
=====================================
@@ -23,7 +23,7 @@ typedef struct {
// Initialize an EventLogWriter (may be NULL)
void (* initEventLogWriter) (void);
- // Write a series of events
+ // Write a series of events returning true on success.
bool (* writeEventLog) (void *eventlog, size_t eventlog_size);
// Flush possibly existing buffers (may be NULL)
@@ -38,3 +38,29 @@ typedef struct {
* a file `program.eventlog`.
*/
extern const EventLogWriter FileEventLogWriter;
+
+enum EventLogStatus {
+ /* The runtime system wasn't compiled with eventlog support. */
+ EVENTLOG_NOT_SUPPORTED,
+ /* An EventLogWriter has not yet been configured */
+ EVENTLOG_NOT_CONFIGURED,
+ /* An EventLogWriter has been configured and is running. */
+ EVENTLOG_RUNNING,
+};
+
+/*
+ * Query whether the current runtime system supports eventlogging.
+ */
+enum EventLogStatus eventLogStatus(void);
+
+/*
+ * Initialize event logging using the given EventLogWriter.
+ * Returns true on success or false if an EventLogWriter is already configured
+ * or eventlogging isn't supported by the runtime.
+ */
+bool startEventLogging(const EventLogWriter *writer);
+
+/*
+ * Stop event logging and destroy the current EventLogWriter.
+ */
+void endEventLogging(void);
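
For illustration, an embedding application might drive these entry points
roughly as follows (a minimal sketch; my_writer is assumed to be an
EventLogWriter supplied by the application, e.g. as in the example above):

    #include "Rts.h"

    extern const EventLogWriter my_writer;  /* hypothetical, defined elsewhere */

    void enableEventLog(void) {
        switch (eventLogStatus()) {
        case EVENTLOG_NOT_SUPPORTED:
            /* the executable was not linked with -eventlog */
            break;
        case EVENTLOG_NOT_CONFIGURED:
            startEventLogging(&my_writer);
            break;
        case EVENTLOG_RUNNING:
            /* some writer is already active; nothing to do */
            break;
        }
    }

    void disableEventLog(void) {
        if (eventLogStatus() == EVENTLOG_RUNNING)
            endEventLogging();
    }
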
=====================================
libraries/process
=====================================
@@ -1 +1 @@
-Subproject commit 21149358df25d742cc79ce55510aa82f246e7044
+Subproject commit 758d2f799020bc93b95494e3f54e7056d49041ae
=====================================
mk/get-win32-tarballs.py
=====================================
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from pathlib import Path
+import urllib.request
+import subprocess
+import argparse
+
+TARBALL_VERSION = '0.1'
+BASE_URL = "https://downloads.haskell.org/ghc/mingw/{}".format(TARBALL_VERSION)
+DEST = Path('ghc-tarballs/mingw-w64')
+ARCHS = ['i686', 'x86_64', 'sources']
+
+def file_url(arch: str, fname: str) -> str:
+ return "{base}/{arch}/{fname}".format(
+ base=BASE_URL,
+ arch=arch,
+ fname=fname)
+
+def fetch(url: str, dest: Path):
+ print('Fetching', url, '=>', dest)
+ urllib.request.urlretrieve(url, dest)
+
+def fetch_arch(arch: str):
+ req = urllib.request.urlopen(file_url(arch, 'MANIFEST'))
+ files = req.read().decode('UTF-8').split('\n')
+ d = DEST / arch
+ if not d.is_dir():
+ d.mkdir(parents=True)
+ fetch(file_url(arch, 'SHA256SUMS'), d / 'SHA256SUMS')
+ for fname in files:
+ if not (d / fname).is_file():
+ fetch(file_url(arch, fname), d / fname)
+
+ verify(arch)
+
+def verify(arch: str):
+ cmd = ['sha256sum', '--quiet', '--check', '--ignore-missing', 'SHA256SUMS']
+ subprocess.check_call(cmd, cwd=DEST / arch)
+
+def main() -> None:
+ parser = argparse.ArgumentParser()
+ parser.add_argument('mode', choices=['verify', 'download'])
+ parser.add_argument(
+ 'arch',
+ choices=ARCHS + ['all'],
+ help="Architecture to fetch (either i686, x86_64, sources, or all)")
+ args = parser.parse_args()
+
+ action = fetch_arch if args.mode == 'download' else verify
+ if args.arch == 'all':
+ for arch in ARCHS:
+ action(arch)
+ else:
+ action(args.arch)
+
+if __name__ == '__main__':
+ main()
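+
A brief usage note, not part of the patch but inferred from the argparse setup above: the script takes a mode and an architecture, and the 'download' mode also runs the sha256sum check after fetching, replacing the md5 verification done by the deleted shell script. Despite the plain 'python' shebang it relies on pathlib, urllib.request, and type annotations, so it needs a Python 3 interpreter. Typical invocations would look like:

    python3 mk/get-win32-tarballs.py download all       # fetch every architecture, then checksum
    python3 mk/get-win32-tarballs.py verify x86_64      # only checksum already-downloaded tarballs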
=====================================
mk/get-win32-tarballs.sh deleted
=====================================
@@ -1,326 +0,0 @@
-#!/usr/bin/env bash
-
-tarball_dir='ghc-tarballs'
-missing_files=0
-pkg_variant="phyx"
-
-# see #12502
-if test -z "$FIND"; then FIND="find"; fi
-
-fail() {
- echo >&2
- echo "$1" >&2
- exit 1
-}
-
-download_file() {
- local file_url="$1"
- local dest_file="$2"
- local description="$3"
- local extra_curl_opts="$4"
- local backup_url="$5"
- local dest_dir="$(dirname $dest_file)"
-
- if ! test -f "${dest_file}"
- then
- local curl_cmd="curl -f -L ${file_url} -o ${dest_file} --create-dirs -# ${extra_curl_opts}"
- if test -n "${backup_url}"; then
- local curl_cmd_bnk="curl -f -L ${backup_url} -o ${dest_file} --create-dirs -# ${extra_curl_opts}"
- else
- local curl_cmd_bnk="true"
- fi
-
- if test "$download" = "0"
- then
- echo "ERROR: Missing ${description}" >&2
- echo "${file_url}"
- missing_files=1
- return
- else
- echo "Downloading ${description} to ${dest_dir}..."
- $curl_cmd || (echo "Checking repo.msys2.org instead of Haskell.org..." && $curl_cmd_bnk) || {
- rm -f "${dest_file}"
- fail "ERROR: Download failed."
- exit 1
- }
- fi
- fi
-
- local sig_file="${dest_file}.sig"
- if test "$sigs" = "1" -a ! -f "$sig_file"
- then
- echo "Downloading ${description} (signature) to ${dest_dir}..."
- local curl_cmd="curl -f -L ${file_url}.sig -o ${sig_file} --create-dirs -# ${extra_curl_opts}"
- if test -n "${backup_url}"; then
- local curl_cmd_bnk="curl -f -L "${backup_url}.sig" -o ${sig_file} --create-dirs -# ${extra_curl_opts}"
- else
- local curl_cmd_bnk="true"
- fi
- $curl_cmd || (echo "Checking repo.msys2.org instead of Haskell.org..." && $curl_cmd_bnk) || {
- rm -f "${dest_file}.sig"
- fail "ERROR: Download failed."
- exit 1
- }
- fi
-
- if test "$verify" = "1"
- then
- grep "${dest_file}$" mk/win32-tarballs.md5sum | md5sum --quiet -c - ||
- fail "ERROR: ${description} appears to be corrupted, please delete it and try again."
- fi
-}
-
-download_mingw() {
- local mingw_base_url_primary="https://downloads.haskell.org/~ghc/mingw"
- local mingw_base_url_secondary="http://repo.msys2.org/mingw"
-
- if test "$mingw_arch" = "sources"
- then
- mingw_url_tmp=`echo "$1" | sed -e 's/-any\.pkg\.tar\.xz/\.src\.tar\.gz/' \
- -e 's/-sources-/-/' \
- -e 's/-libwinpthread-git-/-winpthreads-git-/' `
- local mingw_url="${mingw_base_url_primary}/${mingw_url_tmp}"
- local mingw_url_backup="${mingw_base_url_secondary}/${mingw_url_tmp}"
- else
- local mingw_url="${mingw_base_url_primary}/$1"
- local mingw_url_backup="${mingw_base_url_secondary}/$1"
- fi
-
- local mingw_toolchain="$(basename $mingw_url)"
- local mingw_w64="${tarball_dir}/${tarball_dest_dir}/${mingw_toolchain}"
-
- download_file "${mingw_url}" "${mingw_w64}" "${mingw_toolchain}" "" "${mingw_url_backup}"
-
- # Mark the tree as needing updates by deleting the folder
- if test -d inplace/mingw && test inplace/mingw -ot "$mingw_w64" ; then
- echo "In-tree MinGW-w64 tree requires updates..."
- rm -rf inplace/mingw
- fi
-}
-
-download_tarballs() {
- local package_prefix="mingw-w64"
- local format_url="/${mingw_arch}/${package_prefix}-${mingw_arch}"
-
- download_mingw "${format_url}-crt-git-7.0.0.5491.fe45801e-1-any.pkg.tar.xz"
- download_mingw "${format_url}-winpthreads-git-7.0.0.5480.e14d23be-1-any.pkg.tar.xz"
- download_mingw "${format_url}-headers-git-7.0.0.5490.9ec54ed1-1-any.pkg.tar.xz"
- download_mingw "${format_url}-libwinpthread-git-7.0.0.5480.e14d23be-1-any.pkg.tar.xz"
- download_mingw "${format_url}-zlib-1.2.8-9-any.pkg.tar.xz"
- download_mingw "${format_url}-isl-0.21-1-any.pkg.tar.xz"
- download_mingw "${format_url}-mpfr-4.0.2-2-any.pkg.tar.xz"
- download_mingw "${format_url}-gmp-6.1.2-1-any.pkg.tar.xz"
- download_mingw "${format_url}-binutils-2.32-3-$pkg_variant.pkg.tar.xz"
- download_mingw "${format_url}-libidn2-2.2.0-1-any.pkg.tar.xz"
- download_mingw "${format_url}-gcc-9.2.0-1-$pkg_variant.pkg.tar.xz"
- download_mingw "${format_url}-mpc-1.1.0-1-any.pkg.tar.xz"
- download_mingw "${format_url}-windows-default-manifest-6.4-3-any.pkg.tar.xz"
-
- # Upstream is unfortunately quite inconsistent in naming
- if test "$mingw_arch" != "sources"; then
- download_mingw "${format_url}-gcc-libs-9.2.0-1-$pkg_variant.pkg.tar.xz"
- fi
-
- if ! test "$missing_files" = "0"
- then
- exit 2
- fi
-}
-
-download_i386() {
- mingw_arch="i686"
- tarball_dest_dir="mingw-w64/i686"
- download_tarballs
-}
-
-download_x86_64() {
- mingw_arch="x86_64"
- tarball_dest_dir="mingw-w64/x86_64"
- download_tarballs
-}
-
-download_sources() {
- mingw_arch="sources"
- tarball_dest_dir="mingw-w64/sources"
- download_tarballs
-}
-
-sync_binaries_and_sources() {
- gpg --recv-key 5F92EFC1A47D45A1
-
- # ensure sources are downloaded
- sigs=1
- download_i386
- download_x86_64
- verify=0
- download_sources
-
- for f in $($FIND ghc-tarballs/mingw-w64 -iname '*.sig'); do
- echo "Verifying $f"
- gpg --verify $f
- done
-
- md5sum `$FIND ghc-tarballs -type f -a -not -iname '*.sig'` >| mk/win32-tarballs.md5sum
- chmod -R ugo+rX ghc-tarballs
-
- rsync -av ghc-tarballs/mingw-w64/* downloads.haskell.org:public_html/mingw
- for f in $($FIND ghc-tarballs/mingw-w64); do
- curl -XPURGE http://downloads.haskell.org/~ghc/mingw/$f
- done
-}
-
-patch_single_file () {
- local patcher_base="$1"
- local filename=$(readlink -f "$2")
- local filepath=$(dirname "$filename")
- local patcher="$patcher_base/iat-patcher.exe"
- $patcher install "$filename" > /dev/null
- rm -f "$filename.bak"
- for file in $patcher_base/*.dll; do cp -f "$file" "${filepath}"; done
- echo "Patched '$filename'"
-}
-
-patch_tarball () {
- local tarball_name="$1"
- local filename=$(basename "$tarball_name")
- local filepath=$(dirname "$tarball_name")
- local newfile=`echo "$filepath/$filename" | sed -e 's/-any/-phyx/'`
- local arch=""
-
- echo "=> ${filename}"
-
- case $1 in
- *x86_64*)
- arch="x86_64"
- ;;
- *i686*)
- arch="i686"
- ;;
- *)
- echo "unknown architecture detected. Stopping."
- exit 1
- ;;
- esac
-
- local base="$(pwd)"
- local patcher_base="$(pwd)/ghc-tarballs/ghc-jailbreak/$arch"
- local tmpdir="ghc-tarballs/tmpdir"
- mkdir -p $tmpdir
- cd $tmpdir
- tar xJf "$base/$tarball_name"
- find . -iname "*.exe" -exec bash -c \
- 'patch_single_file "'"${patcher_base}"'" "$0"' {} \;
- tar cJf "$base/$newfile" .
- cd "$base"
- rm -rf $tmpdir
- gpg --output "$base/${newfile}.sig" --detach-sig "$base/$newfile"
- rm -f "$base/$tarball_name"
-}
-
-show_hashes_for_binaries() {
- $FIND ghc-tarballs/ -iname "*.*" | xargs md5sum | grep -v "\.sig" | sed -s "s/\*//"
-}
-
-usage() {
- echo "$0 - Download GHC mingw toolchain tarballs"
- echo
- echo "Usage: $0 <action> [<arch>]"
- echo
- echo "Where <action> is one of,"
- echo ""
- echo " download download the necessary tarballs for the given architecture"
- echo " fetch download the necessary tarballs for the given architecture but doesn't verify their md5."
- echo " grab download the necessary tarballs using patched toolchains for the given architecture but doesn't verify their md5."
- echo " verify verify the existence and correctness of the necessary tarballs"
- echo " patch jailbreak the binaries in the tarballs and remove MAX_PATH limitations."
- echo " hash generate md5 hashes for inclusion in win32-tarballs.md5sum"
- echo " sync upload packages downloaded with 'fetch mirror' to haskell.org"
- echo ""
- echo "and <arch> is one of i386, x86_64,all or mirror (which includes sources)"
-}
-
-case $1 in
- download)
- download=1
- verify=1
- sigs=0
- ;;
- fetch)
- download=1
- verify=
- ;;
- grab)
- download=1
- verify=0
- pkg_variant="any"
- ;;
- verify)
- download=0
- verify=1
- ;;
- sync)
- download=1
- verify=0
- sync=1
- ;;
- hash)
- show_hashes_for_binaries
- exit 1
- ;;
- # This routine will download the latest ghc-jailbreak and unpack binutils and
- # the ghc tarballs and patches every .exe in each. Along with this is copies
- # two dlls in every folder that it patches a .exe in. Afterwards it re-creates
- # the tarballs and generates a new signature file.
- patch)
- export -f patch_tarball
- export -f patch_single_file
-
- echo "Downloading ghc-jailbreak..."
- curl -f -L https://mistuke.blob.core.windows.net/binaries/ghc-jailbreak-0.3.tar.gz \
- -o ghc-tarballs/ghc-jailbreak/ghc-jailbreak.tar.gz --create-dirs -#
- tar -C ghc-tarballs/ghc-jailbreak/ -xf ghc-tarballs/ghc-jailbreak/ghc-jailbreak.tar.gz
-
- find ghc-tarballs/mingw-w64/ \( -iname "*binutils*.tar.xz" \
- -o -iname "*gcc*.tar.xz" \) \
- -exec bash -c 'patch_tarball "$0"' {} \;
-
- rm -rf ghc-tarballs/ghc-jailbreak
-
- echo "Finished tarball generation, toolchain has been pre-patched."
- exit 0
- ;;
- *)
- usage
- exit 1
- ;;
-esac
-
-case $2 in
- i386)
- download_i386
- ;;
- x86_64)
- download_x86_64
- ;;
- all)
- download_i386
- download_x86_64
- ;;
- mirror)
- sigs=1
- download_i386
- download_x86_64
- verify=0
- sigs=0
- download_sources
- show_hashes_for_binaries
- ;;
- *)
- if test "$sync" = "1"; then
- sync_binaries_and_sources
- else
- usage
- exit 1
- fi
- ;;
-esac
=====================================
rts/Trace.c
=====================================
@@ -40,21 +40,12 @@ int TRACE_cap;
static Mutex trace_utx;
#endif
-static bool eventlog_enabled;
-
/* ---------------------------------------------------------------------------
Starting up / shutting down the tracing facilities
--------------------------------------------------------------------------- */
-static const EventLogWriter *getEventLogWriter(void)
-{
- return rtsConfig.eventlog_writer;
-}
-
void initTracing (void)
{
- const EventLogWriter *eventlog_writer = getEventLogWriter();
-
#if defined(THREADED_RTS)
initMutex(&trace_utx);
#endif
@@ -95,15 +86,14 @@ void initTracing (void)
TRACE_spark_full ||
TRACE_user;
- eventlog_enabled = RtsFlags.TraceFlags.tracing == TRACE_EVENTLOG &&
- eventlog_writer != NULL;
-
/* Note: we can have any of the TRACE_* flags turned on even when
eventlog_enabled is off. In the DEBUG way we may be tracing to stderr.
*/
+ initEventLogging();
- if (eventlog_enabled) {
- initEventLogging(eventlog_writer);
+ if (RtsFlags.TraceFlags.tracing == TRACE_EVENTLOG
+ && rtsConfig.eventlog_writer != NULL) {
+ startEventLogging(rtsConfig.eventlog_writer);
}
}
@@ -121,17 +111,10 @@ void freeTracing (void)
}
}
+// Used to reset tracing in a forked child
void resetTracing (void)
{
- const EventLogWriter *eventlog_writer;
- eventlog_writer = getEventLogWriter();
-
- if (eventlog_enabled) {
- abortEventLogging(); // abort eventlog inherited from parent
- if (eventlog_writer != NULL) {
- initEventLogging(eventlog_writer); // child starts its own eventlog
- }
- }
+ restartEventLogging();
}
void flushTrace (void)
=====================================
rts/eventlog/EventLog.c
=====================================
@@ -26,7 +26,9 @@
#include <unistd.h>
#endif
-static const EventLogWriter *event_log_writer;
+bool eventlog_enabled;
+
+static const EventLogWriter *event_log_writer = NULL;
#define EVENT_LOG_SIZE 2 * (1024 * 1024) // 2MB
@@ -516,16 +518,22 @@ postHeaderEvents(void)
postInt32(&eventBuf, EVENT_DATA_BEGIN);
}
-void
-initEventLogging(const EventLogWriter *ev_writer)
+static uint32_t
+get_n_capabilities(void)
{
- uint32_t n_caps;
+#if defined(THREADED_RTS)
+ // XXX n_capabilities may not have been initialized yet
+ return (n_capabilities != 0) ? n_capabilities : RtsFlags.ParFlags.nCapabilities;
+#else
+ return 1;
+#endif
+}
+void
+initEventLogging()
+{
init_event_types();
- event_log_writer = ev_writer;
- initEventLogWriter();
-
int num_descs = sizeof(EventDesc) / sizeof(char*);
if (num_descs != NUM_GHC_EVENT_TAGS) {
barf("EventDesc array has the wrong number of elements (%d, NUM_GHC_EVENT_TAGS=%d)",
@@ -542,18 +550,28 @@ initEventLogging(const EventLogWriter *ev_writer)
* Use a single buffer to store the header with event types, then flush
* the buffer so all buffers are empty for writing events.
*/
-#if defined(THREADED_RTS)
- // XXX n_capabilities hasn't been initialized yet
- n_caps = RtsFlags.ParFlags.nCapabilities;
-#else
- n_caps = 1;
-#endif
- moreCapEventBufs(0, n_caps);
+ moreCapEventBufs(0, get_n_capabilities());
initEventsBuf(&eventBuf, EVENT_LOG_SIZE, (EventCapNo)(-1));
#if defined(THREADED_RTS)
initMutex(&eventBufMutex);
#endif
+}
+
+enum EventLogStatus
+eventLogStatus(void)
+{
+ if (eventlog_enabled) {
+ return EVENTLOG_RUNNING;
+ } else {
+ return EVENTLOG_NOT_CONFIGURED;
+ }
+}
+
+static bool
+startEventLogging_(void)
+{
+ initEventLogWriter();
postHeaderEvents();
@@ -564,14 +582,42 @@ initEventLogging(const EventLogWriter *ev_writer)
*/
printAndClearEventBuf(&eventBuf);
- for (uint32_t c = 0; c < n_caps; ++c) {
+ for (uint32_t c = 0; c < get_n_capabilities(); ++c) {
postBlockMarker(&capEventBuf[c]);
}
+ return true;
+}
+
+bool
+startEventLogging(const EventLogWriter *ev_writer)
+{
+ if (eventlog_enabled || event_log_writer) {
+ return false;
+ }
+
+ eventlog_enabled = true;
+ event_log_writer = ev_writer;
+ return startEventLogging_();
+}
+
+// Called during forkProcess in the child to restart the eventlog writer.
+void
+restartEventLogging(void)
+{
+ freeEventLogging();
+ stopEventLogWriter();
+ initEventLogging(); // allocate new per-capability buffers
+ if (event_log_writer != NULL) {
+ startEventLogging_(); // child starts its own eventlog
+ }
}
void
endEventLogging(void)
{
+ if (!eventlog_enabled)
+ return;
+
// Flush all events remaining in the buffers.
for (uint32_t c = 0; c < n_capabilities; ++c) {
printAndClearEventBuf(&capEventBuf[c]);
@@ -586,6 +632,8 @@ endEventLogging(void)
printAndClearEventBuf(&eventBuf);
stopEventLogWriter();
+ event_log_writer = NULL;
+ eventlog_enabled = false;
}
void
@@ -626,13 +674,6 @@ freeEventLogging(void)
}
}
-void
-abortEventLogging(void)
-{
- freeEventLogging();
- stopEventLogWriter();
-}
-
/*
* Post an event message to the capability's eventlog buffer.
* If the buffer is full, prints out the buffer and clears it.
@@ -1440,7 +1481,7 @@ void printAndClearEventBuf (EventsBuf *ebuf)
size_t elog_size = ebuf->pos - ebuf->begin;
if (!writeEventLog(ebuf->begin, elog_size)) {
debugBelch(
- "printAndClearEventLog: could not flush event log"
+ "printAndClearEventLog: could not flush event log\n"
);
resetEventsBuf(ebuf);
return;
@@ -1524,4 +1565,17 @@ void postEventType(EventsBuf *eb, EventType *et)
postInt32(eb, EVENT_ET_END);
}
+#else
+
+enum EventLogStatus eventLogStatus(void)
+{
+ return EVENTLOG_NOT_SUPPORTED;
+}
+
+bool startEventLogging(const EventLogWriter *writer STG_UNUSED) {
+ return false;
+}
+
+void endEventLogging(void) {}
+
#endif /* TRACING */
=====================================
rts/eventlog/EventLog.h
=====================================
@@ -22,8 +22,10 @@
*/
extern char *EventTagDesc[];
-void initEventLogging(const EventLogWriter *writer);
-void endEventLogging(void);
+extern bool eventlog_enabled;
+
+void initEventLogging(void);
+void restartEventLogging(void);
void freeEventLogging(void);
void abortEventLogging(void); // #4512 - after fork child needs to abort
void flushEventLog(void); // event log inherited from parent
=====================================
rts/eventlog/EventLogWriter.c
=====================================
@@ -122,6 +122,7 @@ stopEventLogFileWriter(void)
{
if (event_log_file != NULL) {
fclose(event_log_file);
+ event_log_file = NULL;
}
}
=====================================
testsuite/tests/rts/InitEventLogging.hs
=====================================
@@ -0,0 +1,11 @@
+{-# LANGUAGE ForeignFunctionInterface #-}
+
+-- Test that the startEventLog interface works as expected.
+main :: IO ()
+main = do
+ putStrLn "Starting eventlog..."
+ c_init_eventlog
+ putStrLn "done"
+
+foreign import ccall unsafe "init_eventlog"
+ c_init_eventlog :: IO ()
=====================================
testsuite/tests/rts/InitEventLogging.stdout
=====================================
@@ -0,0 +1,8 @@
+Starting eventlog...
+done
+init
+write
+write
+write
+write
+stop
=====================================
testsuite/tests/rts/InitEventLogging_c.c
=====================================
@@ -0,0 +1,33 @@
+#include <stdio.h>
+#include <Rts.h>
+
+void test_init(void) {
+ printf("init\n");
+}
+
+bool test_write(void *eventlog, size_t eventlog_size) {
+ printf("write\n");
+ return true;
+}
+
+void test_flush(void) {
+ printf("flush\n");
+}
+
+void test_stop(void) {
+ printf("stop\n");
+}
+
+const EventLogWriter writer = {
+ .initEventLogWriter = test_init,
+ .writeEventLog = test_write,
+ .flushEventLog = test_flush,
+ .stopEventLogWriter = test_stop
+};
+
+void init_eventlog(void) {
+ if (!startEventLogging(&writer)) {
+ printf("failed to start eventlog\n");
+ }
+}
+
=====================================
testsuite/tests/rts/all.T
=====================================
@@ -411,3 +411,6 @@ test('T17088',
[only_ways(['normal']), extra_run_opts('+RTS -c -A256k -RTS')],
compile_and_run, ['-rtsopts -O2'])
+test('InitEventLogging',
+ [only_ways(['normal']), extra_run_opts('+RTS -RTS')],
+ compile_and_run, ['-eventlog InitEventLogging_c.c'])
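+
The new test compiles InitEventLogging.hs together with InitEventLogging_c.c, links with -eventlog, and compares the output against the InitEventLogging.stdout file added above. Once a build is in place it should be runnable on its own with the usual testsuite selector, for example something like:

    make test TEST=InitEventLogging     # run from the testsuite directory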
View it on GitLab: https://gitlab.haskell.org/ghc/ghc/compare/4c0a45d1043de427a1f179019e87723b1374bf19...06889a6f2e2adfd308339a836074216108cd7149