summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMatthew Pickering <matthewtpickering@gmail.com>2023-01-11 16:50:38 +0000
committerMarge Bot <ben+marge-bot@smart-cactus.org>2023-01-16 20:51:25 -0500
commit0b358d0c722f4bccbe6c3cafdad2ffe5c2b6fff5 (patch)
treeab454de5ff50c9a0c03b7db191182ecedecbfd8e
parent8039feb926e8fcd7eca3a54d9c98a05052742a92 (diff)
downloadhaskell-0b358d0c722f4bccbe6c3cafdad2ffe5c2b6fff5.tar.gz
rel_eng: Add release engineering scripts into ghc tree
It is better to keep these scripts in the tree as they depend on the CI configuration and so on. By keeping them in tree we can keep them up-to-date as the CI config changes and also makes it easier to backport changes to the release script between release branches in future. The final motivation is that it makes generating GHCUp metadata possible.
-rw-r--r--.gitlab-ci.yml2
-rw-r--r--.gitlab/rel_eng/default.nix54
-rw-r--r--.gitlab/rel_eng/fetch-gitlab-artifacts/.gitignore3
-rw-r--r--.gitlab/rel_eng/fetch-gitlab-artifacts/README.mkd23
-rw-r--r--.gitlab/rel_eng/fetch-gitlab-artifacts/default.nix13
-rw-r--r--.gitlab/rel_eng/fetch-gitlab-artifacts/fetch_gitlab.py145
-rw-r--r--.gitlab/rel_eng/fetch-gitlab-artifacts/setup.py14
-rw-r--r--.gitlab/rel_eng/nix/sources.json68
-rw-r--r--.gitlab/rel_eng/nix/sources.nix194
-rwxr-xr-x.gitlab/rel_eng/upload.sh250
-rwxr-xr-x.gitlab/rel_eng/upload_ghc_libs.py (renamed from .gitlab/upload_ghc_libs.py)0
11 files changed, 765 insertions, 1 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 31d84df7df..0719402e1b 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -560,7 +560,7 @@ hackage-doc-tarball:
- tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C ghc*/
script:
- cd ghc*/
- - mv .gitlab/upload_ghc_libs.py .
+ - mv .gitlab/rel_eng/upload_ghc_libs.py .
- .gitlab/ci.sh setup
- .gitlab/ci.sh configure
- ./upload_ghc_libs.py prepare --bindist ghc*linux/
diff --git a/.gitlab/rel_eng/default.nix b/.gitlab/rel_eng/default.nix
new file mode 100644
index 0000000000..42435ba476
--- /dev/null
+++ b/.gitlab/rel_eng/default.nix
@@ -0,0 +1,54 @@
# Development shell/package providing the GHC release-engineering tools
# (upload.sh, upload-ghc-libs, fetch-gitlab).  nixpkgs is pinned via niv
# (see ./nix/sources.json).
let sources = import ./nix/sources.nix; in

{ nixpkgs ? (import sources.nixpkgs {}) }:

with nixpkgs;
let
  fetch-gitlab-artifacts = nixpkgs.callPackage ./fetch-gitlab-artifacts {};


  # FHS chroot environment used by upload.sh (via $ENTER_FHS_ENV) so the
  # mkDocs tooling can run against the bindists with a conventional layout.
  bindistPrepEnv = pkgs.buildFHSUserEnv {
    name = "enter-fhs";
    targetPkgs = pkgs: with pkgs; [
      # all
      gcc binutils gnumake gmp ncurses5 git elfutils
      # source-release.sh
      xorg.lndir curl python3 which automake autoconf m4 file
      haskell.compiler.ghc8107 haskellPackages.happy haskellPackages.alex
    ];
    runScript = "$SHELL -x";
  };

  # Wrap the scripts so every external tool they shell out to is on PATH.
  scripts = stdenv.mkDerivation {
    name = "rel-eng-scripts";
    nativeBuildInputs = [ makeWrapper ];
    preferLocalBuild = true;
    buildCommand = ''
      mkdir -p $out/bin

      makeWrapper ${./upload.sh} $out/bin/upload.sh \
        --prefix PATH : ${moreutils}/bin \
        --prefix PATH : ${lftp}/bin \
        --prefix PATH : ${lzip}/bin \
        --prefix PATH : ${zip}/bin \
        --prefix PATH : ${s3cmd}/bin \
        --prefix PATH : ${gnupg}/bin \
        --prefix PATH : ${pinentry}/bin \
        --prefix PATH : ${parallel}/bin \
        --prefix PATH : ${python3}/bin \
        --set ENTER_FHS_ENV ${bindistPrepEnv}/bin/enter-fhs \
        --set BASH ${bash}/bin/bash

      makeWrapper ${./upload_ghc_libs.py} $out/bin/upload-ghc-libs
    '';
  };

in
  symlinkJoin {
    name = "ghc-rel-eng";
    preferLocalBuild = true;
    paths = [
      scripts
      fetch-gitlab-artifacts
    ];
  }
diff --git a/.gitlab/rel_eng/fetch-gitlab-artifacts/.gitignore b/.gitlab/rel_eng/fetch-gitlab-artifacts/.gitignore
new file mode 100644
index 0000000000..1b01e3c7e9
--- /dev/null
+++ b/.gitlab/rel_eng/fetch-gitlab-artifacts/.gitignore
@@ -0,0 +1,3 @@
+result
+fetch-gitlab
+out
diff --git a/.gitlab/rel_eng/fetch-gitlab-artifacts/README.mkd b/.gitlab/rel_eng/fetch-gitlab-artifacts/README.mkd
new file mode 100644
index 0000000000..24d34ac4aa
--- /dev/null
+++ b/.gitlab/rel_eng/fetch-gitlab-artifacts/README.mkd
@@ -0,0 +1,23 @@
+# fetch-gitlab-artifacts
+
+This script is used to fetch and rename GHC binary distributions from GitLab
+Pipelines for upload to `downloads.haskell.org`.
+
+## Workflow
+
+1. [Configure](https://python-gitlab.readthedocs.io/en/stable/cli-usage.html) a `python-gitlab` profile for <https://gitlab.haskell.org/>:
+ ```
+ $ cat > $HOME/.python-gitlab.cfg <<EOF
+ [haskell]
+ url = https://gitlab.haskell.org/
+ private_token = $PRIVATE_GITLAB_TOKEN
+ ssl_verify = true
+ api_version = 4
+ EOF
+ ```
+1. Push a release tag to <https://gitlab.haskell.org/ghc/ghc>
+1. Wait until the CI pipeline completes
+1. Run `fetch-gitlab -p $PIPELINE_ID -r $RELEASE_NAME` where `$PIPELINE_ID` is
+ the ID of the GitLab release pipeline and `$RELEASE_NAME` is the name of the
+ GHC release (e.g. `8.8.1` or `8.8.1-alpha1`)
+1. The binary distributions will be placed in the `out` directory. \ No newline at end of file
diff --git a/.gitlab/rel_eng/fetch-gitlab-artifacts/default.nix b/.gitlab/rel_eng/fetch-gitlab-artifacts/default.nix
new file mode 100644
index 0000000000..5f552b2bfe
--- /dev/null
+++ b/.gitlab/rel_eng/fetch-gitlab-artifacts/default.nix
@@ -0,0 +1,13 @@
{ nix-gitignore, python3Packages, unzip }:

let
  # Build the fetch-gitlab CLI as a Python package.  The dependencies are
  # taken from the arguments that `callPackage` binds below — the previous
  # version reached back into `python3Packages.python-gitlab` directly,
  # leaving the bound `python-gitlab` argument dead and bypassing
  # callPackage's override mechanism.
  fetch-gitlab = { buildPythonPackage, python-gitlab, unzip }:
    buildPythonPackage {
      pname = "fetch-gitlab";
      version = "0.0.1";
      # Respect .gitignore so build artifacts (result, out, ...) do not
      # pollute the source hash.
      src = nix-gitignore.gitignoreSource [] ./.;
      propagatedBuildInputs = [ python-gitlab unzip ];
      preferLocalBuild = true;
    };
in
python3Packages.callPackage fetch-gitlab { inherit unzip; }
diff --git a/.gitlab/rel_eng/fetch-gitlab-artifacts/fetch_gitlab.py b/.gitlab/rel_eng/fetch-gitlab-artifacts/fetch_gitlab.py
new file mode 100644
index 0000000000..256a5cc5d4
--- /dev/null
+++ b/.gitlab/rel_eng/fetch-gitlab-artifacts/fetch_gitlab.py
@@ -0,0 +1,145 @@
+import logging
+from pathlib import Path
+import subprocess
+import gitlab
+import json
+
+logging.basicConfig(level=logging.INFO)
+
def strip_prefix(s, prefix):
    """Return `s` with `prefix` removed, or None if `s` does not start with it."""
    return s[len(prefix):] if s.startswith(prefix) else None
+
def job_triple(job_name):
    """Map a release CI job name to the bindist/target name used in the
    uploaded file names, or None if the job should be ignored."""
    bindists = {
        'release-x86_64-windows-release': 'x86_64-unknown-mingw32',
        'release-x86_64-windows-int_native-release': 'x86_64-unknown-mingw32-int_native',
        'release-x86_64-ubuntu20_04-release': 'x86_64-ubuntu20_04-linux',
        'release-x86_64-linux-fedora33-release+debug_info': 'x86_64-fedora33-linux-dwarf',
        'release-x86_64-linux-fedora33-release': 'x86_64-fedora33-linux',
        'release-x86_64-linux-fedora27-release': 'x86_64-fedora27-linux',
        'release-x86_64-linux-deb11-release': 'x86_64-deb11-linux',
        'release-x86_64-linux-deb10-release+debug_info': 'x86_64-deb10-linux-dwarf',
        'release-x86_64-linux-deb10-release': 'x86_64-deb10-linux',
        'release-x86_64-linux-deb9-release': 'x86_64-deb9-linux',
        'release-x86_64-linux-centos7-release': 'x86_64-centos7-linux',
        'release-x86_64-linux-alpine3_12-release+fully_static': 'x86_64-alpine3_12-linux-static',
        'release-x86_64-linux-alpine3_12-int_native-release+fully_static': 'x86_64-alpine3_12-linux-static-int_native',
        'release-x86_64-darwin-release': 'x86_64-apple-darwin',
        'release-i386-linux-deb9-release': 'i386-deb9-linux',
        'release-armv7-linux-deb10-release': 'armv7-deb10-linux',
        'release-aarch64-linux-deb10-release': 'aarch64-deb10-linux',
        'release-aarch64-darwin-release': 'aarch64-apple-darwin',

        'source-tarball': 'src',
        'package-hadrian-bootstrap-sources': 'hadrian-bootstrap-sources',
        'doc-tarball': 'docs',
        'hackage-doc-tarball': 'hackage_docs',
    }

    # Some bindists use the +no_split_sections transformer due to upstream
    # toolchain bugs; those job names map to the same bindist name.
    no_split = {f'{name}+no_split_sections': triple
                for name, triple in bindists.items()}
    bindists.update(no_split)

    # Jobs not listed above (e.g. validate jobs) are skipped by the fetcher.
    return bindists.get(job_name)
+
def fetch_artifacts(release: str, pipeline_id: int,
                    dest_dir: Path, gl: gitlab.Gitlab):
    """Download the artifacts of every recognised job of a GHC release
    pipeline and place the (renamed) results in ``dest_dir``.

    release:     release name used in the output file names (e.g. "9.6.1")
    pipeline_id: id of the GitLab release pipeline to fetch
    dest_dir:    output directory (created if missing)
    gl:          authenticated python-gitlab client
    """
    dest_dir.mkdir(exist_ok=True)
    # Write the pipeline id into the output directory for traceability.
    with open(f"{dest_dir}/metadata.json", 'w') as out:
        json.dump({ "pipeline_id": pipeline_id }, out)

    proj = gl.projects.get('ghc/ghc')
    pipeline = proj.pipelines.get(pipeline_id)
    # Downloaded artifact archives are cached here so that re-runs do not
    # re-download everything.
    tmpdir = Path("fetch-gitlab")
    tmpdir.mkdir(exist_ok=True)
    for pipeline_job in pipeline.jobs.list(all=True):
        if len(pipeline_job.artifacts) == 0:
            logging.info(f'job {pipeline_job.name} ({pipeline_job.id}) has no artifacts')
            continue

        job = proj.jobs.get(pipeline_job.id)
        triple = job_triple(job.name)
        if triple is None:
            logging.info(f'ignoring {job.name}')
            continue

        try:
            destdir = tmpdir / job.name
            zip_name = Path(f"{tmpdir}/{job.name}.zip")
            # Only download when no (non-empty) cached archive exists.
            if not zip_name.exists() or zip_name.stat().st_size == 0:
                logging.info(f'downloading archive {zip_name} for job {job.name} (job {job.id})...')
                with open(zip_name, 'wb') as f:
                    job.artifacts(streamed=True, action=f.write)

            if zip_name.stat().st_size == 0:
                logging.info(f'artifact archive for job {job.name} (job {job.id}) is empty')
                continue

            subprocess.run(['unzip', '-bo', zip_name, '-d', destdir])
            bindist_files = list(destdir.glob('ghc*.tar.xz'))

            if job.name == 'source-tarball':
                # Source tarballs are copied through unchanged.
                for f in bindist_files:
                    dest = dest_dir / f.name
                    logging.info(f'extracted {job.name} to {dest}')
                    f.replace(dest)
            elif job.name == 'package-hadrian-bootstrap-sources':
                all_bootstrap_sources = destdir / 'hadrian-bootstrap-sources-all.tar.gz'
                dest = dest_dir / 'hadrian-bootstrap-sources'
                dest.mkdir()
                subprocess.run(['tar', '-xf', all_bootstrap_sources, '-C', dest])
                logging.info(f'extracted {job.name}/{all_bootstrap_sources} to {dest}')
            elif job.name == 'doc-tarball':
                # Unpack the documentation tarballs plus index.html into docs/.
                dest = dest_dir / 'docs'
                dest.mkdir()
                doc_files = list(destdir.glob('*.tar.xz'))
                for f in doc_files:
                    subprocess.run(['tar', '-xf', f, '-C', dest])
                    logging.info(f'extracted docs {f} to {dest}')
                index_path = destdir / 'index.html'
                index_path.replace(dest / 'index.html')
            elif job.name == 'hackage-doc-tarball':
                dest = dest_dir / 'hackage_docs'
                logging.info(f'moved hackage_docs to {dest}')
                (destdir / 'hackage_docs').replace(dest)
            else:
                # Binary distribution: rename to the canonical
                # ghc-<release>-<triple>.tar.xz form.
                dest = dest_dir / f'ghc-{release}-{triple}.tar.xz'
                if dest.exists():
                    logging.info(f'bindist {dest} already exists')
                    continue
                if len(bindist_files) == 0:
                    # logging.warn is a deprecated alias (removed in 3.13);
                    # use logging.warning.
                    logging.warning(f'no bindist found for {job.name}')
                    continue
                bindist = bindist_files[0]
                logging.info(f'extracted {job.name} to {dest}')
                bindist.replace(dest)
        except Exception as e:
            # Keep going: one broken job should not abort the whole fetch.
            logging.error(f'Error fetching job {job.name}: {e}')
+
def main():
    """CLI entry point: parse arguments, build a GitLab client and fetch."""
    import argparse

    ap = argparse.ArgumentParser()
    ap.add_argument('--pipeline', '-p', required=True, type=int, help="pipeline id")
    ap.add_argument('--release', '-r', required=True, type=str, help="release name")
    ap.add_argument('--output', '-o', type=Path, default=Path.cwd(), help="output directory")
    ap.add_argument('--profile', '-P', default='haskell',
                    help='python-gitlab.cfg profile name')
    opts = ap.parse_args()

    client = gitlab.Gitlab.from_config(opts.profile)
    fetch_artifacts(opts.release, opts.pipeline,
                    dest_dir=opts.output, gl=client)
diff --git a/.gitlab/rel_eng/fetch-gitlab-artifacts/setup.py b/.gitlab/rel_eng/fetch-gitlab-artifacts/setup.py
new file mode 100644
index 0000000000..7a0bd1a53e
--- /dev/null
+++ b/.gitlab/rel_eng/fetch-gitlab-artifacts/setup.py
@@ -0,0 +1,14 @@
#!/usr/bin/env python

# Packaging metadata for the fetch-gitlab console tool.
#
# NOTE(review): `entry_points` is a setuptools feature; plain distutils
# ignores unknown keyword arguments, so the `fetch-gitlab` console script
# presumably relies on the build environment (buildPythonPackage) injecting
# setuptools — confirm.  distutils itself is deprecated (PEP 632) and was
# removed in Python 3.12.

from distutils.core import setup

setup(name='fetch-gitlab',
      author='Ben Gamari',
      author_email='ben@smart-cactus.org',
      py_modules=['fetch_gitlab'],
      entry_points={
        'console_scripts': [
            'fetch-gitlab=fetch_gitlab:main',
        ]
      }
      )
diff --git a/.gitlab/rel_eng/nix/sources.json b/.gitlab/rel_eng/nix/sources.json
new file mode 100644
index 0000000000..7ff202a76f
--- /dev/null
+++ b/.gitlab/rel_eng/nix/sources.json
@@ -0,0 +1,68 @@
+{
+ "binutils-gdb": {
+ "branch": "master",
+ "repo": "https://sourceware.org/git/binutils-gdb.git",
+ "rev": "49c843e6d2d0577200e7c1d2d02855f21a3a9dde",
+ "type": "git"
+ },
+ "gdb-walkers": {
+ "branch": "master",
+ "description": "Bring mdb walkers to gdb, also add other helpful commands.",
+ "homepage": "",
+ "owner": "hardenedapple",
+ "repo": "gdb-walkers",
+ "rev": "c0701c4c87852bd09e21ca313c48dd4a649cfd0d",
+ "sha256": "1sd61a90lg8bkddl8lp15qady1wvbjmhjgm0d3lb813nwimlka9y",
+ "type": "tarball",
+ "url": "https://github.com/hardenedapple/gdb-walkers/archive/c0701c4c87852bd09e21ca313c48dd4a649cfd0d.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+ },
+ "niv": {
+ "branch": "master",
+ "description": "Easy dependency management for Nix projects",
+ "homepage": "https://github.com/nmattia/niv",
+ "owner": "nmattia",
+ "repo": "niv",
+ "rev": "82e5cd1ad3c387863f0545d7591512e76ab0fc41",
+ "sha256": "090l219mzc0gi33i3psgph6s2pwsc8qy4lyrqjdj4qzkvmaj65a7",
+ "type": "tarball",
+ "url": "https://github.com/nmattia/niv/archive/82e5cd1ad3c387863f0545d7591512e76ab0fc41.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+ },
+ "nixpkgs": {
+ "branch": "nixos-22.11",
+ "description": "Nix Packages collection",
+ "homepage": "",
+ "owner": "NixOS",
+ "repo": "nixpkgs",
+ "rev": "2d10e73416ec1449ef74aeac7faf2cf8c556ff5a",
+ "sha256": "00s89np0sqr3jxxp5h9nrpqy30fy4vsrmis6mmryrrmjqh09lpfv",
+ "type": "tarball",
+ "url": "https://github.com/NixOS/nixpkgs/archive/2d10e73416ec1449ef74aeac7faf2cf8c556ff5a.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+ },
+ "processor-trace": {
+ "branch": "master",
+ "description": "libipt - an Intel(R) Processor Trace decoder library",
+ "homepage": "",
+ "owner": "01org",
+ "repo": "processor-trace",
+ "rev": "c848a85c3104e2f5780741f85de5c9e65476ece2",
+ "sha256": "1ml8g6pm2brlcqp90yvgc780xf64d6k2km7fiqs88wvhlwsl7vzf",
+ "type": "tarball",
+ "url": "https://github.com/01org/processor-trace/archive/c848a85c3104e2f5780741f85de5c9e65476ece2.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+ },
+ "rr": {
+ "branch": "master",
+ "description": "Record and Replay Framework",
+ "homepage": "http://rr-project.org/",
+ "owner": "rr-debugger",
+ "repo": "rr",
+ "rev": "e77b5f8ca4b360daffd31cf72cb6b093fa9e0b62",
+ "sha256": "sha256:1gxphqcv1yw2ffmjp0d2cv0mpccr00pf9jhf44rq57jqdsvlfn2c",
+ "type": "tarball",
+ "url": "https://github.com/rr-debugger/rr/archive/3f87444659d1f063397fabc7791ed3b13b15c798.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+ }
+}
diff --git a/.gitlab/rel_eng/nix/sources.nix b/.gitlab/rel_eng/nix/sources.nix
new file mode 100644
index 0000000000..9a01c8acfc
--- /dev/null
+++ b/.gitlab/rel_eng/nix/sources.nix
@@ -0,0 +1,194 @@
+# This file has been generated by Niv.
+
+let
+
+ #
+ # The fetchers. fetch_<type> fetches specs of type <type>.
+ #
+
+ fetch_file = pkgs: name: spec:
+ let
+ name' = sanitizeName name + "-src";
+ in
+ if spec.builtin or true then
+ builtins_fetchurl { inherit (spec) url sha256; name = name'; }
+ else
+ pkgs.fetchurl { inherit (spec) url sha256; name = name'; };
+
+ fetch_tarball = pkgs: name: spec:
+ let
+ name' = sanitizeName name + "-src";
+ in
+ if spec.builtin or true then
+ builtins_fetchTarball { name = name'; inherit (spec) url sha256; }
+ else
+ pkgs.fetchzip { name = name'; inherit (spec) url sha256; };
+
+ fetch_git = name: spec:
+ let
+ ref =
+ if spec ? ref then spec.ref else
+ if spec ? branch then "refs/heads/${spec.branch}" else
+ if spec ? tag then "refs/tags/${spec.tag}" else
+ abort "In git source '${name}': Please specify `ref`, `tag` or `branch`!";
+ submodules = if spec ? submodules then spec.submodules else false;
+ submoduleArg =
+ let
+ nixSupportsSubmodules = builtins.compareVersions builtins.nixVersion "2.4" >= 0;
+ emptyArgWithWarning =
+ if submodules == true
+ then
+ builtins.trace
+ (
+ "The niv input \"${name}\" uses submodules "
+ + "but your nix's (${builtins.nixVersion}) builtins.fetchGit "
+ + "does not support them"
+ )
+ {}
+ else {};
+ in
+ if nixSupportsSubmodules
+ then { inherit submodules; }
+ else emptyArgWithWarning;
+ in
+ builtins.fetchGit
+ ({ url = spec.repo; inherit (spec) rev; inherit ref; } // submoduleArg);
+
+ fetch_local = spec: spec.path;
+
+ fetch_builtin-tarball = name: throw
+ ''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`.
+ $ niv modify ${name} -a type=tarball -a builtin=true'';
+
+ fetch_builtin-url = name: throw
+ ''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`.
+ $ niv modify ${name} -a type=file -a builtin=true'';
+
+ #
+ # Various helpers
+ #
+
+ # https://github.com/NixOS/nixpkgs/pull/83241/files#diff-c6f540a4f3bfa4b0e8b6bafd4cd54e8bR695
+ sanitizeName = name:
+ (
+ concatMapStrings (s: if builtins.isList s then "-" else s)
+ (
+ builtins.split "[^[:alnum:]+._?=-]+"
+ ((x: builtins.elemAt (builtins.match "\\.*(.*)" x) 0) name)
+ )
+ );
+
+ # The set of packages used when specs are fetched using non-builtins.
+ mkPkgs = sources: system:
+ let
+ sourcesNixpkgs =
+ import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) { inherit system; };
+ hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
+ hasThisAsNixpkgsPath = <nixpkgs> == ./.;
+ in
+ if builtins.hasAttr "nixpkgs" sources
+ then sourcesNixpkgs
+ else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
+ import <nixpkgs> {}
+ else
+ abort
+ ''
+ Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
+ add a package called "nixpkgs" to your sources.json.
+ '';
+
+ # The actual fetching function.
+ fetch = pkgs: name: spec:
+
+ if ! builtins.hasAttr "type" spec then
+ abort "ERROR: niv spec ${name} does not have a 'type' attribute"
+ else if spec.type == "file" then fetch_file pkgs name spec
+ else if spec.type == "tarball" then fetch_tarball pkgs name spec
+ else if spec.type == "git" then fetch_git name spec
+ else if spec.type == "local" then fetch_local spec
+ else if spec.type == "builtin-tarball" then fetch_builtin-tarball name
+ else if spec.type == "builtin-url" then fetch_builtin-url name
+ else
+ abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
+
+ # If the environment variable NIV_OVERRIDE_${name} is set, then use
+ # the path directly as opposed to the fetched source.
+ replace = name: drv:
+ let
+ saneName = stringAsChars (c: if isNull (builtins.match "[a-zA-Z0-9]" c) then "_" else c) name;
+ ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}";
+ in
+ if ersatz == "" then drv else
+ # this turns the string into an actual Nix path (for both absolute and
+ # relative paths)
+ if builtins.substring 0 1 ersatz == "/" then /. + ersatz else /. + builtins.getEnv "PWD" + "/${ersatz}";
+
+ # Ports of functions for older nix versions
+
+ # a Nix version of mapAttrs if the built-in doesn't exist
+ mapAttrs = builtins.mapAttrs or (
+ f: set: with builtins;
+ listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
+ );
+
+ # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295
+ range = first: last: if first > last then [] else builtins.genList (n: first + n) (last - first + 1);
+
+ # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257
+ stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1));
+
+ # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269
+ stringAsChars = f: s: concatStrings (map f (stringToCharacters s));
+ concatMapStrings = f: list: concatStrings (map f list);
+ concatStrings = builtins.concatStringsSep "";
+
+ # https://github.com/NixOS/nixpkgs/blob/8a9f58a375c401b96da862d969f66429def1d118/lib/attrsets.nix#L331
+ optionalAttrs = cond: as: if cond then as else {};
+
+ # fetchTarball version that is compatible between all the versions of Nix
+ builtins_fetchTarball = { url, name ? null, sha256 }@attrs:
+ let
+ inherit (builtins) lessThan nixVersion fetchTarball;
+ in
+ if lessThan nixVersion "1.12" then
+ fetchTarball ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
+ else
+ fetchTarball attrs;
+
+ # fetchurl version that is compatible between all the versions of Nix
+ builtins_fetchurl = { url, name ? null, sha256 }@attrs:
+ let
+ inherit (builtins) lessThan nixVersion fetchurl;
+ in
+ if lessThan nixVersion "1.12" then
+ fetchurl ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
+ else
+ fetchurl attrs;
+
+ # Create the final "sources" from the config
+ mkSources = config:
+ mapAttrs (
+ name: spec:
+ if builtins.hasAttr "outPath" spec
+ then abort
+ "The values in sources.json should not have an 'outPath' attribute"
+ else
+ spec // { outPath = replace name (fetch config.pkgs name spec); }
+ ) config.sources;
+
+ # The "config" used by the fetchers
+ mkConfig =
+ { sourcesFile ? if builtins.pathExists ./sources.json then ./sources.json else null
+ , sources ? if isNull sourcesFile then {} else builtins.fromJSON (builtins.readFile sourcesFile)
+ , system ? builtins.currentSystem
+ , pkgs ? mkPkgs sources system
+ }: rec {
+ # The sources, i.e. the attribute set of spec name to spec
+ inherit sources;
+
+ # The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
+ inherit pkgs;
+ };
+
+in
+mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }
diff --git a/.gitlab/rel_eng/upload.sh b/.gitlab/rel_eng/upload.sh
new file mode 100755
index 0000000000..6d1dfa1d53
--- /dev/null
+++ b/.gitlab/rel_eng/upload.sh
@@ -0,0 +1,250 @@
#!/usr/bin/env bash

set -e

# This is a script for preparing and uploading a release of GHC.
#
# Usage,
#   1. Update $ver
#   2. Set $SIGNING_KEY to your key id (prefixed with '=')
#   3. Create a directory and place the source and binary tarballs there
#   4. Run this script from that directory
#
# You can also invoke the script with an argument to perform only
# a subset of the usual release,
#
#   upload.sh recompress             produce lzip tarballs from xz tarballs
#
#   upload.sh gen_hashes             generate signed hashes of the release
#                                    tarballs
#
#   upload.sh prepare_docs           (deprecated) prepare the documentation directory
#                                    (this should be unnecessary as the script which
#                                    fetches artifacts should create this folder from
#                                    the doc-tarball job)
#
#   upload.sh upload_docs            upload documentation to hackage from the hackage_docs folder
#
#   upload.sh upload                 upload the tarballs and documentation
#                                    to downloads.haskell.org
#
# Prerequisites: moreutils

# Default signing key; override by exporting SIGNING_KEY.
if [ -z "$SIGNING_KEY" ]; then
    SIGNING_KEY="=Benjamin Gamari <ben@well-typed.com>"
fi


# Infer release name from directory name
if [ -z "$rel_name" ]; then
    rel_name="$(basename $(pwd))"
fi

# Infer version from tarball names
if [ -z "$ver" ]; then
    ver="$(ls ghc-*.tar.* | sed -ne 's/ghc-\([0-9]\+\.[0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?\).\+/\1/p' | head -n1)"
    if [ -z "$ver" ]; then echo "Failed to infer \$ver"; exit 1; fi
fi

# Host that release artifacts are mirrored to via sftp (see upload()).
host="gitlab-storage.haskell.org"
+
# Print usage/help text (echoed rather than a heredoc so $0 expands).
usage() {
    echo "Usage: [rel_name=<name>] ver=7.10.3-rc2 $0 <action>"
    echo
    echo "where,"
    echo "  ver gives the version number (e.g. the name of the tarballs, in the case of"
    echo "  a release candidate something like 7.10.3.20150820, otherwise just 7.10.3)"
    echo "  rel_name gives the release name (e.g. in the case of a release candidate 7.10.3-rc2"
    echo "  otherwise just 7.10.3)"
    echo "and <action> is one of,"
    echo "  [nothing]          do everything below"
    echo "  recompress         produce lzip and gzip tarballs from xz tarballs"
    echo "  gen_hashes         generated hashes of the release tarballs"
    echo "  sign               sign hashes of the release tarballs"
    echo "  prepare_docs       prepare the documentation directory"
    echo "  upload_docs        upload documentation downloads.haskell.org"
    echo "  upload             upload the tarballs and documentation to downloads.haskell.org"
    echo "  purge_all          purge entire release from the CDN"
    echo "  purge_file file    purge a given file from the CDN"
    echo "  verify             verify the signatures in this directory"
    echo
}
+
# Bail out with usage if no version could be inferred or supplied.
if [ -z "$ver" ]; then
    usage
    exit 1
fi
# NOTE(review): rel_name is already defaulted from the directory name above,
# so this fallback only fires if basename produced an empty string.
if [ -z "$rel_name" ]; then
    rel_name="$ver"
fi
+
# returns the set of files that must have hashes generated.
# Output shape is preserved: one space-joined line of archives, then one
# space-joined line of patches (callers word-split the result).
function hash_files() {
    local found
    found=$(find -maxdepth 1 \
        -iname '*.xz' \
        -o -iname '*.lz' \
        -o -iname '*.bz2' \
        -o -iname '*.zip' \
    )
    echo $found
    found=$(find -maxdepth 1 -iname '*.patch')
    echo $found
}
+
# Generate SHA1SUMS and SHA256SUMS for all release artifacts.
function gen_hashes() {
    echo -n "Hashing..."
    local files
    files=$(hash_files)
    # Run both digests in parallel; >| overrides noclobber if set.
    sha1sum $files >| SHA1SUMS &
    sha256sum $files >| SHA256SUMS &
    wait
    echo "done"
}
+
# Sign the hash files and tarballs with $SIGNING_KEY.  A file is skipped when
# a detached signature already exists and is either newer than the file or
# still verifies.  Fixes: deprecated `-a` inside `[ ]` replaced with `[[ && ]]`,
# and all expansions quoted.
function sign() {
    # Kill DISPLAY lest pinentry won't work
    DISPLAY=
    eval "$(gpg-agent --daemon --sh --pinentry-program $(which pinentry))"
    # $(hash_files) is intentionally unquoted: it is a word-split file list.
    for i in $(hash_files) SHA1SUMS SHA256SUMS; do
        if [[ -e "$i" && -e "$i.sig" && "$i.sig" -nt "$i" ]]; then
            echo "Skipping signing of $i"
            continue
        elif [[ -e "$i.sig" ]] && gpg2 --verify "$i.sig"; then
            # Don't resign if current signature is valid
            touch "$i.sig"
            continue
        fi
        echo "Signing $i"
        rm -f "$i.sig"
        gpg2 --use-agent --detach-sign --local-user="$SIGNING_KEY" "$i"
    done
}
+
# Verify every detached signature (*.sig) in the current directory.
# Prints a notice and returns successfully when there is nothing to verify.
function verify() {
    if [ "$(find -iname '*.sig' | wc -l)" -eq 0 ]; then
        echo "No signatures to verify"
        return
    fi

    for i in *.sig; do
        echo
        echo "Verifying $i"
        # Quote both the signature and the signed file name.
        gpg2 --verify "$i" "$(basename "$i" .sig)"
    done
}
+
# Verify signatures, fix permissions, then mirror the current directory to
# downloads.haskell.org (ghc/$dir) over sftp.  fetch-gitlab/ and out/ are
# the fetch script's scratch directories and are excluded.
function upload() {
    verify
    # World-readable, not world-writable, before mirroring.
    chmod ugo+r,o-w -R .
    # NOTE(review): $dir is intentionally not local — purge_all reads it
    # when run as part of the full release flow.
    dir=$(echo $rel_name | sed s/-release//)
    lftp -c " \
        open -u ghc: sftp://$host && \
        mirror -P20 -c --reverse --exclude=fetch-gitlab --exclude=out . ghc/$dir && \
        wait all;"
    # Make the uploaded archives read-only locally as a "done" marker.
    chmod ugo-w $(ls *.xz *.bz2 *.zip)
}
+
# Purge the entire release from the CDN cache.
function purge_all() {
    # When invoked standalone (`upload.sh purge_all`), $dir has not been set
    # by upload(); derive it from the release name in that case.
    local dir="${dir:-$(echo "$rel_name" | sed s/-release//)}"
    # Purge CDN cache
    curl -X PURGE http://downloads.haskell.org/ghc/
    curl -X PURGE http://downloads.haskell.org/~ghc/
    curl -X PURGE http://downloads.haskell.org/ghc/$dir
    curl -X PURGE http://downloads.haskell.org/ghc/$dir/
    curl -X PURGE http://downloads.haskell.org/~ghc/$dir
    curl -X PURGE http://downloads.haskell.org/~ghc/$dir/
    for i in *; do
        purge_file $i
    done
}
+
# Purge a single file from the CDN cache.
# $1 - file name, relative to the release directory
function purge_file() {
    # Bind the argument: the old code read the caller's loop variable $i,
    # which only worked via purge_all's dynamic scope and broke the
    # documented `upload.sh purge_file <file>` invocation.
    local i="$1"
    curl -X PURGE http://downloads.haskell.org/~ghc/$rel_name/$i
    curl -X PURGE http://downloads.haskell.org/~ghc/$rel_name/$i/
    curl -X PURGE http://downloads.haskell.org/~ghc/$rel_name/$i/docs/
    curl -X PURGE http://downloads.haskell.org/ghc/$rel_name/$i
    curl -X PURGE http://downloads.haskell.org/ghc/$rel_name/$i/
    curl -X PURGE http://downloads.haskell.org/ghc/$rel_name/$i/docs/
}
+
# (deprecated) Build the docs/ directory from the source tarball and the
# Linux/Windows bindists; normally the fetch script creates docs/ from the
# doc-tarball job instead.
function prepare_docs() {
    echo "THIS COMMAND IS DEPRECATED, THE DOCS FOLDER SHOULD BE PREPARED BY THE FETCH SCRIPT"
    local tmp
    rm -Rf docs
    # Unpack the source tarball unless the caller points GHC_TREE at a tree.
    if [ -z "$GHC_TREE" ]; then
        tmp="$(mktemp -d)"
        tar -xf "ghc-$ver-src.tar.xz" -C "$tmp"
        GHC_TREE="$tmp/ghc-$ver"
    fi
    mkdocs="$GHC_TREE/distrib/mkDocs/mkDocs"
    if [ ! -e "$mkdocs" ]; then
        echo "Couldn't find GHC mkDocs at $mkdocs."
        echo "Perhaps you need to override GHC_TREE?"
        rm -Rf "$tmp"
        exit 1
    fi
    windows_bindist="$(ls ghc-$ver-x86_64-unknown-mingw32.tar.xz | head -n1)"
    linux_bindist="$(ls ghc-$ver-x86_64-deb9-linux.tar.xz | head -n1)"
    echo "Windows bindist: $windows_bindist"
    echo "Linux bindist: $linux_bindist"
    # mkDocs must run inside the FHS env built by default.nix.
    $ENTER_FHS_ENV $mkdocs $linux_bindist $windows_bindist
    if [ -d "$tmp" ]; then rm -Rf "$tmp"; fi

    mkdir -p docs/html
    tar -Jxf "$linux_bindist"
    # BUG FIX: the closing quote previously spanned both cp arguments, so cp
    # received a single nonexistent path and the users guide was never copied.
    cp -R "ghc-$ver/docs/users_guide/build-html/users_guide" docs/html/users_guide
    #cp -R ghc-$ver/utils/haddock/doc/haddock docs/html/haddock
    rm -R "ghc-$ver"

    tar -Jxf docs/libraries.html.tar.xz -C docs/html
    mv docs/index.html docs/html
}
+
# Produce the alternative compression formats expected on downloads:
# .lz for every .xz, .bz2 for Darwin bindists, .zip for Windows bindists.
# Requires `combine` (moreutils), `parallel`, `lzip`.
function recompress() {
    # combine ... not ...: the set of .xz basenames with no .lz counterpart.
    combine <(basename -s .xz *.xz) not <(basename -s .lz *.lz) | \
        parallel 'echo "Recompressing {}.xz to {}.lz"; unxz -c {}.xz | lzip - -o {}.lz'

    for darwin_bindist in $(ls ghc-*-darwin.tar.xz); do
        local dest="$(basename $darwin_bindist .xz).bz2"
        if [[ ! -f "$dest" ]]; then
            echo "Recompressing Darwin bindist to bzip2..."
            unxz -c "$darwin_bindist" | bzip2 > "$dest"
        fi
    done

    for windows_bindist in $(ls ghc-*-mingw32*.tar.xz); do
        # Repack the tarball contents into a zip built from a scratch dir.
        local tmp="$(mktemp -d tmp.XXX)"
        local dest="$(realpath $(basename $windows_bindist .tar.xz).zip)"
        echo $dest
        if [[ ! -f "$dest" ]]; then
            echo "Recompressing Windows bindist to zip..."
            tar -C "$tmp" -xf "$windows_bindist"
            ls $tmp
            (cd "$tmp"; zip -9 -r "$dest" *)
        fi
        rm -R "$tmp"
    done
}
+
# Upload library documentation to Hackage using upload_ghc_libs.py from the
# release's own source tree.  Set PUBLISH (non-empty) to actually publish.
# Extra arguments are passed through to the upload script.
function upload_docs() {
    local tmp="$(mktemp -d)"
    tar -xf ghc-$ver-src.tar.xz -C "$tmp"
    GHC_TREE="$tmp/ghc-$ver"
    # Collect pass-through arguments as a real array so word boundaries
    # survive (the old `local args=$@` flattened them into one scalar).
    local args=( "$@" )
    if [[ -n "$PUBLISH" ]]; then
        echo "Publishing to Hackage..."
        args+=( "--publish" )
    fi
    "$GHC_TREE/.gitlab/rel_eng/upload_ghc_libs.py" upload --docs=hackage_docs "${args[@]}"
}
+
# With no argument, run the full release flow in order; otherwise treat the
# arguments as a function name (plus its arguments) to run on its own.
if [ -z "$1" ]; then
    recompress
    gen_hashes
    sign
    if [ ! -d docs ]; then
        prepare_docs || ( rm -R docs; exit 1 )
    fi
    if [ -d hackage_docs ]; then
        upload_docs
    fi
    upload
    purge_all
else
    # Quote "$@" so arguments containing spaces reach the function intact
    # (e.g. `upload.sh purge_file "some file"`).
    "$@"
fi
diff --git a/.gitlab/upload_ghc_libs.py b/.gitlab/rel_eng/upload_ghc_libs.py
index fdb1c55324..fdb1c55324 100755
--- a/.gitlab/upload_ghc_libs.py
+++ b/.gitlab/rel_eng/upload_ghc_libs.py