#!/usr/bin/env python3
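"""
GHC source-tree boot script (overview derived from the code below):

  * checks that GitHub clones have the 'packages-' URL rewrites configured,
  * checks that all mandatory boot packages listed in the 'packages' file are
    present (via their LICENSE files),
  * unless --hadrian is given, generates libraries/*/ghc.mk and
    libraries/*/GNUmakefile for the make-based build system,
  * runs autoreconf in every directory that has a configure.ac.

With --validate, the warning about a missing mk/build.mk is suppressed.
"""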
import glob
import os
import os.path
import sys
import argparse
from textwrap import dedent
import subprocess
import re
cwd = os.getcwd()
parser = argparse.ArgumentParser()
parser.add_argument('--validate', action='store_true', help='Run in validate mode')
parser.add_argument('--hadrian', action='store_true', help='Do not assume the make base build system')
args = parser.parse_args()
# Packages whose libraries aren't in the submodule root
EXCEPTIONS = {
    'libraries/containers/': 'libraries/containers/containers/'
}
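# Note: the EXCEPTIONS keys carry a trailing '/' because they are matched
# against glob results of the form 'libraries/<name>/' in boot_pkgs(), while
# check_boot_packages() appends the '/' itself before the lookup.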
def print_err(s):
    print(dedent(s), file=sys.stderr)

def die(mesg):
    print_err(mesg)
    sys.exit(1)
def check_for_url_rewrites():
    if os.path.isdir('.git') and \
       subprocess.check_output('git config remote.origin.url'.split()).find(b'github.com') != -1 and \
       subprocess.call(['git', 'config', '--get-regexp', '^url.*github.com/.*/packages-.insteadOf']) != 0:
        # If we cloned from github, make sure the url rewrites are set.
        # Otherwise 'git submodule update --init' prints confusing errors.
        die("""\
            It seems you cloned this repository from GitHub. But your git config files
            don't contain the url rewrites that are needed to make this work (GitHub
            doesn't support '/' in repository names, so we use a different naming scheme
            for the submodule repositories there).

            Please run the following commands first:

              git config --global url."git://github.com/ghc/packages-".insteadOf git://github.com/ghc/packages/
              git config --global url."http://github.com/ghc/packages-".insteadOf http://github.com/ghc/packages/
              git config --global url."https://github.com/ghc/packages-".insteadOf https://github.com/ghc/packages/
              git config --global url."ssh://git@github.com/ghc/packages-".insteadOf ssh://git@github.com/ghc/packages/
              git config --global url."git@github.com:/ghc/packages-".insteadOf git@github.com:/ghc/packages/

            And then:

              git submodule update --init
              ./boot

            Or start over, and clone the GHC repository from the haskell server:

              git clone --recursive git@gitlab.haskell.org:ghc/ghc.git

            For more information, see:
              * https://gitlab.haskell.org/ghc/ghc/wikis/newcomers or
              * https://gitlab.haskell.org/ghc/ghc/wikis/building/getting-the-sources#cloning-from-github
            """)
def check_boot_packages():
    # Check that we have all boot packages.
    for l in open('packages', 'r'):
        if l.startswith('#'):
            continue

        parts = [part for part in l.split(' ') if part]
        if len(parts) != 4:
            die("Error: Bad line in packages file: " + l)

        dir_ = parts[0]
        tag = parts[1]

        # If tag is not "-" then it is an optional repository, so its
        # absence isn't an error.
        if tag == '-':
            # We would like to just check for a .git directory here,
            # but in an lndir tree we avoid making .git directories,
            # so it doesn't exist. We therefore require that every repo
            # has a LICENSE file instead.
            license_path = os.path.join(EXCEPTIONS.get(dir_ + '/', dir_), 'LICENSE')
            if not os.path.isfile(license_path):
                die("""\
                    Error: %s doesn't exist
                    Maybe you haven't run 'git submodule update --init'?
                    """ % license_path)
# Create libraries/*/{ghc.mk,GNUmakefile}
def boot_pkgs():
    library_dirs = []

    for package in glob.glob("libraries/*/"):
        packages_file = os.path.join(package, 'ghc-packages')
        print(package)
        if os.path.isfile(packages_file):
            for subpkg in open(packages_file, 'r'):
                library_dirs.append(os.path.join(package, subpkg.strip()))
        elif package in EXCEPTIONS:
            library_dirs.append(EXCEPTIONS[package])
        else:
            library_dirs.append(package)

    for package in library_dirs:
        if package[-1] == '/':
            # drop trailing '/'
            package = package[:-1]

        dir_ = os.path.relpath(package, 'libraries')
        cabals = glob.glob(os.path.join(package, '*.cabal.in'))
        if len(cabals) == 0:
            cabals = glob.glob(os.path.join(package, '*.cabal'))

        if len(cabals) > 1:
            die('Too many .cabal files in %s' % package)
        elif len(cabals) == 1:
            cabal = cabals[0]
            if os.path.isfile(cabal):
                # strip both .cabal and .in
                pkg = os.path.splitext(os.path.splitext(os.path.basename(cabal))[0])[0]
                top = os.path.join(*['..'] * len(os.path.normpath(package).split(os.path.sep)))

                ghc_mk = os.path.join(package, 'ghc.mk')
                if os.path.exists(ghc_mk):
                    print('Skipping %s which already exists' % ghc_mk)
                    continue

                print('Creating %s' % ghc_mk)
                with open(ghc_mk, 'w') as f:
                    f.write(dedent(
                        """\
                        {package}_PACKAGE = {pkg}
                        {package}_dist-install_GROUP = libraries
                        $(if $(filter {dir},$(PACKAGES_STAGE0)),$(eval $(call build-package,{package},dist-boot,0)))
                        $(if $(filter {dir},$(PACKAGES_STAGE1)),$(eval $(call build-package,{package},dist-install,1)))
                        $(if $(filter {dir},$(PACKAGES_STAGE2)),$(eval $(call build-package,{package},dist-install,2)))
                        """.format(package = package,
                                   pkg = pkg,
                                   dir = dir_)))

                makefile = os.path.join(package, 'GNUmakefile')
                with open(makefile, 'w') as f:
                    f.write(dedent(
                        """\
                        dir = {package}
                        TOP = {top}
                        include $(TOP)/mk/sub-makefile.mk
                        FAST_MAKE_OPTS += stage=0
                        """.format(package = package, top = top)
                    ))
def autoreconf():
    # Run autoreconf on everything that needs it.
    processes = {}
    if os.name == 'nt':
        # Get the normalized ACLOCAL_PATH for Windows.
        # This is necessary since on Windows this will be a Windows
        # path, which autoreconf doesn't know how to handle.
        ac_local = os.getenv('ACLOCAL_PATH', '')
        ac_local_arg = re.sub(r';', r':', ac_local)
        ac_local_arg = re.sub(r'\\', r'/', ac_local_arg)
        ac_local_arg = re.sub(r'(\w):/', r'/\1/', ac_local_arg)
        reconf_cmd = 'ACLOCAL_PATH=%s autoreconf' % ac_local_arg
    else:
        reconf_cmd = 'autoreconf'

    for dir_ in ['.'] + glob.glob('libraries/*/'):
        if os.path.isfile(os.path.join(dir_, 'configure.ac')):
            print("Booting %s" % dir_)
            processes[dir_] = subprocess.Popen(['sh', '-c', reconf_cmd], cwd=dir_)

    # Wait for all child processes to finish.
    fail = False
    for k, v in processes.items():
        code = v.wait()
        if code != 0:
            print_err('autoreconf in %s failed with exit code %d' % (k, code))
            fail = True

    if fail:
        sys.exit(1)
def check_build_mk():
    if not args.validate and not os.path.isfile("mk/build.mk"):
        print(dedent(
            """
            WARNING: You don't have a mk/build.mk file.

            By default a standard GHC build will be done, which uses optimisation
            and builds the profiling libraries. This will take a long time, so may
            not be what you want if you are developing GHC or the libraries, rather
            than simply building it to use it.

            For information on creating a mk/build.mk file, please see:
                https://gitlab.haskell.org/ghc/ghc/wikis/building/using#build-configuration
            """))
check_for_url_rewrites()
check_boot_packages()

if not args.hadrian:
    boot_pkgs()

autoreconf()

if not args.hadrian:
    check_build_mk()