#!/usr/bin/env python
import os, sys, shutil, subprocess, logging, time, re, yaml, hashlib, argparse
from commands import getstatusoutput
from os.path import basename, dirname, abspath, exists, realpath, join
from os import makedirs, unlink, readlink
from glob import glob
from logging import debug,error
from datetime import datetime
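# Poor man's str.format: %-style interpolation driven by keyword
# arguments, e.g. format("%(x)s", x="1") -> "1".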
def format(s, **kwds):
return s % kwds
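# Write txt to the file at fn, replacing any previous content.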
def writeAll(fn, txt):
f = open(fn, "w")
f.write(txt)
f.close()
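# Derive the experiment prefix from the name this script was invoked as,
# e.g. "aliBuild" -> "ali".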
def star():
return basename(sys.argv[0]).replace("Build", "")
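# Use pigz (parallel gzip) to compress tarballs when it is available in
# PATH, falling back to plain gzip otherwise.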
def gzip():
return getstatusoutput("which pigz")[0] and "gzip" or "pigz"
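# Run a shell command, streaming its output line by line to the debug
# logger, and return its exit code.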
def execute(command, printer=debug):
popen = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
lines_iterator = iter(popen.stdout.readline, "")
for line in lines_iterator:
debug(line.strip("\n")) # yield line
output = popen.communicate()[0]
debug(output)
exitCode = popen.returncode
return exitCode
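# Print msg and abort with a non-zero exit code when error is truthy.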
def dieOnError(error, msg):
if error:
print msg
sys.exit(1)
def updateReferenceRepos(referenceSources, p, spec):
# Update source reference area, if possible.
# If the area is already there and cannot be written, assume it is maintained
# by someone else.
#
# If the area can be created, clone a bare repository with the sources.
debug("Updating references.")
referenceRepo = "%s/%s" % (abspath(referenceSources), p.lower())
if os.access(dirname(referenceSources), os.W_OK):
getstatusoutput("mkdir -p %s" % referenceSources)
writeableReference = os.access(referenceSources, os.W_OK)
if not writeableReference and exists(referenceRepo):
debug("Using %s as reference for %s." % (referenceRepo, p))
spec["reference"] = referenceRepo
return
if not writeableReference:
debug("Cannot create reference for %s in specified folder.", p)
return
err, out = getstatusoutput("mkdir -p %s" % abspath(referenceSources))
if not "source" in spec:
return
if not exists(referenceRepo):
cmd = ["git", "clone", "--bare", spec["source"], referenceRepo]
debug(" ".join(cmd))
err = execute(" ".join(cmd))
else:
err = execute(format("cd %(referenceRepo)s && "
"git fetch --tags %(source)s 2>&1 && "
"git fetch %(source)s 2>&1",
referenceRepo=referenceRepo,
source=spec["source"]))
dieOnError(err, "Error while updating reference repos %s." % spec["source"])
spec["reference"] = referenceRepo
def getDirectoryHash(d):
err, out = getstatusoutput("GIT_DIR=%s/.git git rev-parse HEAD" % d)
dieOnError(err, "Impossible to find reference for %s " % d)
return out
# Creates a directory in the store which contains symlinks to the package
# and its direct / indirect dependencies
def createDistLinks(spec, args, repoType, requiresType):
target = format("TARS/%(a)s/%(rp)s/%(p)s/%(p)s-%(v)s-%(r)s",
a=args.architecture,
rp=repoType,
p=spec["package"],
v=spec["version"],
r=spec["revision"])
shutil.rmtree(join(args.workDir, target), True)
for x in [spec["package"]] + list(spec[requiresType]):
dep = specs[x]
source = format("../../../../../TARS/%(a)s/store/%(sh)s/%(h)s/%(p)s-%(v)s-%(r)s.%(a)s.tar.gz",
a=args.architecture,
sh=dep["hash"][0:2],
h=dep["hash"],
p=dep["package"],
v=dep["version"],
r=dep["revision"])
err = execute(format("cd %(workDir)s &&"
"mkdir -p %(target)s &&"
"ln -sfn %(source)s %(target)s",
workDir = args.workDir,
target=target,
source=source))
rsyncOptions = ""
if args.writeStore:
cmd = format("cd %(w)s && "
"rsync -avR %(o)s --ignore-existing %(t)s/ %(rs)s/",
w=args.workDir,
rs=args.writeStore,
o=rsyncOptions,
t=target)
execute(cmd)
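# Yield the entries of requires which apply to the given architecture.
# Each entry is either a plain package name or "package:regex"; in the
# latter case the dependency is kept only if the regex matches arch.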
def filterByArchitecture(arch, requires):
for r in requires:
require, matcher = ":" in r and r.split(":", 1) or (r, ".*")
if re.match(matcher, arch):
yield require
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("action")
parser.add_argument("pkgname")
parser.add_argument("--config-dir", "-c", dest="configDir", default="%sdist" % star())
parser.add_argument("--devel", dest="devel", default=[])
parser.add_argument("--docker", dest="docker", action="store_true", default=False)
parser.add_argument("--work-dir", "-w", dest="workDir", default="sw")
parser.add_argument("--architecture", "-a", dest="architecture", required=True)
parser.add_argument("-e", dest="environment", action='append', default=[])
parser.add_argument("-v", dest="volumes", action='append', default=[],
help="Specify volumes to be used in Docker")
parser.add_argument("--jobs", "-j", dest="jobs", default=1)
parser.add_argument("--reference-sources", dest="referenceSources", default="sw/MIRROR")
parser.add_argument("--remote-store", dest="remoteStore", default="",
help="Where to find packages already built for reuse."
"Use ssh:// in front for remote store. End with ::rw if you want to upload.")
parser.add_argument("--write-store", dest="writeStore", default="",
help="Where to upload the built packages for reuse."
"Use ssh:// in front for remote store.")
parser.add_argument("--debug", "-d", dest="debug", action="store_true", default=False)
args = parser.parse_args()
logger = logging.getLogger()
logger_handler = logging.StreamHandler()
logger.addHandler(logger_handler)
if args.debug:
logger.setLevel(logging.DEBUG)
logger_handler.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
else:
logger.setLevel(logging.INFO)
# The docker image is given by the first part of the architecture we want to
# build for.
dockerImage = ""
if args.docker:
dockerImage = "alisw/%s-builder" % args.architecture.split("_")[0]
rsyncOptions = ""
if args.remoteStore.startswith("ssh://"):
args.remoteStore = args.remoteStore[6:]
if args.writeStore.startswith("ssh://"):
args.writeStore = args.writeStore[6:]
dieOnError(args.remoteStore.endswith("::rw") and args.writeStore,
"You cannot specify ::rw and --write-store at the same time")
if args.remoteStore.endswith("::rw"):
args.remoteStore = args.remoteStore[0:-4]
args.writeStore = args.remoteStore
if args.devel:
args.devel = args.devel.split(",")
print "Write store disabled since --devel option passed."
print "Development packages: %s" % " ".join(args.devel)
args.writeStore = ""
# Setup build environment.
if args.action != "build":
parser.error("Action %s unsupported" % args.action)
packages = [args.pkgname]
specs = {}
buildOrder = []
workDir = abspath(args.workDir)
specDir = "%s/SPECS" % workDir
if not exists(specDir):
makedirs(specDir)
debug(format("Using %(star)sBuild from "
"%(star)sbuild@%(toolHash)s recipes "
"in %(star)sdist@%(distHash)s",
star=star(),
toolHash=getDirectoryHash(dirname(__file__)),
distHash=getDirectoryHash(args.configDir)))
while packages:
p = packages.pop(0)
if p in specs:
continue
try:
d = open("%s/%s.sh" % (args.configDir, p.lower())).read()
except IOError,e:
dieOnError(True, str(e))
header, recipe = d.split("---", 1)
spec = yaml.safe_load(header)
# For the moment we treat build_requires just as requires.
fn = lambda what: filterByArchitecture(args.architecture, spec.get(what, []))
spec["requires"] = [x for x in fn("requires")]
spec["build_requires"] = [x for x in fn("build_requires")]
spec["runtime_requires"] = spec["requires"]
spec["requires"] = spec["runtime_requires"] + spec["build_requires"]
# Check that version is a string
dieOnError(not isinstance(spec["version"], basestring),
"In recipe \"%s\": version must be a string" % p)
spec["tag"] = spec.get("tag", spec["version"])
spec["version"] = spec["version"].replace("/", "_")
spec["recipe"] = recipe.strip("\n")
specs[spec["package"]] = spec
packages += spec["requires"]
# Do topological sort to have the correct build order even in the
# case of non-tree like dependencies.
# The actual algorithm used can be found at:
#
# http://www.stoimen.com/blog/2012/10/01/computer-algorithms-topological-sort-of-a-graph/
#
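# We repeatedly pick the packages which have no outstanding dependencies,
# append them to the build order and drop the edges pointing to them,
# freeing up the packages which depended only on them.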
edges = [(p["package"], d) for p in specs.values() for d in p["requires"] ]
L = [l for l in specs.values() if not l["requires"]]
S = []
while L:
spec = L.pop(0)
S.append(spec)
nextVertex = [e[0] for e in edges if e[1] == spec["package"]]
edges = [e for e in edges if e[1] != spec["package"]]
hasPredecessors = set([m for e in edges for m in nextVertex if e[0] == m])
withoutPredecessors = set(nextVertex) - hasPredecessors
L += [specs[m] for m in withoutPredecessors]
buildOrder = [s["package"] for s in S]
debug("We will build packages in the following order: %s", " ".join(buildOrder))
# Date fields to substitute: they are zero-padded
now = datetime.now()
nowKwds = { "year": str(now.year),
"month": str(now.month).zfill(2),
"day": str(now.day).zfill(2),
"hour": str(now.hour).zfill(2) }
# Resolve the tag to the actual commit ref, so that we can use it when
# calculating the build hash and the version.
for p in buildOrder:
spec = specs[p]
spec["commit_hash"] = "0"
if "source" in spec:
# Replace source with local one if we are in development mode.
if spec["package"] in args.devel:
spec["source"] = join(os.getcwd(), spec["package"])
cmd = format("git ls-remote --heads %(source)s",
source = spec["source"])
err, out = getstatusoutput(cmd)
dieOnError(err, "Unable to fetch from %s" % spec["source"])
# Tag may contain date params like %(year)s, %(month)s, %(day)s, %(hour)s.
spec["tag"] = format(spec["tag"], **nowKwds)
# By default we assume tag is a commit hash.
spec["commit_hash"] = spec["tag"]
for l in out.split("\n"):
if l.endswith("refs/heads/%s" % spec["tag"]) or spec["package"] in args.devel:
spec["commit_hash"] = l.split("\t", 1)[0]
# We are in development mode: we need to rebuild if the commit hash
# is different or if there are extra changes on top of it.
if spec["package"] in args.devel:
cmd = format("cd %(source)s ; git diff -r HEAD | sha1sum",
source = spec["source"])
debug(cmd)
err, out = getstatusoutput(cmd)
debug(out)
dieOnError(err, "Unable to detect source code changes.")
spec["devel_hash"] = spec["commit_hash"] + out
spec["commit_hash"] = "0"
break
# Version may contain the same date params as tag, plus %(commit_hash)s,
# %(short_hash)s, %(tag)s and %(tag_basename)s.
spec["version"] = format(spec["version"],
commit_hash=spec["commit_hash"],
short_hash=spec["commit_hash"][0:10],
tag=spec["tag"],
tag_basename=basename(spec["tag"]),
**nowKwds)
# Decide what is the main package we are building and at what commit.
#
# We emit an event for the main package, when encountered, so that we can use
# it to index builds of the same hash on different architectures. We also
# make sure we add the main package and its hash to the debug log, so that we
# can always extract them from it.
# If one of the special packages is in the list of packages to be built,
# we use it as main package, rather than the last one.
mainPackage = buildOrder[-1]
MAIN_PACKAGES = ["aliroot", "aliphysics", "o2"]
hasMainPackages = [x for x in reversed(buildOrder)
if x.lower() in MAIN_PACKAGES]
if hasMainPackages:
mainPackage = hasMainPackages[-1]
mainHash = specs[mainPackage]["commit_hash"]
debug("Main package is %s@%s" % (mainPackage, mainHash))
if args.debug:
logger_handler.setFormatter(
logging.Formatter("%%(levelname)s:%s:%s: %%(message)s" %
(mainPackage, mainHash[0:8])))
# Now that we have the main package set, we can print out useful information
# which we will be able to associate with this build.
for p in buildOrder:
spec = specs[p]
if "source" in spec:
debug("Commit hash for %s@%s is %s" % (spec["source"], spec["tag"], spec["commit_hash"]))
# Calculate the hashes. We do this in build order so that we can guarantee
# that the hashes of the dependencies are calculated first. Also notice that
# if the commit hash is a real hash, and not a tag, we can safely assume
# that's unique, and therefore we can avoid putting the repository or the
# name of the branch in the hash.
debug("Calculating hashes.")
for p in buildOrder:
spec = specs[p]
h = hashlib.sha1()
for x in ["recipe", "version", "package", "commit_hash",
"env", "append_path", "prepend_path"]:
h.update(str(spec.get(x, "none")))
if spec["commit_hash"] == spec.get("tag", "0"):
h.update(spec.get("source", "none"))
if "source" in spec:
h.update(spec["tag"])
for dep in spec.get("requires", []):
h.update(specs[dep]["hash"])
if bool(spec.get("force_rebuild", False)):
h.update(str(time.time()))
if spec["package"] in args.devel and "incremental_recipe" in spec:
h.update(spec["incremental_recipe"])
incremental_hash = hashlib.sha1(spec["incremental_recipe"]).hexdigest()
spec["incremental_hash"] = "INCREMENTAL_BUILD_HASH=%s" % incremental_hash
elif p in args.devel:
h.update(spec.get("devel_hash", ""))
spec["hash"] = h.hexdigest()
debug("Hash for recipe %s is %s" % (p, spec["hash"]))
# This adds to the spec the locations, local and remote, where the various
# tarballs and links should be found.
for p in buildOrder:
spec = specs[p]
pkgSpec = {
"repo": workDir,
"package": spec["package"],
"version": spec["version"],
"hash": spec["hash"],
"prefix": spec["hash"][0:2],
"architecture": args.architecture
}
tarSpecs = [
("storePath", "TARS/%(architecture)s/store/%(prefix)s/%(hash)s"),
("linksPath", "TARS/%(architecture)s/%(package)s"),
("tarballHashDir", "%(repo)s/TARS/%(architecture)s/store/%(prefix)s/%(hash)s"),
("tarballLinkDir", "%(repo)s/TARS/%(architecture)s/%(package)s")
]
spec.update(dict([(x, format(y, **pkgSpec)) for (x, y) in tarSpecs]))
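# storePath / tarballHashDir are the content-addressed locations of the
# tarballs, while linksPath / tarballLinkDir hold the per-package symlinks
# pointing into the store.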
# We recursively calculate the full set of requires "full_requires"
# including build_requires and the subset of them which are needed at
# runtime "full_runtime_requires".
for p in buildOrder:
spec = specs[p]
todo = [p]
spec["full_requires"] = []
spec["full_runtime_requires"] = []
while todo:
i = todo.pop(0)
requires = specs[i].get("requires", [])
runTimeRequires = specs[i].get("runtime_requires", [])
spec["full_requires"] += requires
spec["full_runtime_requires"] += runTimeRequires
todo += requires
spec["full_requires"] = set(spec["full_requires"])
spec["full_runtime_requires"] = set(spec["full_runtime_requires"])
# We now iterate on all the packages, making sure we build correctly every
# single one of them. This is done this way so that the second time we run we
# can check if the build was consistent and if it is, we bail out.
packageIterations = 0
while buildOrder:
packageIterations += 1
if packageIterations > 20:
print "Too many attempts at building %s. Something wrong with the repository?"
exit(1)
p = buildOrder[0]
spec = specs[p]
# Since we can execute this multiple times for a given package, in order to
# ensure consistency, we need to reset things and make them pristine.
spec.pop("revision", None)
debug("Updating from tarballs")
# If we arrived here it really means we have a tarball which was created
# using the same recipe. We will use it as a cache for the build. This means
# that while we will still perform the build process, rather than
# executing the build itself we will:
#
# - Unpack it in a temporary place.
# - Invoke the relocation specifying the correct work_dir and the
# correct path which should have been used.
# - Move the version directory to its final destination, including the
# correct revision.
# - Repack it and put it in the store with the new revision.
#
# this will result in a new package which has the same binary contents as
# the old one but where the relocation will work for the new directory. Here
# we simply store the fact that we can reuse the contents of cachedTarball.
if args.remoteStore:
debug("Updating remote store for package %s@%s" % (p, spec["hash"]))
cmd = format("mkdir -p %(tarballHashDir)s\n"
"rsync -av %(rsyncOptions)s %(remoteStore)s/%(storePath)s/ %(tarballHashDir)s/ || true\n"
"rsync -av --delete %(rsyncOptions)s %(remoteStore)s/%(linksPath)s/ %(tarballLinkDir)s/ || true\n",
rsyncOptions=rsyncOptions,
remoteStore=args.remoteStore,
storePath=spec["storePath"],
linksPath=spec["linksPath"],
tarballHashDir=spec["tarballHashDir"],
tarballLinkDir=spec["tarballLinkDir"])
err = execute(cmd)
dieOnError(err, "Unable to update from specified store.")
# Decide how it should be called, based on the hash and what is already
# available.
debug("Checking for packages already built.")
linksGlob = format("%(w)s/TARS/%(a)s/%(p)s/%(p)s-%(v)s-*.%(a)s.tar.gz",
w=workDir,
a=args.architecture,
p=spec["package"],
v=spec["version"])
debug("Glob pattern used: %s" % linksGlob)
packages = glob(linksGlob)
# If there is no installed software, the revision is 1.
# If there are already installed packages:
# - Reuse the revision of the package whose hash matches ours.
# - Otherwise pick the lowest revision number not already in use.
debug("Packages already built using this version\n%s" % "\n".join(packages))
busyRevisions = []
for d in packages:
realPath = readlink(d)
matcher = format("../../%(a)s/store/[0-9a-f]{2}/([0-9a-f]*)/%(p)s-%(v)s-([0-9]*).%(a)s.tar.gz",
a=args.architecture,
p=spec["package"],
v=spec["version"])
m = re.match(matcher, realPath)
if not m:
continue
h, revision = m.groups()
revision = int(revision)
# If we have a hash match, we use the old revision for the package
# and we do not need to build it.
if h == spec["hash"]:
spec["revision"] = revision
if spec["package"] in args.devel and "incremental_recipe" in spec:
spec["obsolete_tarball"] = d
else:
print "Package %s with hash %s is already found in %s. Not building." % (p, h, d)
break
else:
busyRevisions.append(revision)
if not "revision" in spec and busyRevisions:
spec["revision"] = min(set(range(1, max(busyRevisions)+2)) - set(busyRevisions))
elif not "revision" in spec:
spec["revision"] = "1"
# Now that we have all the information about the package we want to build, let's
# check if it wasn't built / unpacked already.
hashFile = "%s/%s/%s/%s-%s/.build-hash" % (workDir,
args.architecture,
spec["package"],
spec["version"],
spec["revision"])
try:
fileHash = open(hashFile).read().strip("\n")
except:
fileHash = "0"
if fileHash != spec["hash"]:
if fileHash != "0":
debug("Mismatch between local area and the one which I should build. Redoing.")
shutil.rmtree(dirname(hashFile), True)
else:
# If we get here, we know we are in sync with whatever remote store. We
# can therefore create a directory which contains all the packages which
# were used to compile this one.
debug("Package %s was correctly compiled. Moving to next one." % spec["package"])
# If using incremental builds, next time we execute the script we need to remove
# the placeholders which avoid rebuilds.
if spec["package"] in args.devel and "incremental_recipe" in spec:
unlink(hashFile)
if "obsolete_tarball" in spec:
unlink(realpath(spec["obsolete_tarball"]))
unlink(spec["obsolete_tarball"])
# We need to create multiple sets of links: one with the full requires, one
# with only the direct dependencies (that is required to register packages
# in Alien) and one with the full runtime requires.
createDistLinks(spec, args, "dist", "full_requires")
createDistLinks(spec, args, "dist-direct", "requires")
createDistLinks(spec, args, "dist-runtime", "full_runtime_requires")
buildOrder.pop(0)
packageIterations = 0
continue
debug("Looking for cached tarball in %s" % spec["tarballHashDir"])
# FIXME: I should get the tarballHashDir updated with the server at this point.
# It does not really matter whether the symlinks are ok at this point,
# as I only use the tarballs as reusable binary blobs.
spec["cachedTarball"] = ""
if not spec["package"] in args.devel:
spec["cachedTarball"] = (glob("%s/*" % spec["tarballHashDir"]) or [""])[0]
debug(spec["cachedTarball"] and
"Found tarball in %s" % spec["cachedTarball"] or
"No cache tarballs found")
# FIXME: Why doing it here? This should really be done before anything else.
updateReferenceRepos(args.referenceSources, p, spec)
# Generate the part which sources the environment for all the dependencies.
# Notice that we guarantee that a dependency is always sourced before the
# parts depending on it, but we do not guarantee anything for the order in
# which unrelated components are activated.
dependencies = ""
dependenciesInit = ""
for dep in spec.get("requires", []):
depSpec = specs[dep]
depInfo = {
"architecture": args.architecture,
"package": dep,
"version": depSpec["version"],
"revision": depSpec["revision"],
"bigpackage": dep.upper().replace("-", "_")
}
dependencies += format("source \"$WORK_DIR/%(architecture)s/%(package)s/%(version)s-%(revision)s/etc/profile.d/init.sh\"\n",
**depInfo)
dependenciesInit += format('echo source \${WORK_DIR}/%(architecture)s/%(package)s/%(version)s-%(revision)s/etc/profile.d/init.sh >> \"$INSTALLROOT/etc/profile.d/init.sh\"\n',
**depInfo)
# Generate the part which creates the environment for the package.
# This can be either variables set via the "env" keyword in the metadata
# or paths which get appended via the "append_path" one.
# By default we append LD_LIBRARY_PATH, PATH and DYLD_LIBRARY_PATH
# FIXME: do not append variables for Mac on Linux.
environment = ""
dieOnError(not isinstance(spec.get("env", {}), dict),
"Tag `env' in %s should be a dict." % p)
for key,value in spec.get("env", {}).iteritems():
environment += format("echo 'export %(key)s=%(value)s' >> $INSTALLROOT/etc/profile.d/init.sh\n",
key=key,
value=value)
basePath = "%s_ROOT" % p.upper().replace("-", "_")
pathDict = spec.get("append_path", {})
dieOnError(not isinstance(pathDict, dict),
"Tag `append_path' in %s should be a dict." % p)
for pathName,pathVal in pathDict.iteritems():
pathVal = isinstance(pathVal, list) and pathVal or [ pathVal ]
environment += format("echo 'export %(key)s=%(value)s:$%(key)s' >> \"$INSTALLROOT/etc/profile.d/init.sh\"\n",
key=pathName,
value=":".join(pathVal))
# Same thing, but prepending the results so that they win against system ones.
defaultPrependPaths = { "LD_LIBRARY_PATH": "$%s/lib" % basePath,
"DYLD_LIBRARY_PATH": "$%s/lib" % basePath,
"PATH": "$%s/bin" % basePath }
pathDict = spec.get("prepend_path", {})
dieOnError(not isinstance(pathDict, dict),
"Tag `prepend_path' in %s should be a dict." % p)
for pathName,pathVal in pathDict.iteritems():
pathDict[pathName] = isinstance(pathVal, list) and pathVal or [ pathVal ]
for pathName,pathVal in defaultPrependPaths.iteritems():
pathDict[pathName] = [ pathVal ] + pathDict.get(pathName, [])
for pathName,pathVal in pathDict.iteritems():
environment += format("echo 'export %(key)s=%(value)s:$%(key)s' >> \"$INSTALLROOT/etc/profile.d/init.sh\"\n",
key=pathName,
value=":".join(pathVal))
# The actual build script.
referenceStatement = ""
if "reference" in spec:
referenceStatement = "export GIT_REFERENCE=${GIT_REFERENCE_OVERRIDE:-%s}/%s" % (dirname(spec["reference"]), basename(spec["reference"]))
debug(spec)
fp = open(dirname(realpath(__file__))+'/build_template.sh', 'r')
cmd_raw = fp.read()
fp.close()
source = spec.get("source", "")
# Shorten the commit hash in case it's a real commit hash and not simply
# the tag.
commit_hash = spec["commit_hash"]
if spec["tag"] != spec["commit_hash"]:
commit_hash = spec["commit_hash"][0:10]
# Split the source in two parts, sourceDir and sourceName. This is done so
# that when we use Docker we can replace sourceDir with the correct
# container path, if required. No changes for what concerns the standard
# bash builds, though.
cmd = format(cmd_raw,
dependencies=dependencies,
dependenciesInit=dependenciesInit,
environment=environment,
workDir=workDir,
configDir=abspath(args.configDir),
pkgname=spec["package"],
hash=spec["hash"],
incremental_hash=spec.get("incremental_hash", ""),
incremental_recipe=spec.get("incremental_recipe", ":"),
version=spec["version"],
revision=spec["revision"],
architecture=args.architecture,
jobs=args.jobs,
sourceDir=source and (dirname(source) + "/") or "",
sourceName=source and basename(source) or "",
write_repo=spec.get("write_repo", source),
tag=spec["tag"],
commit_hash=commit_hash,
referenceStatement=referenceStatement,
cachedTarball=spec["cachedTarball"],
gzip=gzip())
scriptDir = "%s/SPECS/%s/%s/%s-%s" % (workDir,
args.architecture,
spec["package"],
spec["version"],
spec["revision"])
err, out = getstatusoutput("mkdir -p %s" % scriptDir)
writeAll("%s/build.sh" % scriptDir, cmd)
writeAll("%s/%s.sh" % (scriptDir, spec["package"]), spec["recipe"])
print "Building %s@%s" % (spec["package"], spec["version"])
# In case the --docker option is passed, we set up a docker container which
# will perform the actual build. Otherwise build as usual using bash.
if dockerImage:
additionalEnv = ""
additionalVolumes = ""
develVolumes = ""
mirrorVolume = "reference" in spec and " -v %s:/mirror" % dirname(spec["reference"]) or ""
overrideSource = source.startswith("/") and "-e SOURCE0_DIR_OVERRIDE=/" or ""
for devel in args.devel:
develVolumes += " -v $PWD/`readlink %s || echo %s`:/%s:ro" % (devel, devel, devel)
for env in args.environment:
additionalEnv += " -e %s" % env
for volume in args.volumes:
additionalVolumes += " -v %s" % volume
dockerWrapper = format("docker run --rm -it"
" -v %(workdir)s:/sw"
" -v %(scriptDir)s/build.sh:/build.sh:ro"
" %(mirrorVolume)s"
" %(develVolumes)s"
" %(additionalEnv)s"
" %(additionalVolumes)s"
" -e ARCHITECTURE=%(architecture)s"
" -e GIT_REFERENCE_OVERRIDE=/mirror"
" -e JOBS=%(jobs)s"
" %(overrideSource)s"
" -e WORK_DIR_OVERRIDE=/sw"
" %(image)s"
" /bin/bash -e -x /build.sh",
additionalEnv=additionalEnv,
additionalVolumes=additionalVolumes,
architecture=args.architecture,
develVolumes=develVolumes,
workdir=abspath(args.workDir),
image=dockerImage,
mirrorVolume=mirrorVolume,
jobs=args.jobs,
overrideSource=overrideSource,
scriptDir=scriptDir)
debug(dockerWrapper)
err = execute(dockerWrapper)
else:
err = execute("/bin/bash -e -x %s/build.sh 2>&1" % scriptDir)
dieOnError(err, "Error while executing %s/build.sh" % scriptDir)
if args.writeStore:
tarballNameWithRev = format("%(package)s-%(version)s-%(revision)s.%(architecture)s.tar.gz",
architecture=args.architecture,
**spec)
cmd = format("cd %(workdir)s && "
"rsync -avR %(rsyncOptions)s --ignore-existing %(storePath)s/%(tarballNameWithRev)s %(remoteStore)s/ &&"
"rsync -avR %(rsyncOptions)s --ignore-existing %(linksPath)s/%(tarballNameWithRev)s %(remoteStore)s/",
workdir=args.workDir,
remoteStore=args.remoteStore,
rsyncOptions=rsyncOptions,
storePath=spec["storePath"],
linksPath=spec["linksPath"],
tarballNameWithRev=tarballNameWithRev)
err = execute(cmd)
dieOnError(err, "Unable to upload tarball.")