#!/usr/bin/python3
##===- utils/llvmbuild - Build the LLVM project ----------------*-python-*-===##
#
#                     The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
#
##===----------------------------------------------------------------------===##
#
# This script builds many different flavors of the LLVM ecosystem. It
# will build LLVM, Clang and dragonegg as well as run tests on them.
# This script is a convenient way to check builds and tests before
# committing changes to the upstream repository.
#
# A typical source setup uses three trees and looks like this:
#
# official
#   dragonegg
#   llvm
#     tools
#       clang
# staging
#   dragonegg
#   llvm
#     tools
#       clang
# commit
#   dragonegg
#   llvm
#     tools
#       clang
#
# In a typical workflow, the "official" tree always contains unchanged
# sources from the main LLVM project repositories. The "staging" tree
# is where local work is done. A set of changes resides there waiting
# to be moved upstream. The "commit" tree is where changes from
# "staging" make their way upstream. Individual incremental changes
# from "staging" are applied to "commit" and committed upstream after
# a successful build and test run. A successful build is one in which
# testing results in no more failures than seen in the testing of the
# "official" tree.
#
# A build may be invoked as such:
#
# llvmbuild --src=~/llvm/commit --src=~/llvm/staging --src=~/llvm/official
#   --build=debug --build=release --build=paranoid
#   --prefix=/home/greened/install --builddir=/home/greened/build
#
# This will build the LLVM ecosystem, including LLVM, Clang and
# dragonegg, putting build results in ~/build and installing tools in
# ~/install. llvm-compilers-check creates separate build and install
# directories for each source/build flavor. In the above example,
# llvmbuild will build debug, release and paranoid (debug+checks)
# flavors from each source tree (official, staging and commit) for a
# total of nine builds. All builds will be run in parallel.
#
# The user may control parallelism via the --jobs and --threads
# switches. --jobs tells llvm-compilers-check the maximum total
# number of builds to activate in parallel. The user may think of it
# as equivalent to the GNU make -j switch. --threads tells
# llvm-compilers-check how many worker threads to use to accomplish
# those builds. If --threads is less than --jobs, --threads workers
# will be launched and each one will pick a source/flavor combination
# to build. Then llvm-compilers-check will invoke GNU make with -j
# (--jobs / --threads) to use up the remaining job capacity. Once a
# worker is finished with a build, it will pick another combination
# off the list and start building it.
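#
# For example, assuming the option defaults of --jobs=8 and --threads=4,
# four worker threads run concurrently and each one invokes GNU make with
# -j(8 // 4) = -j2, so at most eight compile jobs are active at any time.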
#
##===----------------------------------------------------------------------===##

import optparse
import os
import sys
import threading
import queue
import logging
import traceback
import subprocess
import re

# TODO: Use shutil.which when it is available (3.2 or later)
def find_executable(executable, path=None):
    """Try to find 'executable' in the directories listed in 'path' (a
    string listing directories separated by 'os.pathsep'; defaults to
    os.environ['PATH']).  Returns the complete filename or None if not
    found
    """
    if path is None:
        path = os.environ['PATH']
    paths = path.split(os.pathsep)
    extlist = ['']
    if os.name == 'os2':
        (base, ext) = os.path.splitext(executable)
        # executable files on OS/2 can have an arbitrary extension, but
        # .exe is automatically appended if no dot is present in the name
        if not ext:
            executable = executable + ".exe"
    elif sys.platform == 'win32':
        pathext = os.environ['PATHEXT'].lower().split(os.pathsep)
        (base, ext) = os.path.splitext(executable)
        if ext.lower() not in pathext:
            extlist = pathext
    for ext in extlist:
        execname = executable + ext
        if os.path.isfile(execname):
            return execname
        else:
            for p in paths:
                f = os.path.join(p, execname)
                if os.path.isfile(f):
                    return f
    else:
        return None
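
# Illustrative usage (assuming 'make' is somewhere on PATH; the exact path
# varies by system):
#   find_executable("make")          -> e.g. "/usr/bin/make"
#   find_executable("no-such-tool")  -> None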

def is_executable(fpath):
    return os.path.exists(fpath) and os.access(fpath, os.X_OK)

def add_options(parser):
    parser.add_option("-v", "--verbose", action="store_true",
                      default=False,
                      help=("Output informational messages"
                            " [default: %default]"))
    parser.add_option("--src", action="append",
                      help=("Top-level source directory [default: %default]"))
    parser.add_option("--build", action="append",
                      help=("Build types to run [default: %default]"))
    parser.add_option("--cc", default=find_executable("cc"),
                      help=("The C compiler to use [default: %default]"))
    parser.add_option("--cxx", default=find_executable("c++"),
                      help=("The C++ compiler to use [default: %default]"))
    parser.add_option("--threads", default=4, type="int",
                      help=("The number of worker threads to use "
                            "[default: %default]"))
    parser.add_option("--jobs", "-j", default=8, type="int",
                      help=("The number of simultaneous build jobs "
                            "[default: %default]"))
    parser.add_option("--prefix",
                      help=("Root install directory [default: %default]"))
    parser.add_option("--builddir",
                      help=("Root build directory [default: %default]"))
    parser.add_option("--extra-llvm-config-flags", default="",
                      help=("Extra flags to pass to llvm configure "
                            "[default: %default]"))
    parser.add_option("--force-configure", default=False, action="store_true",
                      help=("Force reconfigure of all components"))
    parser.add_option("--no-dragonegg", default=False, action="store_true",
                      help=("Do not build dragonegg"))
    parser.add_option("--no-install", default=False, action="store_true",
                      help=("Do not do installs"))
    return
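
# Note that optparse derives attribute names from long option names by
# replacing dashes with underscores, so --extra-llvm-config-flags is read
# back as options.extra_llvm_config_flags (see the getattr() call in
# Builder.dobuild below).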

def check_options(parser, options, valid_builds):
    # See if we're building valid flavors.
    for build in options.build:
        if (build not in valid_builds):
            parser.error("'" + build + "' is not a valid build flavor "
                         + str(valid_builds))

    # See if we can find source directories.
    for src in options.src:
        for component in components:
            component = component.rstrip("2")
            compsrc = src + "/" + component
            if (not os.path.isdir(compsrc)):
                parser.error("'" + compsrc + "' does not exist")

    # See if we can find the compilers
    options.cc = find_executable(options.cc)
    options.cxx = find_executable(options.cxx)

    return
# Raised when no unique abbreviation can be found for a set of names.
# Defined at module level so both abbreviation helpers below can raise it.
class NotFoundException(Exception): pass

# Find a unique short name for the given set of paths. This searches
# back through path components until it finds unique component names
# among all given paths.
def get_path_abbrevs(paths):
    # Find the number of common starting characters in the last component
    # of the paths.
    unique_paths = list(paths)

    # Find a unique component of each path.
    unique_bases = unique_paths[:]
    while len(unique_paths) > 0:
        bases = [os.path.basename(src) for src in unique_paths]
        components = { c for c in bases }
        # Account for single entry in paths.
        if len(components) > 1 or len(components) == len(bases):
            # We found something unique.
            for c in components:
                if bases.count(c) == 1:
                    index = bases.index(c)
                    unique_bases[index] = c
                    # Remove the corresponding path from the set under
                    # consideration.
                    unique_paths[index] = None
            unique_paths = [ p for p in unique_paths if p is not None ]
        unique_paths = [os.path.dirname(src) for src in unique_paths]

    if len(unique_paths) > 0:
        raise NotFoundException()

    abbrevs = dict(zip(paths, [base for base in unique_bases]))

    return abbrevs
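
# Illustrative result (using the example trees from the header comment):
#   get_path_abbrevs({"~/llvm/official", "~/llvm/staging", "~/llvm/commit"})
#   returns {"~/llvm/official": "official", "~/llvm/staging": "staging",
#   "~/llvm/commit": "commit"}, since the last path components are already
#   unique.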

# Given a set of unique names, find a short character sequence that
# uniquely identifies them.
def get_short_abbrevs(unique_bases):
    # Find a unique start character for each path base.
    my_unique_bases = unique_bases[:]
    unique_char_starts = unique_bases[:]
    while len(my_unique_bases) > 0:
        for start, char_tuple in enumerate(zip(*[base
                                                 for base in my_unique_bases])):
            chars = { c for c in char_tuple }
            # Account for single path.
            if len(chars) > 1 or len(chars) == len(char_tuple):
                # We found something unique.
                for c in chars:
                    if char_tuple.count(c) == 1:
                        index = char_tuple.index(c)
                        unique_char_starts[index] = start
                        # Remove the corresponding path from the set under
                        # consideration.
                        my_unique_bases[index] = None
                my_unique_bases = [ b for b in my_unique_bases
                                    if b is not None ]
                break

    if len(my_unique_bases) > 0:
        raise NotFoundException()

    abbrevs = [abbrev[start_index:start_index+3]
               for abbrev, start_index
               in zip([base for base in unique_bases],
                      [index for index in unique_char_starts])]

    abbrevs = dict(zip(unique_bases, abbrevs))

    return abbrevs
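
# Illustrative result: for ["official", "staging", "commit"] the first
# characters are already distinct, so each abbreviation is the three-character
# slice starting at index 0:
#   {"official": "off", "staging": "sta", "commit": "com"}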

class Builder(threading.Thread):
    class ExecutableNotFound(Exception): pass
    class FileNotExecutable(Exception): pass

    def __init__(self, work_queue, jobs,
                 build_abbrev, source_abbrev,
                 options):
        super().__init__()
        self.work_queue = work_queue
        self.jobs = jobs
        self.cc = options.cc
        self.cxx = options.cxx
        self.build_abbrev = build_abbrev
        self.source_abbrev = source_abbrev
        self.build_prefix = options.builddir
        self.install_prefix = options.prefix
        self.options = options
        self.component_abbrev = dict(
            llvm="llvm",
            dragonegg="degg")

    def run(self):
        while True:
            try:
                source, build = self.work_queue.get()
                self.dobuild(source, build)
            except:
                traceback.print_exc()
            finally:
                self.work_queue.task_done()

    def execute(self, command, execdir, env, component):
        prefix = self.component_abbrev[component.replace("-", "_")]
        pwd = os.getcwd()
        if not os.path.exists(execdir):
            os.makedirs(execdir)

        execenv = os.environ.copy()

        for key, value in env.items():
            execenv[key] = value

        self.logger.debug("[" + prefix + "] " + "env " + str(env) + " "
                          + " ".join(command))

        try:
            proc = subprocess.Popen(command,
                                    cwd=execdir,
                                    env=execenv,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)

            line = proc.stdout.readline()
            while line:
                self.logger.info("[" + prefix + "] "
                                 + str(line, "utf-8").rstrip())
                line = proc.stdout.readline()

        except:
            traceback.print_exc()

    # Get a list of C++ include directories to pass to clang.
    def get_includes(self):
        # Assume we're building with g++ for now.
        command = [self.cxx]
        command += ["-v", "-x", "c++", "/dev/null", "-fsyntax-only"]
        includes = []
        self.logger.debug(command)
        try:
            proc = subprocess.Popen(command,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)

            gather = False
            line = proc.stdout.readline()
            while line:
                self.logger.debug(line)
                if re.search("End of search list", str(line)) is not None:
                    self.logger.debug("Stop Gather")
                    gather = False
                if gather:
                    includes.append(str(line, "utf-8").strip())
                if re.search("#include <...> search starts", str(line)) is not None:
                    self.logger.debug("Start Gather")
                    gather = True
                line = proc.stdout.readline()
        except:
            traceback.print_exc()
        self.logger.debug(includes)
        return includes

    def dobuild(self, source, build):
        build_suffix = ""

        ssabbrev = get_short_abbrevs([ab for ab in self.source_abbrev.values()])

        prefix = "[" + ssabbrev[self.source_abbrev[source]] + "-" + self.build_abbrev[build] + "]"
        self.install_prefix += "/" + self.source_abbrev[source] + "/" + build
        build_suffix += "/" + self.source_abbrev[source] + "/" + build
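
        # For example, with the official/staging/commit trees from the header
        # comment and a debug build, the log prefix is "[off-dbg]", tools
        # install under <prefix>/official/debug and build results land under
        # <builddir>/<component>/official/debug.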

        self.logger = logging.getLogger(prefix)

        self.logger.debug(self.install_prefix)

        # Assume we're building with gcc for now.
        cxxincludes = self.get_includes()
        cxxroot = os.path.dirname(cxxincludes[0]) # Remove the version
        cxxroot = os.path.dirname(cxxroot)        # Remove the c++
        cxxroot = os.path.dirname(cxxroot)        # Remove the include
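        # Illustrative example: if the first include directory reported by g++
        # is /usr/include/c++/4.6 (paths vary by system), stripping the
        # version, "c++" and "include" components leaves /usr as the GCC
        # toolchain root passed to --with-gcc-toolchain below.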

        configure_flags = dict(
            llvm=dict(debug=["--prefix=" + self.install_prefix,
                             "--with-extra-options=-Werror",
                             "--enable-assertions",
                             "--disable-optimized",
                             "--with-gcc-toolchain=" + cxxroot],
                      release=["--prefix=" + self.install_prefix,
                               "--with-extra-options=-Werror",
                               "--enable-optimized",
                               "--with-gcc-toolchain=" + cxxroot],
                      paranoid=["--prefix=" + self.install_prefix,
                                "--with-extra-options=-Werror",
                                "--enable-assertions",
                                "--enable-expensive-checks",
                                "--disable-optimized",
                                "--with-gcc-toolchain=" + cxxroot]),
            dragonegg=dict(debug=[],
                           release=[],
                           paranoid=[]))

        configure_env = dict(
            llvm=dict(debug=dict(CC=self.cc,
                                 CXX=self.cxx),
                      release=dict(CC=self.cc,
                                   CXX=self.cxx),
                      paranoid=dict(CC=self.cc,
                                    CXX=self.cxx)),
            dragonegg=dict(debug=dict(CC=self.cc,
                                      CXX=self.cxx),
                           release=dict(CC=self.cc,
                                        CXX=self.cxx),
                           paranoid=dict(CC=self.cc,
                                         CXX=self.cxx)))

        make_flags = dict(
            llvm=dict(debug=["-j" + str(self.jobs)],
                      release=["-j" + str(self.jobs)],
                      paranoid=["-j" + str(self.jobs)]),
            dragonegg=dict(debug=["-j" + str(self.jobs)],
                           release=["-j" + str(self.jobs)],
                           paranoid=["-j" + str(self.jobs)]))

        make_env = dict(
            llvm=dict(debug=dict(),
                      release=dict(),
                      paranoid=dict()),
            dragonegg=dict(debug=dict(GCC=self.cc,
                                      LLVM_CONFIG=self.install_prefix + "/bin/llvm-config"),
                           release=dict(GCC=self.cc,
                                        LLVM_CONFIG=self.install_prefix + "/bin/llvm-config"),
                           paranoid=dict(GCC=self.cc,
                                         LLVM_CONFIG=self.install_prefix + "/bin/llvm-config")))

        make_install_flags = dict(
            llvm=dict(debug=["install"],
                      release=["install"],
                      paranoid=["install"]),
            dragonegg=dict(debug=["install"],
                           release=["install"],
                           paranoid=["install"]))

        make_install_env = dict(
            llvm=dict(debug=dict(),
                      release=dict(),
                      paranoid=dict()),
            dragonegg=dict(debug=dict(),
                           release=dict(),
                           paranoid=dict()))

        make_check_flags = dict(
            llvm=dict(debug=["check"],
                      release=["check"],
                      paranoid=["check"]),
            dragonegg=dict(debug=["check"],
                           release=["check"],
                           paranoid=["check"]))

        make_check_env = dict(
            llvm=dict(debug=dict(),
                      release=dict(),
                      paranoid=dict()),
            dragonegg=dict(debug=dict(),
                           release=dict(),
                           paranoid=dict()))

        for component in components:
            comp = component[:]

            if (self.options.no_dragonegg):
                if (comp == 'dragonegg'):
                    # builddir is not computed yet at this point, so only
                    # report the component being skipped.
                    self.logger.info("Skipping " + component)
                    continue

            srcdir = source + "/" + comp.rstrip("2")
            builddir = self.build_prefix + "/" + comp + "/" + build_suffix
            installdir = self.install_prefix

            comp_key = comp.replace("-", "_")

            config_args = configure_flags[comp_key][build][:]
            config_args.extend(getattr(self.options,
                                       "extra_" + comp_key.rstrip("2")
                                       + "_config_flags",
                                       "").split())

            self.logger.info("Configuring " + component + " in " + builddir)
            self.configure(component, srcdir, builddir,
                           config_args,
                           configure_env[comp_key][build])

            self.logger.info("Building " + component + " in " + builddir)
            self.logger.info("Build: make " + str(make_flags[comp_key][build]))
            self.make(component, srcdir, builddir,
                      make_flags[comp_key][build],
                      make_env[comp_key][build])

            if (not self.options.no_install):
                self.logger.info("Installing " + component + " in " + installdir)
                self.make(component, srcdir, builddir,
                          make_install_flags[comp_key][build],
                          make_install_env[comp_key][build])

            self.logger.info("Testing " + component + " in " + builddir)
            self.logger.info("Test: make "
                             + str(make_check_flags[comp_key][build]))
            self.make(component, srcdir, builddir,
                      make_check_flags[comp_key][build],
                      make_check_env[comp_key][build])

    def configure(self, component, srcdir, builddir, flags, env):
        self.logger.debug("Configure " + str(flags) + " " + str(srcdir) + " -> "
                          + str(builddir))

        configure_files = dict(
            llvm=[(srcdir + "/configure", builddir + "/Makefile")],
            dragonegg=[("","")])

        doconfig = False
        for conf, mf in configure_files[component.replace("-", "_")]:
            if not os.path.exists(conf):
                return
            if os.path.exists(conf) and os.path.exists(mf):
                confstat = os.stat(conf)
                makestat = os.stat(mf)
                if confstat.st_mtime > makestat.st_mtime:
                    doconfig = True
                    break
            else:
                doconfig = True
                break

        if not doconfig and not self.options.force_configure:
            return

        program = srcdir + "/configure"
        if not is_executable(program):
            return

        args = [program]
        args += ["--verbose"]
        args += flags
        self.execute(args, builddir, env, component)

    def make(self, component, srcdir, builddir, flags, env):
        program = find_executable("make")
        if program is None:
            raise self.ExecutableNotFound()

        if not is_executable(program):
            raise self.FileNotExecutable()

        args = [program]
        args += flags
        self.execute(args, builddir, env, component)

# Global constants
build_abbrev = dict(debug="dbg", release="opt", paranoid="par")
components = ["llvm", "dragonegg"]

# Parse options
parser = optparse.OptionParser(version="%prog 1.0")
add_options(parser)
(options, args) = parser.parse_args()
check_options(parser, options, build_abbrev.keys())

if options.verbose:
    logging.basicConfig(level=logging.DEBUG,
                        format='%(name)-13s: %(message)s')
else:
    logging.basicConfig(level=logging.INFO,
                        format='%(name)-13s: %(message)s')

source_abbrev = get_path_abbrevs(set(options.src))

work_queue = queue.Queue()

jobs = options.jobs // options.threads
if jobs == 0:
    jobs = 1

numthreads = options.threads

logging.getLogger().info("Building with " + str(options.jobs) + " jobs and "
                         + str(numthreads) + " threads using " + str(jobs)
                         + " make jobs")

logging.getLogger().info("CC  = " + str(options.cc))
logging.getLogger().info("CXX = " + str(options.cxx))

for t in range(numthreads):
    builder = Builder(work_queue, jobs,
                      build_abbrev, source_abbrev,
                      options)
    builder.daemon = True
    builder.start()

for build in set(options.build):
    for source in set(options.src):
        work_queue.put((source, build))

work_queue.join()