mirror of https://github.com/nodejs/node.git
Upgrade Waf to 1.5.16
parent fa514a9f16
commit 6e715b82d0
@@ -12,6 +12,6 @@ t = join(w, 'Tools')
sys.path = [w, t] + sys.path

import Scripting
VERSION="1.5.15"
VERSION="1.5.16"
Scripting.prepare(t, os.getcwd(), VERSION, wafdir)
sys.exit(0)

@@ -37,7 +37,7 @@ if 'PSYCOWAF' in os.environ:
    try:import psyco;psyco.full()
    except:pass

VERSION="1.5.15"
VERSION="1.5.16"
REVISION="x"
INSTALL="x"
C1='x'

@@ -82,26 +82,30 @@ def unpack_wafdir(dir):
        err("Cannot unpack waf lib into %s\nMove waf into a writeable directory" % dir)

    os.chdir(dir)
    tmp = 't.tbz2'
    tmp = 't.bz2'
    t = open(tmp,'wb')
    t.write(txt)
    t.close()

    t = None
    try:
        t = tarfile.open(tmp)
        for x in t: t.extract(x)
        t.close()
    except:
        os.chdir(cwd)
        try: shutil.rmtree(dir)
        except OSError: pass
        err("Waf cannot be unpacked, check that bzip2 support is present")
        try:
            os.system('bunzip2 t.bz2')
            t = tarfile.open('t')
        except:
            os.chdir(cwd)
            try: shutil.rmtree(dir)
            except OSError: pass
            err("Waf cannot be unpacked, check that bzip2 support is present")

    for x in t: t.extract(x)
    t.close()

    for x in ['Tools', '3rdparty']:
        os.chmod(join('wafadmin',x), 493)

    os.unlink(tmp)

    if sys.hexversion>0x300000f:
        sys.path = [join(dir, 'wafadmin')] + sys.path
        import py3kfixes
@@ -0,0 +1,128 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2008-2010 (ita)

"""
Execute the tasks with gcc -MD, read the dependencies from the .d file
and prepare the dependency calculation for the next run
"""

import os, re, threading
import Task, Logs, Utils, preproc
from TaskGen import before, after, feature

lock = threading.Lock()

preprocessor_flag = '-MD'

@feature('cc')
@before('apply_core')
def add_mmd_cc(self):
    if self.env.get_flat('CCFLAGS').find(preprocessor_flag) < 0:
        self.env.append_value('CCFLAGS', preprocessor_flag)

@feature('cxx')
@before('apply_core')
def add_mmd_cxx(self):
    if self.env.get_flat('CXXFLAGS').find(preprocessor_flag) < 0:
        self.env.append_value('CXXFLAGS', preprocessor_flag)

def scan(self):
    "the scanner does not do anything initially"
    nodes = self.generator.bld.node_deps.get(self.unique_id(), [])
    names = []
    return (nodes, names)

re_o = re.compile("\.o$")
re_src = re.compile("^(\.\.)[\\/](.*)$")

def post_run(self):
    # The following code is executed by threads, it is not safe, so a lock is needed...

    if getattr(self, 'cached', None):
        return Task.Task.post_run(self)

    name = self.outputs[0].abspath(self.env)
    name = re_o.sub('.d', name)
    txt = Utils.readf(name)
    #os.unlink(name)

    txt = txt.replace('\\\n', '')

    lst = txt.strip().split(':')
    val = ":".join(lst[1:])
    val = val.split()

    nodes = []
    bld = self.generator.bld

    f = re.compile("^("+self.env.variant()+"|\.\.)[\\/](.*)$")
    for x in val:
        if os.path.isabs(x):

            if not preproc.go_absolute:
                continue

            lock.acquire()
            try:
                node = bld.root.find_resource(x)
            finally:
                lock.release()
        else:
            g = re.search(re_src, x)
            if g:
                x = g.group(2)
                lock.acquire()
                try:
                    node = bld.bldnode.parent.find_resource(x)
                finally:
                    lock.release()
            else:
                g = re.search(f, x)
                if g:
                    x = g.group(2)
                    lock.acquire()
                    try:
                        node = bld.srcnode.find_resource(x)
                    finally:
                        lock.release()

        if id(node) == id(self.inputs[0]):
            # ignore the source file, it is already in the dependencies
            # this way, successful config tests may be retrieved from the cache
            continue

        if not node:
            raise ValueError('could not find %r for %r' % (x, self))
        else:
            nodes.append(node)

    Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))

    bld.node_deps[self.unique_id()] = nodes
    bld.raw_deps[self.unique_id()] = []

    try:
        del self.cache_sig
    except:
        pass

    Task.Task.post_run(self)

import Constants, Utils
def sig_implicit_deps(self):
    try:
        return Task.Task.sig_implicit_deps(self)
    except Utils.WafError:
        return Constants.SIG_NIL

for name in 'cc cxx'.split():
    try:
        cls = Task.TaskBase.classes[name]
    except KeyError:
        pass
    else:
        cls.post_run = post_run
        cls.scan = scan
        cls.sig_implicit_deps = sig_implicit_deps
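The tool above works by adding -MD to the compiler flags and then reading back the .d file that gcc writes next to each object file. A minimal standalone sketch of that parsing step, not part of this commit (the sample .d content is invented):

def parse_d_file(txt):
    # a .d file holds "target: dep dep ..." with backslash-newline continuations,
    # e.g. as produced by: gcc -MD -c main.c  ->  main.d
    txt = txt.replace('\\\n', '')          # join continuation lines
    lst = txt.strip().split(':')           # drop the "target:" part
    return ":".join(lst[1:]).split()       # remaining tokens are the dependencies

sample = "main.o: main.c \\\n include/foo.h /usr/include/stdio.h\n"
print(parse_d_file(sample))
# ['main.c', 'include/foo.h', '/usr/include/stdio.h']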
@@ -0,0 +1,97 @@
#!/usr/bin/env python
# encoding: utf-8
# go.py - Waf tool for the Go programming language
# By: Tom Wambold <tom5760@gmail.com>

import platform

import Task
import Utils
from TaskGen import feature, extension, after

Task.simple_task_type('gocompile', '${GOC} ${GOCFLAGS} -o ${TGT} ${SRC}', shell=False)
Task.simple_task_type('gopack', '${GOP} grc ${TGT} ${SRC}', shell=False)
Task.simple_task_type('golink', '${GOL} ${GOLFLAGS} -o ${TGT} ${SRC}', shell=False)

def detect(conf):

    def set_def(var, val):
        if not conf.env[var]:
            conf.env[var] = val

    set_def('GO_PLATFORM', platform.machine())

    if conf.env.GO_PLATFORM == 'x86_64':
        set_def('GO_COMPILER', '6g')
        set_def('GO_LINKER', '6l')
        set_def('GO_EXTENSION', '.6')
    elif conf.env.GO_PLATFORM == 'i386':
        set_def('GO_COMPILER', '8g')
        set_def('GO_LINKER', '8l')
        set_def('GO_EXTENSION', '.8')

    if not (conf.env.GO_COMPILER or conf.env.GO_LINKER or conf.env.GO_EXTENSION):
        raise conf.fatal('Unsupported platform ' + platform.machine())

    set_def('GO_PACK', 'gopack')
    set_def('GO_PACK_EXTENSION', '.a')

    conf.find_program(conf.env.GO_COMPILER, var='GOC', mandatory=True)
    conf.find_program(conf.env.GO_LINKER, var='GOL', mandatory=True)
    conf.find_program(conf.env.GO_PACK, var='GOP', mandatory=True)

@extension('.go')
def compile_go(self, node):
    try:
        self.go_nodes.append(node)
    except AttributeError:
        self.go_nodes = [node]

@feature('go')
@after('apply_core')
def apply_compile_go(self):
    try:
        nodes = self.go_nodes
    except AttributeError:
        self.go_compile_task = None
    else:
        self.go_compile_task = self.create_task('gocompile',
            nodes,
            [self.path.find_or_declare(self.target + self.env.GO_EXTENSION)])

@feature('gopackage', 'goprogram')
@after('apply_compile_go')
def apply_goinc(self):
    if not getattr(self, 'go_compile_task', None):
        return

    names = self.to_list(getattr(self, 'uselib_local', []))
    for name in names:
        obj = self.name_to_obj(name)
        if not obj:
            raise Utils.WafError('object %r was not found in uselib_local '
                '(required by %r)' % (lib_name, self.name))
        obj.post()
        self.go_compile_task.set_run_after(obj.go_package_task)
        self.go_compile_task.deps_nodes.extend(obj.go_package_task.outputs)
        self.env.append_unique('GOCFLAGS', '-I' + obj.path.abspath(obj.env))
        self.env.append_unique('GOLFLAGS', '-L' + obj.path.abspath(obj.env))

@feature('gopackage')
@after('apply_goinc')
def apply_gopackage(self):
    self.go_package_task = self.create_task('gopack',
        self.go_compile_task.outputs[0],
        self.path.find_or_declare(self.target + self.env.GO_PACK_EXTENSION))
    self.go_package_task.set_run_after(self.go_compile_task)
    self.go_package_task.deps_nodes.extend(self.go_compile_task.outputs)

@feature('goprogram')
@after('apply_goinc')
def apply_golink(self):
    self.go_link_task = self.create_task('golink',
        self.go_compile_task.outputs[0],
        self.path.find_or_declare(self.target))
    self.go_link_task.set_run_after(self.go_compile_task)
    self.go_link_task.deps_nodes.extend(self.go_compile_task.outputs)
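A hedged sketch of how a project wscript could drive the Go tool above; the file names and targets are illustrative assumptions, not part of the commit:

# hypothetical wscript fragment (illustrative names; assumes the tool is on waf's tool path)
def configure(conf):
    conf.check_tool('go')      # runs detect(): selects 6g/6l or 8g/8l and gopack

def build(bld):
    # 'go' compiles the .go sources, 'goprogram' links the result into an executable
    bld(features='go goprogram', source='main.go', target='hello')
    # 'gopackage' would run gopack to produce an archive instead of linking
    bld(features='go gopackage', source='mylib.go', target='mylib')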
@@ -0,0 +1,199 @@
#! /usr/bin/env python
# encoding: UTF-8
# Petar Forai
# Thomas Nagy 2008

import re
import Task, Utils, Logs
from TaskGen import extension, taskgen, feature, after
from Configure import conf
import preproc

"""
Welcome in the hell of adding tasks dynamically

swig interface files may be created at runtime, the module name may be unknown in advance

rev 5859 is much more simple
"""

SWIG_EXTS = ['.swig', '.i']

swig_str = '${SWIG} ${SWIGFLAGS} ${SRC}'
cls = Task.simple_task_type('swig', swig_str, color='BLUE', ext_in='.i .h', ext_out='.o .c .cxx', shell=False)

def runnable_status(self):
    for t in self.run_after:
        if not t.hasrun:
            return ASK_LATER

    if not getattr(self, 'init_outputs', None):
        self.init_outputs = True
        if not getattr(self, 'module', None):
            # search the module name
            txt = self.inputs[0].read(self.env)
            m = re_module.search(txt)
            if not m:
                raise ValueError("could not find the swig module name")
            self.module = m.group(1)

        swig_c(self)

        # add the language-specific output files as nodes
        # call funs in the dict swig_langs
        for x in self.env['SWIGFLAGS']:
            # obtain the language
            x = x[1:]
            try:
                fun = swig_langs[x]
            except KeyError:
                pass
            else:
                fun(self)

    return Task.Task.runnable_status(self)
setattr(cls, 'runnable_status', runnable_status)

re_module = re.compile('%module(?:\s*\(.*\))?\s+(.+)', re.M)

re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M)
re_2 = re.compile('%include "(.*)"', re.M)
re_3 = re.compile('#include "(.*)"', re.M)

def scan(self):
    "scan for swig dependencies, climb the .i files"
    env = self.env

    lst_src = []

    seen = []
    to_see = [self.inputs[0]]

    while to_see:
        node = to_see.pop(0)
        if node.id in seen:
            continue
        seen.append(node.id)
        lst_src.append(node)

        # read the file
        code = node.read(env)
        code = preproc.re_nl.sub('', code)
        code = preproc.re_cpp.sub(preproc.repl, code)

        # find .i files and project headers
        names = re_2.findall(code) + re_3.findall(code)
        for n in names:
            for d in self.generator.swig_dir_nodes + [node.parent]:
                u = d.find_resource(n)
                if u:
                    to_see.append(u)
                    break
            else:
                Logs.warn('could not find %r' % n)

    # list of nodes this one depends on, and module name if present
    if Logs.verbose:
        Logs.debug('deps: deps for %s: %s' % (str(self), str(lst_src)))
    return (lst_src, [])
cls.scan = scan

# provide additional language processing
swig_langs = {}
def swig(fun):
    swig_langs[fun.__name__.replace('swig_', '')] = fun

def swig_c(self):
    ext = '.swigwrap_%d.c' % self.generator.idx
    flags = self.env['SWIGFLAGS']
    if '-c++' in flags:
        ext += 'xx'
    out_node = self.inputs[0].parent.find_or_declare(self.module + ext)

    if '-c++' in flags:
        task = self.generator.cxx_hook(out_node)
    else:
        task = self.generator.cc_hook(out_node)

    task.set_run_after(self)

    ge = self.generator.bld.generator
    ge.outstanding.insert(0, task)
    ge.total += 1

    try:
        ltask = self.generator.link_task
    except AttributeError:
        pass
    else:
        ltask.inputs.append(task.outputs[0])

    self.outputs.append(out_node)

    if not '-o' in self.env['SWIGFLAGS']:
        self.env.append_value('SWIGFLAGS', '-o')
        self.env.append_value('SWIGFLAGS', self.outputs[0].abspath(self.env))

@swig
def swig_python(tsk):
    tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.py'))

@swig
def swig_ocaml(tsk):
    tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.ml'))
    tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.mli'))

@taskgen
@feature('swig')
@after('apply_incpaths')
def add_swig_paths(self):
    """the attribute 'after' is not used here, the method is added directly at the end"""

    self.swig_dir_nodes = self.env['INC_PATHS']
    include_flags = self.env['_CXXINCFLAGS'] or self.env['_CCINCFLAGS']
    self.env.append_unique('SWIGFLAGS', [f.replace("/I", "-I") for f in include_flags])

@extension(SWIG_EXTS)
def i_file(self, node):
    if not 'add_swig_paths' in self.meths:
        self.meths.append('add_swig_paths')

    # the task instance
    tsk = self.create_task('swig')
    tsk.set_inputs(node)
    tsk.module = getattr(self, 'swig_module', None)

    flags = self.to_list(getattr(self, 'swig_flags', []))
    tsk.env['SWIGFLAGS'] = flags

    if not '-outdir' in flags:
        flags.append('-outdir')
        flags.append(node.parent.abspath(self.env))

@conf
def check_swig_version(conf, minver=None):
    """Check for a minimum swig version like conf.check_swig_version('1.3.28')
    or conf.check_swig_version((1,3,28)) """
    reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)

    swig_out = Utils.cmd_output('%s -version' % conf.env['SWIG'])

    swigver = [int(s) for s in reg_swig.findall(swig_out)[0].split('.')]
    if isinstance(minver, basestring):
        minver = [int(s) for s in minver.split(".")]
    if isinstance(minver, tuple):
        minver = [int(s) for s in minver]
    result = (minver is None) or (minver[:3] <= swigver[:3])
    swigver_full = '.'.join(map(str, swigver))
    if result:
        conf.env['SWIG_VERSION'] = swigver_full
    minver_str = '.'.join(map(str, minver))
    if minver is None:
        conf.check_message_custom('swig version', '', swigver_full)
    else:
        conf.check_message('swig version', '>= %s' % (minver_str,), result, option=swigver_full)
    return result

def detect(conf):
    swig = conf.find_program('swig', var='SWIG', mandatory=True)
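The re_module expression above is what extracts the module name from a SWIG interface file; a small illustration with invented input:

import re

re_module = re.compile('%module(?:\s*\(.*\))?\s+(.+)', re.M)

sample_i = '%module example\n%include "example.h"\n'
print(re_module.search(sample_i).group(1))   # prints: example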
@@ -0,0 +1,113 @@
#! /usr/bin/env python
# encoding: UTF-8
# Nicolas Joseph 2009

from fnmatch import fnmatchcase
import os, os.path, re, stat
import Task, Utils, Node, Constants
from TaskGen import feature, extension, after
from Logs import debug, warn, error

VALADOC_STR = '${VALADOC}'

class valadoc_task(Task.Task):

    vars = ['VALADOC', 'VALADOCFLAGS']
    color = 'BLUE'
    after = 'cxx_link cc_link'
    quiet = True

    output_dir = ''
    doclet = ''
    package_name = ''
    package_version = ''
    files = []
    protected = False
    private = False
    inherit = False
    deps = False
    enable_non_null_experimental = False
    force = False

    def runnable_status(self):
        return True

    def run(self):
        if self.env['VALADOC']:
            if not self.env['VALADOCFLAGS']:
                self.env['VALADOCFLAGS'] = ''
            cmd = [Utils.subst_vars(VALADOC_STR, self.env)]
            cmd.append ('-o %s' % self.output_dir)
            if getattr(self, 'doclet', None):
                cmd.append ('--doclet %s' % self.doclet)
            cmd.append ('--package-name %s' % self.package_name)
            if getattr(self, 'version', None):
                cmd.append ('--package-version %s' % self.package_version)
            if getattr(self, 'packages', None):
                for package in self.packages:
                    cmd.append ('--pkg %s' % package)
            if getattr(self, 'vapi_dirs', None):
                for vapi_dir in self.vapi_dirs:
                    cmd.append ('--vapidir %s' % vapi_dir)
            if getattr(self, 'protected', None):
                cmd.append ('--protected')
            if getattr(self, 'private', None):
                cmd.append ('--private')
            if getattr(self, 'inherit', None):
                cmd.append ('--inherit')
            if getattr(self, 'deps', None):
                cmd.append ('--deps')
            if getattr(self, 'enable_non_null_experimental', None):
                cmd.append ('--enable-non-null-experimental')
            if getattr(self, 'force', None):
                cmd.append ('--force')
            cmd.append (' '.join ([x.relpath_gen (self.generator.bld.bldnode) for x in self.files]))
            return self.generator.bld.exec_command(' '.join(cmd))
        else:
            error ('You must install valadoc <http://live.gnome.org/Valadoc> for generate the API documentation')
            return -1

@feature('valadoc')
def process_valadoc(self):
    task = getattr(self, 'task', None)
    if not task:
        task = self.create_task('valadoc')
        self.task = task
    if getattr(self, 'output_dir', None):
        task.output_dir = self.output_dir
    else:
        Utils.WafError('no output directory')
    if getattr(self, 'doclet', None):
        task.doclet = self.doclet
    else:
        Utils.WafError('no doclet directory')
    if getattr(self, 'package_name', None):
        task.package_name = self.package_name
    else:
        Utils.WafError('no package name')
    if getattr(self, 'package_version', None):
        task.package_version = self.package_version
    if getattr(self, 'packages', None):
        task.packages = Utils.to_list(self.packages)
    if getattr(self, 'vapi_dirs', None):
        task.vapi_dirs = Utils.to_list(self.vapi_dirs)
    if getattr(self, 'files', None):
        task.files = self.files
    else:
        Utils.WafError('no input file')
    if getattr(self, 'protected', None):
        task.protected = self.protected
    if getattr(self, 'private', None):
        task.private = self.private
    if getattr(self, 'inherit', None):
        task.inherit = self.inherit
    if getattr(self, 'deps', None):
        task.deps = self.deps
    if getattr(self, 'enable_non_null_experimental', None):
        task.enable_non_null_experimental = self.enable_non_null_experimental
    if getattr(self, 'force', None):
        task.force = self.force

def detect(conf):
    conf.find_program('valadoc', var='VALADOC', mandatory=False)
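A hedged sketch of a wscript fragment that could drive the valadoc feature above; every value below is an illustrative assumption, not taken from the commit:

# hypothetical wscript fragment (illustrative values)
def configure(conf):
    conf.check_tool('valadoc')   # find_program('valadoc', ..., mandatory=False)

def build(bld):
    bld(features='valadoc',
        output_dir='../doc/html',                       # required by process_valadoc
        doclet='html',                                  # required
        package_name='mylib',                           # required
        package_version='0.1',
        packages='gtk+-2.0',
        vapi_dirs='../vapi',
        files=[bld.path.find_resource('mylib.vala')])   # required; the task expects nodes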
@@ -0,0 +1,35 @@
#!/usr/bin/env python
# encoding: utf-8
# ita 2010

import Logs, Utils, Build, Task

def say(txt):
    Logs.warn("^o^: %s" % txt)

try:
    ret = Utils.cmd_output('which cowsay 2> /dev/null').strip()
except Exception, e:
    pass
else:
    def say(txt):
        f = Utils.cmd_output([ret, txt])
        Utils.pprint('PINK', f)

say('you make the errors, we detect them')

def check_task_classes(self):
    for x in Task.TaskBase.classes:
        if isinstance(x, Task.Task):
            if not getattr(cls, 'ext_in', None) or getattr(cls, 'before', None):
                say('class %s has no precedence constraints (ext_in/before)')
            if not getattr(cls, 'ext_out', None) or getattr(cls, 'after', None):
                say('class %s has no precedence constraints (ext_out/after)')

comp = Build.BuildContext.compile
def compile(self):
    if not getattr(self, 'magic', None):
        check_task_classes(self)
    return comp(self)
Build.BuildContext.compile = compile
@@ -523,9 +523,9 @@ class BuildContext(Utils.Context):
if node.id & 3 != Node.BUILD:
    continue

for dct in self.node_sigs:
for dct in self.node_sigs.values():
    if node.id in dct:
        dict.__delitem__(node.id)
        dct.__delitem__(node.id)

# the policy is to avoid removing nodes representing directories
src_dir_node.childs.__delitem__(node.name)
@@ -809,6 +809,27 @@ class BuildContext(Utils.Context):
        destpath = os.path.join(destdir, self.red.sub('', destpath))
    return destpath

def install_dir(self, path, env=None):
    """
    create empty folders for the installation (very rarely used)
    """
    if env:
        assert isinstance(env, Environment.Environment), "invalid parameter"
    else:
        env = self.env

    if not path:
        return []

    destpath = self.get_install_path(path, env)

    if self.is_install > 0:
        info('* creating %s' % destpath)
        Utils.check_dir(destpath)
    elif self.is_install < 0:
        info('* removing %s' % destpath)
        self.uninstall.append(destpath + '/xxx') # yes, ugly

def install_files(self, path, files, env=None, chmod=O644, relative_trick=False, cwd=None):
    """To install files only after they have been built, put the calls in a method named
    post_build on the top-level wscript
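The new install_dir helper creates empty directories at install time and records them for cleanup on uninstall; a short hedged example of calling it from a build function (the path is an illustrative assumption):

# hypothetical wscript fragment
def build(bld):
    # creates ${PREFIX}/var/lib/myapp on 'waf install', schedules it for removal on 'waf uninstall'
    bld.install_dir('${PREFIX}/var/lib/myapp')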
@@ -992,6 +1013,7 @@ class BuildContext(Utils.Context):
def use_the_magic(self):
    Task.algotype = Task.MAXPARALLEL
    Task.file_deps = Task.extract_deps
    self.magic = True

install_as = group_method(install_as)
install_files = group_method(install_files)
@@ -22,10 +22,17 @@ Note: the c/c++ related code is in the module config_c
import os, shlex, sys, time
try: import cPickle
except ImportError: import pickle as cPickle
import Environment, Utils, Options
import Environment, Utils, Options, Logs
from Logs import warn
from Constants import *

try:
    from urllib import request
except:
    from urllib import urlopen
else:
    urlopen = request.urlopen

conf_template = '''# project %(app)s configured on %(now)s by
# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
# using %(args)s

@@ -166,9 +173,40 @@ class ConfigurationContext(Utils.Context):
            continue
        self.tool_cache.append(mag)

        if not tooldir:
            # check if the tool exists in the Tools or 3rdparty folders
            _Tools = Options.tooldir[0]
            _3rdparty = os.sep.join((_Tools, '..', '3rdparty'))
            for d in (_Tools, _3rdparty):
                lst = os.listdir(d)
                if tool + '.py' in lst:
                    break
            else:
                # try to download the tool from the repository then
                for x in Utils.to_list(Options.remote_repo):
                    for sub in ['branches/waf-%s/wafadmin/3rdparty' % WAFVERSION, 'trunk/wafadmin/3rdparty']:
                        url = '/'.join((x, sub, tool + '.py'))
                        try:
                            web = urlopen(url)
                            if web.getcode() != 200:
                                continue
                        except Exception, e:
                            # on python3 urlopen throws an exception
                            continue
                        else:
                            try:
                                loc = open(_3rdparty + os.sep + tool + '.py', 'wb')
                                loc.write(web.read())
                                web.close()
                            finally:
                                loc.close()
                            Logs.warn('downloaded %s from %s' % (tool, url))
                    else:
                        break

        module = Utils.load_tool(tool, tooldir)

        if funs:
        if funs is not None:
            self.eval_rules(funs)
        else:
            func = getattr(module, 'detect', None)

@@ -276,7 +314,7 @@ class ConfigurationContext(Utils.Context):
            ret = find_program_impl(self.env, x, path_list, var, environ=self.environ)
            if ret: break

        self.check_message_1('Check for program %s' % ' or '.join(filename))
        self.check_message_1('Checking for program %s' % ' or '.join(filename))
        self.log.write(' find program=%r paths=%r var=%r\n -> %r\n' % (filename, path_list, var, ret))
        if ret:
            Utils.pprint('GREEN', str(ret))
@@ -9,9 +9,9 @@ maintainer: the version number is updated from the top-level wscript file
"""

# do not touch these three lines, they are updated automatically
HEXVERSION = 0x105015
WAFVERSION="1.5.15"
WAFREVISION = "7505M"
HEXVERSION=0x105016
WAFVERSION="1.5.16"
WAFREVISION = "7610:7647M"
ABI = 7

# permissions
@@ -129,6 +129,7 @@ class Node(object):
    return self.id & 3

def set_type(self, t):
    "dangerous, you are not supposed to use this"
    self.id = self.id + t - self.id & 3

def dirs(self):
@@ -25,6 +25,10 @@ except KeyError: cache_global = ''
platform = Utils.unversioned_sys_platform()
conf_file = 'conf-runs-%s-%d.pickle' % (platform, ABI)

remote_repo = ['http://waf.googlecode.com/svn/']
"""remote directory for the plugins"""


# Such a command-line should work: JOBS=4 PREFIX=/opt/ DESTDIR=/tmp/ahoj/ waf configure
default_prefix = os.environ.get('PREFIX')
if not default_prefix:
@@ -508,7 +508,10 @@ def dist(appname='', version=''):
        pass

    # copy the files into the temporary folder
    copytree('.', tmp_folder, getattr(Utils.g_module, BLDDIR, None))
    blddir = getattr(Utils.g_module, BLDDIR, None)
    if not blddir:
        blddir = getattr(Utils.g_module, 'out', None)
    copytree('.', tmp_folder, blddir)

    # undocumented hook for additional cleanup
    dist_hook = getattr(Utils.g_module, 'dist_hook', None)

@@ -541,7 +544,7 @@ def dist(appname='', version=''):
    return arch_name

# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
def distcheck(appname='', version=''):
def distcheck(appname='', version='', subdir=''):
    '''checks if the sources compile (tarball from 'dist')'''
    import tempfile, tarfile

@@ -550,14 +553,25 @@ def distcheck(appname='', version=''):

    waf = os.path.abspath(sys.argv[0])
    tarball = dist(appname, version)

    path = appname + '-' + version

    # remove any previous instance
    if os.path.exists(path):
        shutil.rmtree(path)

    t = tarfile.open(tarball)
    for x in t: t.extract(x)
    t.close()

    path = appname + '-' + version
    # build_path is the directory for the waf invocation
    if subdir:
        build_path = os.path.join(path, subdir)
    else:
        build_path = path

    instdir = tempfile.mkdtemp('.inst', '%s-%s' % (appname, version))
    ret = Utils.pproc.Popen([waf, 'configure', 'install', 'uninstall', '--destdir=' + instdir], cwd=path).wait()
    ret = Utils.pproc.Popen([waf, 'configure', 'build', 'install', 'uninstall', '--destdir=' + instdir], cwd=build_path).wait()
    if ret:
        raise Utils.WafError('distcheck failed with code %i' % ret)
@@ -42,7 +42,7 @@ The role of the Task Manager is to give the tasks in order (groups of task that

"""

import os, shutil, sys, re, random, datetime, tempfile
import os, shutil, sys, re, random, datetime, tempfile, shlex
from Utils import md5
import Build, Runner, Utils, Node, Logs, Options
from Logs import debug, warn, error

@@ -395,6 +395,9 @@ class TaskBase(object):

    def exec_command(self, *k, **kw):
        "use this for executing commands from tasks"
        # TODO in waf 1.6, eliminate bld.exec_command, and move the cwd processing to here
        if self.env['env']:
            kw['env'] = self.env['env']
        return self.generator.bld.exec_command(*k, **kw)

    def runnable_status(self):

@@ -656,7 +659,7 @@ class Task(TaskBase):
            try:
                os.stat(node.abspath(env))
            except OSError:
                self.has_run = MISSING
                self.hasrun = MISSING
                self.err_msg = '-> missing file: %r' % node.abspath(env)
                raise Utils.WafError

@@ -753,7 +756,8 @@ class Task(TaskBase):

        for node in self.outputs:
            self.generator.bld.node_sigs[variant][node.id] = sig
            self.generator.bld.printout('restoring from cache %r\n' % node.bldpath(env))
            if Options.options.progress_bar < 1:
                self.generator.bld.printout('restoring from cache %r\n' % node.bldpath(env))

        self.cached = True
        return 1

@@ -871,7 +875,14 @@ class Task(TaskBase):
            sig = self.compute_sig_implicit_deps()
        except KeyError:
            try:
                nodes = bld.node_deps.get(self.unique_id(), [])
                nodes = []
                for k in bld.node_deps.get(self.unique_id(), []):
                    if k.id & 3 == 2: # Node.FILE:
                        if not k.id in bld.node_sigs[0]:
                            nodes.append(k)
                    else:
                        if not k.id in bld.node_sigs[self.env.variant()]:
                            nodes.append(k)
            except:
                nodes = '?'
            raise Utils.WafError('Missing node signature for %r (for implicit dependencies %r)' % (nodes, self))
@@ -980,9 +991,8 @@ def compile_fun_noshell(name, line):
        app('lst.extend(to_list(env[%r]))' % var)
        if not var in dvars: dvars.append(var)

    if extr:
        if params[-1]:
            app("lst.extend(%r)" % params[-1].split())
    if params[-1]:
        app("lst.extend(%r)" % shlex.split(params[-1]))

    fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf)
    debug('action: %s', fun)
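The switch from str.split to shlex.split above matters when the trailing part of a rule string contains quoted arguments; a short comparison with an invented command string:

import shlex

cmd = 'gcc -DVALUE="hello world" -c'
print(cmd.split())        # ['gcc', '-DVALUE="hello', 'world"', '-c']  quoted argument is torn apart
print(shlex.split(cmd))   # ['gcc', '-DVALUE=hello world', '-c']       quoting is honoured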
@@ -1068,6 +1078,9 @@ def update_outputs(cls):
        new_sig = self.signature()
        prev_sig = bld.task_sigs[self.unique_id()][0]
        if prev_sig == new_sig:
            for x in self.outputs:
                if not x.id in bld.node_sigs[self.env.variant()]:
                    return RUN_ME
            return SKIP_ME
    except KeyError:
        pass
@@ -489,27 +489,31 @@ def exec_rule(self):

    # get the function and the variables
    func = self.rule

    vars2 = []
    if isinstance(func, str):
        # use the shell by default for user-defined commands
        (func, vars2) = Task.compile_fun('', self.rule, shell=getattr(self, 'shell', True))
        func.code = self.rule
    vars = getattr(self, 'vars', vars2)
    if not vars:
        if isinstance(self.rule, str):
            vars = self.rule
        else:
            vars = Utils.h_fun(self.rule)

    # create the task class
    name = getattr(self, 'name', None) or self.target or self.rule
    if not isinstance(name, str):
        name = str(self.idx)
    cls = Task.task_type_from_func(name, func, vars)
    cls = Task.task_type_from_func(name, func, getattr(self, 'vars', vars2))

    # now create one instance
    tsk = self.create_task(name)

    dep_vars = getattr(self, 'dep_vars', ['ruledeps'])
    if dep_vars:
        tsk.dep_vars = dep_vars
    if isinstance(self.rule, str):
        tsk.env.ruledeps = self.rule
    else:
        # only works if the function is in a global module such as a waf tool
        tsk.env.ruledeps = Utils.h_fun(self.rule)

    # we assume that the user knows that without inputs or outputs
    #if not getattr(self, 'target', None) and not getattr(self, 'source', None):
    #	cls.quiet = True
@@ -9,7 +9,7 @@ import Task
from TaskGen import extension

bison = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
cls = Task.simple_task_type('bison', bison, 'GREEN', ext_in='.yc .y .yy', ext_out='.c .cxx .h .l', before='cxx', shell=False)
cls = Task.simple_task_type('bison', bison, 'GREEN', ext_in='.yc .y .yy', ext_out='.c .cxx .h .l', shell=False)

@extension(['.y', '.yc', '.yy'])
def big_bison(self, node):

@@ -34,6 +34,5 @@ def big_bison(self, node):

def detect(conf):
    bison = conf.find_program('bison', var='BISON', mandatory=True)
    v = conf.env
    v['BISONFLAGS'] = '-d'
    conf.env['BISONFLAGS'] = '-d'
@@ -209,7 +209,7 @@ def default_cc(self):
@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
def apply_verif(self):
    """no particular order, used for diagnostic"""
    if not (self.source or getattr(self, 'add_objects', None) or getattr(self, 'uselib_local', None)):
    if not (self.source or getattr(self, 'add_objects', None) or getattr(self, 'uselib_local', None) or getattr(self, 'obj_files', None)):
        raise Utils.WafError('no source files specified for %s' % self)
    if not self.target:
        raise Utils.WafError('no target for %s' % self)
@@ -119,7 +119,7 @@ def validate_cfg(self, kw):
    if not 'msg' in kw:
        kw['msg'] = 'Checking for %s' % (kw['package'] or kw['path'])
    if not 'okmsg' in kw:
        kw['okmsg'] = 'ok'
        kw['okmsg'] = 'yes'
    if not 'errmsg' in kw:
        kw['errmsg'] = 'not found'

@@ -148,7 +148,7 @@ def cmd_and_log(self, cmd, kw):
        if kw.get('mandatory', False):
            kw['errmsg'] = out.strip()
        else:
            kw['errmsg'] = 'fail'
            kw['errmsg'] = 'no'
        self.fatal('fail')
    return out

@@ -160,7 +160,7 @@ def exec_cfg(self, kw):
        cmd = '%s --atleast-pkgconfig-version=%s' % (kw['path'], kw['atleast_pkgconfig_version'])
        self.cmd_and_log(cmd, kw)
        if not 'okmsg' in kw:
            kw['okmsg'] = 'ok'
            kw['okmsg'] = 'yes'
        return

    # checking for the version of a module

@@ -169,7 +169,7 @@ def exec_cfg(self, kw):
        if y in kw:
            self.cmd_and_log('%s --%s=%s %s' % (kw['path'], x, kw[y], kw['package']), kw)
            if not 'okmsg' in kw:
                kw['okmsg'] = 'ok'
                kw['okmsg'] = 'yes'
            self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
            break

@@ -189,7 +189,7 @@ def exec_cfg(self, kw):
            var = '%s_%s' % (uselib, v)
            env[var] = val
        if not 'okmsg' in kw:
            kw['okmsg'] = 'ok'
            kw['okmsg'] = 'yes'
        return

    lst = [kw['path']]

@@ -203,7 +203,7 @@ def exec_cfg(self, kw):
    cmd = ' '.join(lst)
    ret = self.cmd_and_log(cmd, kw)
    if not 'okmsg' in kw:
        kw['okmsg'] = 'ok'
        kw['okmsg'] = 'yes'

    self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
    parse_flags(ret, kw.get('uselib_store', kw['package'].upper()), kw.get('env', self.env))

@@ -368,14 +368,14 @@ def validate_c(self, kw):
    if not 'msg' in kw:
        kw['msg'] = 'Checking for custom code'
    if not 'errmsg' in kw:
        kw['errmsg'] = 'fail'
        kw['errmsg'] = 'no'

    for (flagsname,flagstype) in [('cxxflags','compiler'), ('cflags','compiler'), ('linkflags','linker')]:
        if flagsname in kw:
            if not 'msg' in kw:
                kw['msg'] = 'Checking for %s flags %s' % (flagstype, kw[flagsname])
            if not 'errmsg' in kw:
                kw['errmsg'] = 'fail'
                kw['errmsg'] = 'no'

    if not 'execute' in kw:
        kw['execute'] = False

@@ -384,7 +384,7 @@ def validate_c(self, kw):
        kw['errmsg'] = 'not found'

    if not 'okmsg' in kw:
        kw['okmsg'] = 'ok'
        kw['okmsg'] = 'yes'

    if not 'code' in kw:
        kw['code'] = SNIP3
@@ -514,7 +514,11 @@ def run_c_code(self, *k, **kw):

    bld.rescan(bld.srcnode)

    o = bld(features=[kw['compile_mode'], kw['type']], source=test_f_name, target='testprog')
    if not 'features' in kw:
        # conf.check(features='cc cprogram pyext', ...)
        kw['features'] = [kw['compile_mode'], kw['type']] # "cprogram cc"

    o = bld(features=kw['features'], source=test_f_name, target='testprog')

    for k, v in kw.iteritems():
        setattr(o, k, v)
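As the inline comment above suggests, the new 'features' keyword lets a configuration test reuse arbitrary task generator features instead of the default compile_mode/type pair; a hedged configure-time sketch (the specific checks are illustrative):

# hypothetical wscript fragment
def configure(conf):
    # default behaviour: features derived from compile_mode/type, e.g. "cc cprogram"
    conf.check(header_name='stdio.h')
    # explicit features, e.g. building the test snippet as a python extension
    conf.check(features='cc cprogram pyext', fragment='int main() { return 0; }\n',
               msg='Checking for a pyext test build')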
@@ -536,12 +540,11 @@ def run_c_code(self, *k, **kw):
        self.log.write('command returned %r' % ret)
        self.fatal(str(ret))

    # if we need to run the program, try to get its result
    # keep the name of the program to execute
    if kw['execute']:
        lastprog = o.link_task.outputs[0].abspath(env)

    # if we need to run the program, try to get its result
    if kw['execute']:
        args = Utils.to_list(kw.get('exec_args', []))
        proc = Utils.pproc.Popen([lastprog] + args, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
        (out, err) = proc.communicate()

@@ -669,7 +672,7 @@ def write_config_header(self, configfile='', env='', guard='', top=False):
    dest.write(self.get_config_header())

    # config files are not removed on "waf clean"
    env.append_value(CFG_FILES, os.path.join(diff, configfile))
    env.append_unique(CFG_FILES, os.path.join(diff, configfile))

    dest.write('\n#endif /* %s */\n' % waf_guard)
    dest.close()
@@ -15,12 +15,11 @@ TaskGen.declare_chain(
    name = 'flex',
    rule = '${FLEX} -o${TGT} ${FLEXFLAGS} ${SRC}',
    ext_in = '.l',
    ext_out = '.c .cxx'
    decider = decide_ext,
    before = 'cc cxx',
)

def detect(conf):
    conf.find_program('flex', var='FLEX', mandatory=True)
    v = conf.env
    v['FLEXFLAGS'] = ''
    conf.env['FLEXFLAGS'] = ''
@@ -134,6 +134,8 @@ def create_task_macplist(self):
@feature('cshlib')
def apply_link_osx(self):
    name = self.link_task.outputs[0].name
    if not self.install_path:
        return
    if getattr(self, 'vnum', None):
        name = name.replace('.dylib', '.%s.dylib' % self.vnum)
@@ -203,11 +203,13 @@ MACOSX_DEPLOYMENT_TARGET = %r
            if lib.startswith('-l'):
                lib = lib[2:] # strip '-l'
            env.append_value('LIB_PYEMBED', lib)

    if python_SHLIBS is not None:
        for lib in python_SHLIBS.split():
            if lib.startswith('-l'):
                lib = lib[2:] # strip '-l'
                env.append_value('LIB_PYEMBED', lib)
                env.append_value('LIB_PYEMBED', lib[2:]) # strip '-l'
            else:
                env.append_value('LINKFLAGS_PYEMBED', lib)

    if Options.platform != 'darwin' and python_LDFLAGS:
        env.append_value('LINKFLAGS_PYEMBED', python_LDFLAGS.split())

@@ -283,17 +285,8 @@ MACOSX_DEPLOYMENT_TARGET = %r
        env.append_value('CXXFLAGS_PYEXT', '-fno-strict-aliasing')

    # See if it compiles
    test_env = env.copy()
    a = test_env.append_value
    a('CPPPATH', env['CPPPATH_PYEMBED'])
    a('LIBPATH', env['LIBPATH_PYEMBED'])
    a('LIB', env['LIB_PYEMBED'])
    a('LINKFLAGS', env['LINKFLAGS_PYEMBED'])
    a('CXXFLAGS', env['CXXFLAGS_PYEMBED'])
    a('CCFLAGS', env['CCFLAGS_PYEMBED'])

    conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H',
        env=test_env, fragment=FRAG_2,
        uselib='PYEMBED', fragment=FRAG_2,
        errmsg='Could not find the python development headers', mandatory=1)

@conf
@@ -211,6 +211,7 @@ def create_uic_task(self, node):
    "hook for uic tasks"
    uictask = self.create_task('ui4', node)
    uictask.outputs = [self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])]
    return uictask

class qt4_taskgen(cxx.cxx_taskgen):
    def __init__(self, *k, **kw):

@@ -267,6 +268,7 @@ def cxx_hook(self, node):

    task = self.create_task('qxx', node, node.change_ext(obj_ext))
    self.compiled_tasks.append(task)
    return task

def process_qm2rcc(task):
    outfile = task.outputs[0].abspath(task.env)
@@ -196,9 +196,11 @@ def apply_tex(self):

        if self.type == 'latex':
            if 'ps' in outs:
                self.create_task('dvips', task.outputs, node.change_ext('.ps'))
                tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
                tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + ':' + self.path.abspath() + ':' + self.path.abspath(self.env)}
            if 'pdf' in outs:
                self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
                tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
                tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + ':' + self.path.abspath() + ':' + self.path.abspath(self.env)}
        elif self.type == 'pdflatex':
            if 'ps' in outs:
                self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
@@ -279,7 +279,7 @@ def detect(conf):

    try:
        output = Utils.cmd_output(valac + " --version", silent=True)
        version = output.split(' ', 1)[-1].strip().split(".")
        version = output.split(' ', 1)[-1].strip().split(".")[0:3]
        version = [int(x) for x in version]
        valac_version = tuple(version)
    except Exception:
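The [0:3] slice keeps only the first three numeric components of the reported version, so development builds with extra version fields no longer break the int() conversion; for example (the output string is invented):

output = 'Vala 0.7.10.4-dev'
version = output.split(' ', 1)[-1].strip().split(".")[0:3]
print(version)                            # ['0', '7', '10']
print(tuple(int(x) for x in version))     # (0, 7, 10); without the slice, int('4-dev') would raise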
@@ -273,7 +273,8 @@ def load_module(file_path, name=WSCRIPT_FILE):
    try:
        exec(compile(code, file_path, 'exec'), module.__dict__)
    except Exception:
        raise WscriptError(traceback.format_exc(), file_path)
        exc_type, exc_value, tb = sys.exc_info()
        raise WscriptError("".join(traceback.format_exception(exc_type, exc_value, tb)), file_path)
    sys.path.pop(0)

    g_loaded_modules[file_path] = module

@@ -325,7 +326,7 @@ else:
    # we actually try the function once to see if it is suitable
    try:
        myfun()
    except IOError:
    except:
        pass
    else:
        get_term_cols = myfun

@@ -645,7 +646,8 @@ class Context(object):
    try:
        exec(compile(txt, file_path, 'exec'), dc)
    except Exception:
        raise WscriptError(traceback.format_exc(), base)
        exc_type, exc_value, tb = sys.exc_info()
        raise WscriptError("".join(traceback.format_exception(exc_type, exc_value, tb)), base)
    finally:
        self.curdir = old
    if getattr(self.__class__, 'post_recurse', None):