# Part of the A-A-P recipe executive: build targets from dependencies and rules.
# Copyright (C) 2002-2003 Stichting NLnet Labs
# Permission to copy and use this file is specified in the file COPYING.
# If this file is missing you can find it here: http://www.a-a-p.org/COPYING
import os
import os.path
import string
import Global
from Node import Node
from Error import *
from Dictlist import dictlist2str,varname2dictlist,listitem2str
from Process import Process,recipe_error
from ParsePos import ParsePos
from RecPos import rpcopy,RecPos
from Util import *
from Depend import Depend,depend_auto
from Remote import url_time,is_url
from VersCont import fetch_nodelist
from Cache import local_name
from Sign import get_old_sign,get_new_sign,sign_clear,sign_clear_target
from Sign import sign_updated,buildcheckstr2sign,buildcheck_updated
from Sign import check_name
from Commands import expand,do_fetch_all,do_verscont_all,do_remove_add,\
do_publish_all, do_revise_all, aap_do
from Message import *
from Work import setrpstack,getwork
from DoArgs import local_arg,error_continue
from Scope import get_build_recdict,rule_in_tree
from Scope import prepend_user_scope,undo_user_scope
def dictlist2shellstr(list, key):
"""Turn a dictlist into a string that can be used as an argument for a
shell command.
TODO: let this depend on the type of shell used.
"""
res = ''
for i in list:
if res:
res = res + ' '
for c in i[key]:
if string.find("'\" \t", c) >= 0:
res = res + "\\"
res = res + c
return res
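# Illustrative sketch (not part of the original recipe executive): shows the
# kind of escaping dictlist2shellstr() performs.  The dictlist below is a
# hypothetical example; only the "name" key is used.
def _example_dictlist2shellstr():
    items = [{"name": "my file.c"}, {"name": "lib.c"}]
    # Quotes, spaces and tabs are backslash-escaped, so the result is
    # 'my\ file.c lib.c' and the shell sees each name as one word.
    return dictlist2shellstr(items, "name")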
def exec_commands(dep, recdict, target, filetype = None, really_build = 1):
"""
Execute the build commands "dep.commands" to build the Node "target", using
the info in the Depend object "dep".
"filetype" is the value for the "filetype" variable (used for automatic
dependencies).
When "really_build" is non-zero we are building normally. When
"really_build" is zero we are executing the ">always" and ">nobuild"
sections of a command block.
"""
work = getwork(recdict)
# Go to the build directory, unless the dependency is from a startup file.
cwd = None
if not dep.startup:
cwd = os.getcwd()
goto_dir(recdict, dep.builddir)
# Make a new recdict for the scope.
# Use the user scope specified for the dependency.
new_recdict = get_build_recdict(recdict, dep.buildrecdict,
keep_current_scope = dep.keep_current_scope,
rpstack = dep.rpstack,
xscope = dep.get_scope_names(work))
new_recdict["_really_build"] = really_build
# Set the variables $source, $target and the like.
new_recdict["buildtarget"] = target.get_name()
dl = shorten_dictlist(dep.targetlist)
work.add_node_attributes(dl)
xp = Expand(1, Expand.quote_aap)
new_recdict["target"] = dictlist2str(dl, xp)
new_recdict["target_list"] = map(lambda x : x["name"], dl)
new_recdict["target_dl"] = dl
# Get the list of dependencies.
dl = shorten_dictlist(dep.sourcelist)
work.add_node_attributes(dl)
new_recdict["depend"] = dictlist2str(dl, xp)
new_recdict["depend_list"] = map(lambda x : x["name"], dl)
new_recdict["depend_dl"] = dl
# Get the list of sources, omit virtual dependencies.
sl = filter(lambda x : not x["_node"].attributes.get("virtual"), dl)
new_recdict["source"] = dictlist2str(sl, xp)
if len(sl):
new_recdict["fname"] = listitem2str(sl[0]["name"])
else:
new_recdict["fname"] = ''
new_recdict["source_list"] = map(lambda x : x["name"], sl)
new_recdict["source_dl"] = sl
# Set the filetype for a depend action.
if filetype:
new_recdict["filetype"] = filetype
new_recdict["action"] = "depend"
new_recdict["targettype"] = "aap"
new_recdict["match"] = dep.matchstr
# Use "var_ABC" attributes from dependencies to set the "ABC" variable.
# Use "add_ABC" attributes from dependencies to add to the "ABC" variable.
from Action import get_vars_from_attr
for s in dl:
get_vars_from_attr(s, new_recdict)
# Create a ParsePos object to contain the parse position in the string.
# Make a copy of the RecPos stack, so that the item on top can be
# changed. Set the line number to just before where the commands were
# defined.
fp = ParsePos(rpcopy(dep.rpstack, dep.rpstack[-1].line_nr - 1),
string = dep.commands)
#
# Parse and execute the commands.
#
Process(fp, new_recdict, 0)
# Destroy the dictionary to free up used space (and it's not supposed to be
# used again).
dep.use_recdict = None
# Go back to the directory we came from.
if cwd:
goto_dir(recdict, cwd)
def locate_bdir(name, remove = 1):
"""
Locate the build subdirectory (builddir) "build" or "build-*" in "name".
When "remove" is True return the name with the builddir part removed.
When "remove" is False return the builddir name.
Return None if no builddir found.
"""
# Loop to move one path component from "head" to "tail" at a time until we
# find "build" or "build-*" or "head" does not have components left.
head = name
tail = ''
while 1:
newhead = os.path.dirname(head)
if newhead == head:
break # no directory names left
# Match with "/build/", and "/build-*/".
n = os.path.basename(head)
if (len(n) >= 5 and os.path.normcase(n)[:5] == "build"
and (len(n) == 5 or n[5] == '-')):
if not remove:
return head # Return the name of the build directory.
# Return the name with the "/build/" or "/build-*/" part removed.
if tail:
return os.path.join(newhead, tail)
return newhead
# Advance to the next component.
head = newhead
if tail:
tail = os.path.join(n, tail)
else:
tail = n
# no builddir found
return None
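# Illustrative sketch (not part of the original recipe executive): how
# locate_bdir() strips or returns the "build-*" component.  The paths are
# hypothetical examples.
def _example_locate_bdir():
    path = os.path.join("src", "build-linux", "foo.o")
    stripped = locate_bdir(path)              # -> os.path.join("src", "foo.o")
    builddir = locate_bdir(path, remove = 0)  # -> os.path.join("src", "build-linux")
    missing = locate_bdir("src/foo.c")        # -> None, no builddir present
    return stripped, builddir, missing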
class Update:
"""Objects used to remember which source of a dependencey is outdated and
the newest time for "newer"."""
def __init__(self):
self.forced = Global.cmd_args.options.get("force")
# {force} used on target or source
        self.time = 0           # newest timestamp found among the sources
        self.time_source = ''   # source that has the newest timestamp
self.source = '' # source that has outdated signature
self.buildcheck = '' # buildcheck signature
self.newsig = 0 # no old signature known
def outdated(self, target):
"""Return TRUE if we already know that updating is required.
This doesn't handle a source being newer than the target."""
return (self.forced
or self.time
or self.source
or (target.attributes.get("virtual")
and not target.attributes.get("remember")))
def set_buildcheck(self, recdict, str, target, level):
"""Set the buildcheck signature from string "str" for target "target".
Check if it's different from the last time."""
buildcheck = buildcheckstr2sign(str)
self.buildcheck = buildcheck
# Only need to do the check if not updating already.
if not self.outdated(target):
# building is to be done if the signature differs.
# With "--contents" we don't build for a changed buildcheck.
old = get_old_sign(recdict, "", "buildcheck", target)
if not old:
self.newsig = 1
if self.buildcheck != old:
if Global.cmd_args.options.get("contents"):
msg_depend(recdict, _('Buildcheck for "%s" changed, ignored because of --contents option') % target.short_name(), level)
else:
self.source = "buildcheck"
# DEBUG
# msg_extra(recdict, 'build commands: "%s"' % str)
def upd_reason(self, dst_time, target):
"""Check if "target" needs to be updated. "dst_time" is the timestamp
of the target."""
# If a target is older than any source or one of the targets in the
# dependency doesn't exist: execute the commands.
if self.forced:
if Global.cmd_args.options.get("force"):
reason = _('-F or --force command line argument')
elif self.source:
reason = (_('"%s" has force attribute')
% shorten_name(self.source))
else:
reason = (_('target "%s" has force attribute')
% target.short_name())
elif (target.attributes.get("virtual")
and not target.attributes.get("remember")
and (not self.source or self.source == 'buildcheck')):
# the buildcheck for a virtual target isn't stored, thus its
# signature will always be different (new).
reason = _('virtual target')
elif self.source:
if self.newsig:
reason = _('no old signature for "%s"') % shorten_name(self.source)
else:
reason = _('"%s" has changed') % shorten_name(self.source)
elif dst_time < self.time:
reason = _('"%s" is newer') % self.time_source
elif dst_time == 0 and not target.attributes.get("virtual"):
reason = _("it doesn't exist")
else:
reason = ''
return reason
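# Illustrative sketch (not part of the original recipe executive): the typical
# life cycle of an Update object as driven by target_update() and
# may_exec_depend() below.  "recdict", "src_dict", "target" and "dst_time" are
# assumed to come from the surrounding build machinery.
def _example_update_usage(recdict, src_dict, target, dst_time):
    update = Update()                   # -F/--force sets update.forced
    check_need_update(recdict, update, src_dict, target)
    # A non-empty reason means the build commands must be executed.
    return update.upd_reason(dst_time, target)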
# List of nodes for which autodep_dictlist needs to be cleared when a
# target is built.
clear_autodep_list = []
def autodep_update(work, recdict, scope_recdict, src_dict,
node, target, update, rule, nlevel, retlist):
"""
Find automatic dependencies for item "src_dict"/"node". If there are any,
also update them (recursively!) and append them to "retlist", so that
their signatures are stored when building "target" succeeds.
Do this on the global node, not the item itself, to re-use dependencies
when the source is also used in other dependencies.
Careful though: When the compilation arguments are different the
dependencies may also be different!
"""
# Avoid endless recursion by setting autodep_busy.
if node.autodep_busy:
return
# If "target" starts with (part of) $BDIR set the depdir attribute
# to use it for the auto-dependency file. Useful if some variant
# isn't used for the directory.
if not node.attributes.has_key("depdir"):
bdir = locate_bdir(target.get_name(), remove = 0)
if bdir:
node.attributes["depdir"] = bdir
# Updating dependencies needs to be repeated when this happens:
# 1. Automatic dependencies are figured out.
# 2. dictlist_update() is invoked to update the dependencies.
    # 3. One of the dependencies is outdated and gets rebuilt. The
# files it depends on may now be different (e.g., when a
# "#include" line was added or removed).
# To detect this situation "clear_autodep_list" contains the nodes
# for which the dependencies are being updated. The
# "autodep_dictlist" member is cleared for these nodes when any
    # item is built in may_exec_depend().
# A global list appears to be the only method that works in all
# situations.
while 1:
        # Obtain or update the automatic dependencies for "node".
depend_auto(work, recdict, node, src_dict, nlevel)
# If dependencies are found, they also need to be updated.
if node.autodep_dictlist:
global clear_autodep_list
save_clear_autodep_list = clear_autodep_list
clear_autodep_list.append(node)
node.autodep_busy = 1
slist = dictlist_update(node.autodep_dictlist,
work, recdict,
scope_recdict,
target, update, rule,
autodepend = not node.autodep_recursive,
level = nlevel)
clear_autodep_list = save_clear_autodep_list
node.autodep_busy = 0
if not node.autodep_dictlist:
# The autodep_dictlist list was cleared, thus the
# dependencies are now outdated. Must be caused by an
# included file that was generated now.
continue
retlist.extend(slist)
break
def expand_srcpath(recdict, isabs, name, dict):
"""
Expand "name" into a list of names using $SRCPATH.
The "srcpath" attribute in "dict" overrules $SRCPATH.
"""
if not isabs:
if dict.has_key("srcpath"):
from Dictlist import str2dictlist
srcpath = str2dictlist([], dict["srcpath"])
else:
srcpath = varname2dictlist(recdict, "_no", "SRCPATH")
if isabs or not srcpath:
names = [ name ]
else:
names = []
for i in srcpath:
names.append(os.path.join(i["name"], name))
return names
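# Illustrative sketch (not part of the original recipe executive): a relative
# name combined with a hypothetical "srcpath" attribute of "src include"
# expands to both locations, assuming str2dictlist() splits the attribute on
# white space.
def _example_expand_srcpath(recdict):
    names = expand_srcpath(recdict, 0, "foo.c", {"srcpath": "src include"})
    # -> [os.path.join("src", "foo.c"), os.path.join("include", "foo.c")]
    return names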
# Values for "done" below.
build_done_NOT = 0 # updating not done yet
build_done_OK = 1 # updating done OK
build_done_IGNORE = 2 # updating skipped
build_done_ERROR = 3 # updating failed
def target_update_inlist(work, recdict, scope_recdict,
src_dict, names, target, level, nlevel):
"""
Update "src_dict" from one of the possible locations in "names".
Returns one of the build_done_ values and a node, if found.
"""
done = build_done_NOT
for src_name in names:
# Find the node after removing "./" things ($SRCPATH may cause it)
if is_url(src_name):
norm_name = src_name
else:
norm_name = os.path.normpath(src_name)
node = work.get_node(norm_name, 1, src_dict)
# Check if we skip updating this item.
if node.attributes.get("update") == "no":
return build_done_OK, node
        # Ignore a source that is equal to the target (it depends on itself).
if node == target:
msg_depend(recdict,
_('Target depends on itself (ignored): "%s"')
% target.short_name(), level)
return build_done_IGNORE, node
# Try updating the item. If it works we use it.
node.scope_recdict = scope_recdict
res = target_update(work, recdict, node, level = nlevel)
if res == updated_OK:
# TRICK: fix the dependency to use this node
src_dict["name"] = node.get_name()
src_dict["_node"] = node
return build_done_OK, node
if res == updated_ERROR:
# We continue any way. If another name is successful it's OK, if
# not then remember this error.
done = build_done_ERROR
# Did not find a name that could be updated.
return done, None
def dictlist_update(dictlist, work, recdict, scope_recdict,
target, update, rule, autodepend = 1, level = None):
"""
Go over all items in dictlist "dictlist" and update them. Each item is a
source in a dependency, thus it's also a node.
    - Skip the "target" node (avoid files that depend on themselves).
- Use $SRCPATH to find the items. If the "srcpath" attribute is given use
it instead of $SRCPATH.
- When "rule" is non-zero, and an item starts with the name of a build
directory ("build-"), also try without that directory name.
- Find out if an item is outdated, using the Update object "update".
- "recdict" is the normal namespace to get variables for. "scope_recdict"
is used to find rules that apply. Normally it is the recdict of the
recipe where the dependency was specified.
- When "autodepend" is zero don't check for automatic dependencies.
Returns the dictlist of nodes that have been updated. This is "dictlist"
with automatic dependencies added and items corrected for the path where
the source was found.
"""
# Make sure there is a node for each item in "dictlist".
dictlist2depnodelist(work, dictlist)
retlist = []
    # Increase the level, if given.
if level:
nlevel = level + 1
else:
nlevel = None
for src_dict in dictlist:
# For an absolute path use the source name literally, otherwise use
# $SRCPATH to locate the source.
# However, need to add the directory for a child recipe.
src = src_dict["name"]
src_isabs = os.path.isabs(src)
if not src_isabs and src_dict.has_key("_node"):
src = src_dict["_node"].short_name()
names = expand_srcpath(recdict,
(src_isabs or is_url(src)), src, src_dict)
if rule:
# When "src" starts with "$BDIR/" also try without it.
# Thus "foo/build-abc/bar.c" becomes "foo/bar.c".
# This helps for ":rule *.o : *.c" used for "foo/build-abc/foo.o"
src_no_bdir = locate_bdir(os.path.abspath(src))
if src_no_bdir:
names.extend(expand_srcpath(recdict, 0, src_no_bdir, src_dict))
# Update this "src" from one of the possible targets in "names".
done, node = target_update_inlist(work, recdict, scope_recdict,
src_dict, names, target, level, nlevel)
if done == build_done_IGNORE:
continue # skip source equal to target
if done == build_done_NOT:
msg = _('Do not know how to build "%s"')
elif done == build_done_ERROR:
msg = _('Building "%s" FAILED')
else:
msg = None
if msg:
msg = msg % shorten_name(src)
if error_continue():
# Ignoring errors: continue with further targets.
# Remember building this target failed, any other targets
# that depend on it will also fail.
target.status = Node.builderror
msg_error(recdict, msg)
else:
# Quit building at the first error.
raise UserError, msg
if autodepend and node and not node.attributes.get("virtual"):
# Find automatic dependencies for this item and update them
# recursively.
autodep_update(work, recdict, scope_recdict,
src_dict, node, target, update, rule, nlevel, retlist)
# Check if the target needs updating because item "src_dict" is
# outdated.
check_need_update(recdict, update, src_dict, target)
# Make the list of nodes after adjusting for the item actually used in the
# source path or after removing $BDIR.
retlist.extend(dictlist2depnodelist(work, dictlist))
# Return the list of items that were updated.
return retlist
def check_need_update(recdict, update, src_dict, target, rootname = None):
"""
Check if node "target" needs to be updated by checking the source
"src_dict".
"update" is the Update object used for the target.
"rootname" is used for publishing: the target name without the publish
attribute, used for the "--contents" option.
"""
# Only need to do the check if not updating already. Saves a bit of time
# in computing signatures (although they might have to be computed anyway
# to be able to remember them).
if target.status != Node.builderror and not update.outdated(target):
virtual = 0
if src_dict.has_key("_node"):
node = src_dict["_node"]
src_name = node.get_name()
virtual = node.attributes.get("virtual")
elif is_url(src_dict["name"]):
src_name = src_dict["name"]
else:
src_name = os.path.normpath(src_dict["name"])
# The "force" attribute forces updating always.
# A virtual target used as a source causes updating as well.
if src_dict.get("force"):
update.forced = 1
update.source = src_name
elif virtual:
update.source = src_name
else:
alt_dict = None
if src_dict.has_key("_node"):
alt_dict = src_dict["_node"].attributes
check = check_name(recdict, src_name, src_dict, alt_dict)
if check == "newer":
c = "time" # "newer" check also uses timestamp
else:
c = check
if rootname and Global.cmd_args.options.get("contents"):
# When publishing and the "--contents" option is used,
# obtain the most recent signature for any destination.
old = get_old_sign(recdict, src_name, c, target,
rootname = rootname)
else:
old = get_old_sign(recdict, src_name, c, target)
new = get_new_sign(recdict, src_name, c)
if new == '' or new == '0':
                raise UserError, (_('Cannot compute signature for "%s"')
                        % src_name)
# Update update.time or update.source:
# If it's -1 we already know building is to be done.
# If the check is "newer" need to find the newest timestamp.
# For other checks building is to be done if the sign differs.
if check == "newer":
t = long(float(new))
if update.time == 0 or t > update.time:
update.time = t
update.time_source = src_name
else:
if not old:
update.newsig = 1
if new != old:
update.source = src_name
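# Summary of the checks done in check_need_update() above:
#   - {force} on the source, or a virtual source used as a source, always
#     triggers a rebuild.
#   - check "newer": remember the newest source timestamp; the target is
#     rebuilt when it is older than that timestamp.
#   - any other check (e.g., "md5"): the target is rebuilt when the source
#     signature differs from the stored one, or when no old signature exists.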
def buildcheck_update(recdict, checkstring, dep, target, update, level):
"""
Check if the "checkstring" value changed since the last build and change
"update" accordingly. "dep.commands" are the commands for the build rule.
"""
# If the "buildcheck" attribute is empty, don't check if it changed.
i = skip_white(checkstring, 0)
if i >= len(checkstring):
update.buildcheck = ''
return
# No need to compute the buildcheck for a virtual target, it's not used or
# stored anyway. Speeds up the big build block that ":mkdownload"
# generates.
if (target.attributes.get("virtual")
and not target.attributes.get("remember")):
update.buildcheck = ''
return
work = getwork(recdict)
line_nr = dep.rpstack[-1].line_nr
setrpstack(recdict, dep.rpstack)
# Use scope specified with "scope" attribute on dependency.
saved_scope = prepend_user_scope(recdict, dep.get_scope_names(work))
# To take into account changes in the commands of an action, need to
# replace "do action file" by the commands for the action.
from Action import action_expand_do
commands = action_expand_do(recdict, dep.commands,
dep.targetlist, dep.sourcelist)
# Remove comments.
i = 0
cl = len(commands)
while i < cl:
s = skip_white(commands, i)
e = string.find(commands, '\n', s)
if e < 0:
e = cl - 1
if s < cl and commands[s] == '#':
cl = cl - (e + 1 - i)
commands = commands[:i] + commands[e + 1:]
else:
i = e + 1
# Set the values for $source and $target to an empty string. The
# dependency takes care of changes in these and their values are
# unpredictable. Also avoids using a value from a surrounding scope.
save = {}
saved = ["source", "target", "fname", "match"]
for k in saved:
save[k] = recdict.get(k)
recdict[k] = ''
# Use "var_xxx" attributes from source files, they overrule variables.
# Also use "var_xxx" attributes from target files. This isn't exactly the
# same as what happens for an executed dependency, but it is required to
# handle attributes that are passed to an action, such as
# {var_LDFLAGS = -X}.
from Action import get_vars_from_attr
save_attr = {}
for s in dep.sourcelist + dep.targetlist:
node = work.find_node(s["name"])
if node:
get_vars_from_attr(node.attributes, recdict, save_attr)
get_vars_from_attr(s, recdict, save_attr)
# Always compute the check signature, it either needs to be compared (not
    # outdated yet) or stored (when outdated already).
# Only set the value of $xcommands when it's used, it takes some time and
# may fail.
recdict["commands"] = commands
xp = Expand(0, Expand.quote_aap, skip_errors = 1)
if string.find(checkstring, "xcommands") > 0:
recdict["xcommands"] = expand(line_nr, recdict,
commands, xp, skip_errors = 1)
check_str = expand(line_nr, recdict, checkstring, xp, skip_errors = 1)
del recdict["commands"]
if recdict.has_key("xcommands"):
del recdict["xcommands"]
    for k in saved + save_attr.keys():
        if save.has_key(k):
            v = save[k]
        else:
            v = save_attr[k]
        if v is None:
            if recdict.has_key(k):
                del recdict[k]
        else:
            recdict[k] = v
# Undo prepend_user_scope().
undo_user_scope(recdict, saved_scope)
# Collapse spans of white space into a single space. Items like $LIBS may
# have irrelevant trailing spaces. Leading white space may be relevant
# though, keep it.
# TODO: how about white space inside quotes?
bstr = ''
i = 0
leading = 1
check_str_len = len(check_str)
while i < check_str_len:
s = i
c = check_str[i]
while ((c == ' ' or c == '\t')
and i + 1 < check_str_len
and (check_str[i + 1] == ' ' or
check_str[i + 1] == '\t' or check_str[i + 1] == '\n')):
i = i + 1
if not leading:
s = i
e = i + 1
while (e < check_str_len and check_str[e] != ' '
and check_str[e] != '\t'):
e = e + 1
bstr = bstr + check_str[s:e]
i = e
leading = (bstr[-1] == '\n')
update.set_buildcheck(recdict, bstr, target, level)
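# Example of the white-space collapsing done at the end of buildcheck_update()
# (hypothetical expanded buildcheck string):
#   input:  "  gcc   -c   $source   \n"
#   output: "  gcc -c $source\n"
# Leading white space is kept, internal runs become a single space and spaces
# before a newline are dropped.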
def target_rule(work, recdict, target, update, src_list_list, level):
"""Find rules that match "target" and do the work for each of them.
"src_list_list" is extended for items that need their sign updated.
Returns a dependency made out of a rule with build commands or None
"""
#
# No specific dependency is defined for this target:
# - Find all matching rules and update their sources. Remember the
# newest timestamp.
# - Find a matching rule with commands and create a dependency from
# that.
#
target_name = target.get_name()
target_sname = target.short_name()
dep = None
buildrule = None # Fully matching build rule or has more than
# one source.
buildrule_len = 0 # length of matching pattern
buildrule_p = None # Potential build rule, use when there is no
# full match.
buildrule_p_len = 0 # length of matching pattern of buildrule_p
double = 0 # set when more than one matching rule found
    double_p = 0        # set when more than one potential rule found
for r in work.rules:
# Check if the rule exists in the scope of the target
if r.scope == "local":
# The rule must be defined in the same recipe as the build
# commands.
if not target.scope_recdict["_recipe"] is r.buildrecdict["_recipe"]:
continue
elif r.scope == "normal":
if not rule_in_tree(r.buildrecdict, target.scope_recdict):
continue
matchstr, adir, matchlen = r.match_target(target_name, target_sname)
if matchstr:
src_list = r.target2sourcelist(target_name, target_sname)
# Skip rules where the target and source are equal.
# Happens for ":rule %.jpg : path/%.jpg".
if len(src_list) == 1 and src_list[0]["name"] == target_name:
continue
# Check if all the sources exist.
# When not, only remember it as a potential buildrule.
full_match = 1
for s in src_list:
e = os.path.exists(s["name"])
if not e:
# Try removing $BDIR: 'foo/build-abc/foo.c' -> 'foo/foo.c'
n = locate_bdir(os.path.abspath(s["name"]))
if n:
e = os.path.exists(n)
if not e and s.get("fetch"):
# The file doesn't exist but we can try downloading it.
node = work.get_node(s["name"], 0, s)
fetch_nodelist(target.rpstack(), recdict, [ node ], 1)
e = os.path.exists(s["name"])
if not e:
                    # TODO: Should check if this source can be built from
# other rules or dependencies.
full_match = 0
break
if r.commands:
# Remember the rule with the longest matching pattern.
if full_match:
if buildrule and matchlen == buildrule_len:
double = 1
if matchlen > buildrule_len:
buildrule = r
buildrule_len = matchlen
double = 0
elif not r.sourceexists:
if buildrule_p and matchlen == buildrule_p_len:
double_p = 1
if matchlen > buildrule_p_len:
buildrule_p = r
buildrule_p_len = matchlen
double_p = 0
elif full_match:
                # Matching rule without commands: update its sources.
msg_depend(recdict, _('Using rule "%s : %s" for target "%s"')
% (dictlist2str(r.targetlist,
Expand(0, Expand.quote_aap)),
dictlist2str(r.sourcelist,
Expand(0, Expand.quote_aap)),
target_sname), level)
target.current_rule = r
slist = dictlist_update(src_list, work, recdict,
target.scope_recdict,
target, update, 1, level = level)
target.current_rule = None
src_list_list.append(slist)
# Give an error when more than one fully matching rule found or when
    # there is no matching rule and more than one potential rule found.
if double:
raise UserError, (_('More than one matching build rule for "%s"')
% target_sname)
# When there are two matching potential rules we don't know which one to
# use. Assume none of them is to be used. Required to handle rules for
# lex (%.c : %.l) and yacc (%.c : %.y).
if not buildrule and double_p:
msg_note(recdict, _('More than one matching build rule for "%s", skipping')
% target_sname)
return None
if not buildrule and buildrule_p:
# Didn't find a fully matching rule, use the potential one.
buildrule = buildrule_p
if buildrule:
msg_depend(recdict, _('Using build rule "%s : %s" for target "%s"')
% (dictlist2str(buildrule.targetlist,
Expand(0, Expand.quote_aap)),
dictlist2str(buildrule.sourcelist,
Expand(0, Expand.quote_aap)),
target_sname), level)
# Create a dependency to be executed with may_exec_depend().
# The build directory is set to ".", which means to use the directory
# where the rule was invoked from.
src_list = buildrule.target2sourcelist(target_name, target_sname)
dep = Depend(buildrule.target2targetlist(target_name, target_sname),
buildrule.build_attr,
src_list,
work,
buildrule.rpstack,
buildrule.commands,
".",
buildrule.buildrecdict)
dep.matchstr, adir, matchlen = buildrule.match_target(target_name,
target_sname)
# Apply attributes from the rule to the target. Must be before
# dictlist_update(), because the "virtual" attribute may influence
# what happens there (e.g., location of sign file).
for d in dep.targetlist:
if d["name"] == target_name:
target.set_attributes(d)
# Update sources for this build rule.
target.current_rule = buildrule
slist = dictlist_update(src_list, work, recdict,
target.scope_recdict,
target, update, 1, level = level)
target.current_rule = None
src_list_list.append(slist)
return dep
# return values for target_update():
updated_OK = 1 # updating worked.
updated_FAIL = 0 # don't know how to update the target.
updated_ERROR = -1 # error while executing commands.
def target_update(work, recdict, target, toplevel = 0, force = 0, level = None):
"""
Update a target by finding the build rule for it and executing the build
commands if it's outdated.
"target" is a Node object.
If "toplevel" is non-zero it is an error not to have build commands.
Return a non-zero number for success.
"""
retval = updated_OK
target_name = target.get_name()
target_sname = target.short_name()
# The "comment" target is a special case.
if target.name == "comment":
work.print_comments()
return retval
# Return right away if this target was already updated.
if target.status is Node.updated and not force:
# This message is only given at the toplevel, otherwise it clogs the
# log file with remarks about header files.
if level == 1:
msg_depend(recdict, _('Target was already updated: "%s"')
% target_sname, level)
return retval
# Don't try updating a target that failed previously.
if target.status is Node.builderror:
msg_depend(recdict, _('skip updating target "%s": previously failed')
% target_sname, level)
return updated_ERROR
if force:
msg_depend(recdict, _('updating target "%s" forcefully')
% target_sname, level)
else:
msg_depend(recdict, _('updating target "%s"')
% target_sname, level)
# Check if we are trying to recursively update ourselves.
if target.status is Node.busy:
if target.current_rule:
rpstack = target.current_rule.rpstack
where = ' from rule'
elif target.current_dep:
rpstack = target.current_dep.rpstack
where = ' from dependency'
else:
where = ''
if where:
where = where + (_(' in recipe "%s" line %d')
% (rpstack[-1].name, rpstack[-1].line_nr))
if target.attributes.get("recursive"):
target.recursive_level = target.recursive_level + 1
if target.recursive_level > int(target.attributes.get("recursive")):
raise UserError, (_('Cyclic dependency exceeds %d levels for "%s"%s')
% (target.recursive_level - 1, target_sname, where))
else:
raise UserError, (_('Cyclic dependency for "%s"%s')
% (target_sname, where))
else:
target.recursive_level = 1
for dep in target.get_dependencies():
if dep.in_use:
raise UserError, (_('Cyclic dependency for "%s" in recipe "%s" line %d')
% (target_sname, dep.rpstack[-1].name,
dep.rpstack[-1].line_nr))
return updated_FAIL
# Increase the build level.
if level:
nlevel = level + 1
else:
nlevel = None
target.status = Node.busy
    # Save the "virtual" attribute, it is restored when this update is done.
save_virtual = target.attributes.get("virtual")
# Use an Update object to remember if any of the sources for this target is
# outdated, thus we need to build the target.
# For the "newer" check we need to find the newest timestamp of the
# sources, update.time is set to the timestamp of the item.
# For other checks we compare with a previous signature. If outdated,
# update.source is set to the name of the outdated source.
update = Update()
if force or target.attributes.get("force"):
update.forced = 1
# Remember which lists of sources need to have their signatures updated
# when building succeeds.
src_list_list = []
# Go over all specified dependencies that have this target in their target
# list and update their sources. Also find the newest timestamp.
for dep in target.get_dependencies():
msg = (_('Using dependency "%s : %s"')
% (dictlist2str(dep.targetlist,
Expand(1, Expand.quote_aap)),
dictlist2str(dep.sourcelist,
Expand(1, Expand.quote_aap))))
if len(dep.targetlist) > 1:
msg = msg + _(' for target "%s"') % target_sname
msg_depend(recdict, msg, nlevel)
# The recdict to be used for the build commands is also used for
# checking the dependencies. This means the $BDIR of the child recipe
# is used when necessary.
dep.use_recdict = get_build_recdict(recdict, dep.buildrecdict,
keep_current_scope = dep.keep_current_scope,
rpstack = dep.rpstack,
xscope = dep.get_scope_names(work))
# Go to the build directory, so that auto-dependencies are stored in
# the right AAPDIR directory ($BDIR is only valid in the directory of
# the child recipe). Don't do this for startup dependencies.
cwd = None
if not dep.startup:
cwd = os.getcwd()
goto_dir(recdict, dep.builddir)
# Set the "in_use" flag for the dependency. This is needed for when
# there are multiple targets, only the current target has the busy flag
# set. E.g., "foo bar: bar" and updating "foo".
dep.in_use = 1
target.current_dep = dep
slist = dictlist_update(dep.sourcelist, work, dep.use_recdict,
dep.buildrecdict,
target, update, 0, level = nlevel)
target.current_dep = None
dep.in_use = 0
src_list_list.append(slist)
# Go back to the directory we came from.
if cwd:
goto_dir(recdict, cwd)
    # If there is a dependency with commands, rules are not applied.
dep = target.get_first_build_dependency()
dirmode = target.isdir()
# If there is no dependency and the target is not a directory:
# 1. Find a matching rule
# 2. Fetch the file
if not dep and not dirmode:
# 1. Find matching rules.
# One with build commands is turned into a dependency.
dep = target_rule(work, recdict, target, update, src_list_list, nlevel)
if dep:
dep.use_recdict = get_build_recdict(recdict, dep.buildrecdict,
keep_current_scope = dep.keep_current_scope,
rpstack = dep.rpstack,
xscope = dep.get_scope_names(work))
else:
# 2. Fetch the file
# If the target doesn't exist and has the "fetch" attribute, try
# fetching it. This may use a cached file.
if (not os.path.exists(target_name)
and target.attributes.has_key("fetch")):
fetch_nodelist(target.rpstack(), recdict, [ target ], 1)
if target.attributes.get("virtual"):
msg_depend(recdict,
_('Virtual target has no build commands: "%s"')
% target_sname, level)
# This is an error if there are no sources.
if not src_list_list:
retval = updated_FAIL
elif toplevel or not os.path.exists(target_name):
msg_depend(recdict,
_('Do not know how to build "%s"')
% target_sname, level)
retval = updated_FAIL
else:
msg_depend(recdict,
_('Target has no build commands and exists: "%s"')
% target_sname, level)
#
# If there is a dependency with build commands, execute it.
#
if dep:
        # The "finally" and "fetch" targets may have multiple build commands.
deplist = target.get_build_dependencies()
if not deplist:
deplist = [ dep ]
for d in deplist:
# Execute the dependency commands if the target is outdated.
try:
may_exec_depend(recdict, d, update,
target, src_list_list, level)
except UserError:
# Only continue execution with the "--continue" option.
if not error_continue():
raise
retval = updated_ERROR
elif dirmode:
# Target is a directory and has no specific build commands: create the
# directory if it doesn't exist.
if os.path.exists(target_name):
if not os.path.isdir(target_name):
msg_warning(recdict,
_('Target is not a directory: "%s"') % target_sname)
else:
msg_depend(recdict, _('Directory exists: "%s"')
% target_sname, level)
else:
if skip_commands():
msg_info(recdict, _('skip creating directory "%s"')
% target_name)
else:
try:
import types
# If a mode like "0777" was specified, use it.
if (isinstance(dirmode, types.StringType)
and dirmode[0] == '0'):
os.makedirs(target_name, oct2int(dirmode))
else:
os.makedirs(target_name)
msg_info(recdict, _('Created directory: "%s"')
% target_name)
except EnvironmentError, e:
recipe_error([], (_('Could not create directory "%s"')
% target_name) + str(e))
elif target.name == "fetch":
        # Automatically fetch all nodes with a "fetch" attribute.
if do_fetch_all([], recdict, {}):
retval = updated_OK
else:
retval = updated_ERROR
elif target.name == "publish":
# Automatically publish all nodes with a "publish" attribute.
        # An empty return list means it worked.
if not do_publish_all([], recdict, {}):
retval = updated_OK
else:
retval = updated_ERROR
elif target.name == "commit":
# Automatically commit all nodes with a "commit" attribute.
if do_verscont_all([], recdict, "commit", {}):
retval = updated_OK
else:
retval = updated_ERROR
elif target.name == "checkout":
# Automatically checkout all nodes with a "commit" attribute.
if do_verscont_all([], recdict, "checkout", {}):
retval = updated_OK
else:
retval = updated_ERROR
elif target.name == "checkin":
# Automatically checkin all nodes with a "commit" attribute.
if do_verscont_all([], recdict, "checkin", {}):
retval = updated_OK
else:
retval = updated_ERROR
elif target.name == "unlock":
        # Automatically unlock all nodes with a "commit" attribute.
if do_verscont_all([], recdict, "unlock", {}):
retval = updated_OK
else:
retval = updated_ERROR
elif target.name == "add":
# Automatically add new nodes with a "commit" attribute.
if do_remove_add([], recdict, {}, local_arg(), "add"):
retval = updated_OK
else:
retval = updated_ERROR
elif target.name == "remove":
# Automatically delete nodes without a "commit" attribute.
if do_remove_add([], recdict, {}, local_arg(), "remove"):
retval = updated_OK
else:
retval = updated_ERROR
elif target.name == "tag":
# Automatically tag nodes with a "commit" and "tag" attribute.
if do_verscont_all([], recdict, "tag", {}):
retval = updated_OK
else:
retval = updated_ERROR
elif target.name == "revise":
        # Automatically check in all nodes with a "commit" attribute and
        # delete nodes that are to be removed.
if do_revise_all([], recdict, {}, local_arg()):
retval = updated_OK
else:
retval = updated_ERROR
elif target.name == "reference":
# Generate or update the cross reference
# TODO: check for errors
aap_do(0, recdict, "reference $SOURCE $?INCLUDE")
retval = updated_OK
elif target.name == "refresh":
# Nothing to do, refreshing will have been done by now.
retval = updated_OK
if retval == updated_OK:
target.status = Node.updated
elif retval == updated_ERROR:
target.status = Node.builderror
else:
target.status = Node.new
target.recursive_level = target.recursive_level - 1
# restore "virtual" attribute
if save_virtual is None:
if target.attributes.has_key("virtual"):
del target.attributes["virtual"]
else:
target.attributes["virtual"] = save_virtual
return retval
class DepNode:
"""Object to keep the node of a dependency with the attributes local to the
dependency."""
def __init__(self, node, dict):
self.node = node
self.attributes = dict
def dictlist2depnodelist(work, dictlist):
"""Turn the items in "dictlist" into DepNodes, return the result."""
nl = []
for item in dictlist:
# for a dependency we know the node, need to find it for a rule
if item.has_key("_node"):
n = item["_node"]
else:
n = work.get_node(item["name"], 1)
nl.append(DepNode(n, item))
if n.attributes.get("virtual") and item.get("remember"):
recipe_error([],
_('Cannot use "remember" attribute on virtual node %s')
% item["name"])
return nl
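# Illustrative sketch (not part of the original recipe executive): "work" is
# assumed to be the current Work object; items that already carry a "_node"
# entry keep it, the others are looked up (and created when missing).
def _example_dictlist2depnodelist(work):
    items = [{"name": "foo.c"}, {"name": "foo.h", "check": "md5"}]
    depnodes = dictlist2depnodelist(work, items)
    return [(dn.node.get_name(), dn.attributes) for dn in depnodes]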
def commands_with_sections(s):
"""
Return non-zero when sections exist in command block "s".
A section starts with ">always", ">build" or ">nobuild".
Ignore white space and comments.
"""
slen = len(s)
i = 0
while i < slen:
i = skip_white(s, i)
if i >= slen:
break
c = s[i]
if c == '>':
return 1
if c != '\n' and c != '#':
break
i = string.find(s, '\n', i)
if i < 0:
break
i = i + 1
return 0
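# Illustrative sketch (hypothetical command blocks): only a block whose first
# non-blank, non-comment line starts with '>' is reported as having sections.
def _example_commands_with_sections():
    with_sections = "  # a comment\n  >always\n    :print hello\n"
    plain = "    :sys touch $target\n"
    return commands_with_sections(with_sections), commands_with_sections(plain)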
def may_exec_depend(recdict, dep, update, target, src_list_list, level,
filetype = None):
"""Execute the build commands of dependency "dep" if the target "target" is
outdated.
When updating succeeds, remember the signs for the items in
"src_list_list".
Return non-zero when building was done."""
target_name = target.get_name()
target_sname = target.short_name()
# Get timestamp for the target. If this fails it probably doesn't
    # exist and needs to be rebuilt (dst_time will be zero).
dst_time = url_time(recdict, target_name)
if dst_time > 0 and target.attributes.get("virtual"):
msg_warning(recdict, _('target is virtual but does exist: "%s"')
% target_sname)
# If a "buildcheck" attribute is defined for the build commands, check
# if it has changed. Otherwise check if the build commands changed.
if dep.build_attr.has_key("buildcheck"):
buildcheck = dep.build_attr["buildcheck"]
else:
buildcheck = "$xcommands"
buildcheck_update(dep.use_recdict, buildcheck, dep, target, update, level)
# Get the reason for building (if any).
reason = update.upd_reason(dst_time, target)
if reason:
retval = 1
else:
retval = 0
if target.status == Node.builderror:
# Can't build this target, there were errors in the dependencies or
# building failed previously.
return retval
    # Execute the build commands when the target needs to be updated, or when
    # the command block contains ">always", ">build" or ">nobuild" sections
    # (those sections must be processed even when the target is up-to-date).
if reason:
build = 1
else:
msg_depend(recdict, _('Target "%s" is up-to-date') % target_sname,
level)
build = commands_with_sections(dep.commands)
if build:
if (reason and Global.cmd_args.options.get("touch")
and not Global.force_build):
            # Skip standard virtual targets, they are always rebuilt.
if not target.name in Global.virtual_targets:
msg_info(recdict, _('Touching "%s"') % target_sname)
else:
if reason:
msg = reason
else:
msg = _("command block with sections")
msg_depend(recdict, _('Updating "%s" from "%s": %s')
% (target_sname,
dictlist2str(shorten_dictlist(dep.sourcelist)), msg),
level)
# Skip when not actually building.
if not skip_commands():
# Make sure the directory for target_name exists.
# Create the directory if "target" contains "/build*/".
if locate_bdir(target_name):
aap_checkdir(dep.rpstack, recdict, target_name)
# Execute the build commands.
exec_commands(dep, recdict, target, filetype = filetype,
really_build = reason)
# Skip when not actually building.
if reason and not skip_commands():
                # Check that the target was really updated.  Only do this
                # when the target existed before building.
if dst_time > 0 and not target.attributes.get("virtual"):
t = url_time(recdict, target_name)
if t == 0:
raise UserError, _('Building failed for "%s"') % target_name
# if t == dst_time:
# Could give an error here, but on fast machines it often
# happens while nothing is wrong.
if reason or update.newsig:
# Update the signatures for all targets. We assume they were all
# updated, even though the build commands may skip some or were not
# executed at all.
for trg in dep.targetlist:
# Can there be a target without a node???
if not trg.has_key("_node"):
continue
node = trg["_node"]
            # Skip standard virtual targets, they are always rebuilt.
if node.name in Global.virtual_targets:
continue
# Target changed, clear its cached signatures.
sign_clear(node.get_name())
# Remember the signatures of the sources now. Do this for sources
# that the target depends on. Any old signatures for this target
# can be removed, they are invalid anyway.
# Don't do this with the --nobuild argument, unless --touch was
# used as well.
if (not Global.cmd_args.options.get("nobuild")
or Global.cmd_args.options.get("touch")
or Global.force_build):
sign_clear_target(recdict, node)
for l in src_list_list:
for dn in l:
# Call check_name() again, because creating the item
# may cause the filetype to change.
if not dn.node.attributes.get("virtual"):
sign_updated(recdict, dn.node, dn.attributes, node)
# Mark a node that is not the main target as have been updated
if node != target:
node.status = Node.updated
# Also remember the buildcheck signature.
if update.buildcheck:
buildcheck_updated(recdict, node, update.buildcheck)
# The target has changed, need to redo automatic dependencies.
# Also redo dependencies for nodes on which the list of automatic
# dependencies depends.
node.autodep_dictlist = None
for n in clear_autodep_list:
n.autodep_dictlist = None
# Return non-zero if building was to be done.
return retval
def autodepend_recurse(work, recdict, src_list, update, target, source):
"""
Read an auto-dependency file and extract the source dependencies.
If "update" isn't None check for the sources to be outdated.
Add the sources to list of DepNodes "src_list".
"""
if source.autodep_recursive and os.path.exists(target.get_name()):
from Depend import read_auto_depend
auto_dep = read_auto_depend(recdict, target, source.get_name())
if auto_dep:
work.dictlist_nodes(auto_dep)
if update:
for a in auto_dep:
check_need_update(recdict, update, a, target)
src_list.extend(dictlist2depnodelist(work, auto_dep))
# Always store the result in the source Node.
source.autodep_dictlist = auto_dep
def find_autodep_items(work, recdict, stype, source, source_dict):
"""
Find the action and name of the autodepend recipe used for Node "source"
    for an "stype" file.
Returns (None, None) when there is no matching action.
"""
from Action import find_depend_action
action = find_depend_action(stype)
if not action:
return None, None # Didn't find a matching autodepend.
# Make the name for the recipe that contains the automatic dependencies for
# this node: "$BDIR/node_directory/node_basename.aap".
# A "depdir" attribute of the source node overrules $BDIR.
d = source_dict.get("depdir")
if not d:
d = source.attributes.get("depdir")
if not d:
d = get_var_val_int(recdict, "BDIR")
recipe = work.get_node(os.path.join(d, source.short_name()) + ".aap", 1)
return action, recipe
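# Resolution order for the directory that holds the generated dependency
# recipe, as implemented above:
#   1. the "depdir" attribute on the dependency item itself,
#   2. the "depdir" attribute on the source Node,
#   3. $BDIR.
# The recipe name then becomes that directory plus the source short name with
# ".aap" appended.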
def build_autodepend(work, recdict, stype, source, source_dict, level):
"""
Find an autodepend rule to make a dependency recipe from an "stype" file.
When a match is found execute the build commands for Node "source".
Use "source_dict" for attributes, overruling attributes of "source".
Unlike using pattern rules, this does NOT update the source.
Return the recipe node (it is already up-to-date).
"""
# Search all defined autodepends for one where stype matches.
# Make the name of the recipe that contains the produced dependencies.
action, recipe = find_autodep_items(work, recdict, stype,
source, source_dict)
if not action:
return None
msg_depend(recdict, _('Using depend %s for source "%s"')
% (stype, source.short_name()), level)
# Use an Update object to remember if any of the sources for this target is
# outdated, thus we need to build the target.
update = Update()
# Check the signature of the source. Use the attributes from the
# autodepend plus those from the source Node and the source file itself.
src_dict = action.attributes.copy()
src_dict["_node"] = source
for k in source.attributes.keys():
src_dict[k] = source.attributes[k]
for k in source_dict.keys():
src_dict[k] = source_dict[k]
src_dict["name"] = source.get_name()
check_need_update(recdict, update, src_dict, recipe)
# Make sure the output directory exists.
aap_checkdir(action.rpstack, recdict, recipe.get_name())
# If the autodepend used the {recursive} attribute, need to carry this over
# to the source node.
if action.attributes.has_key("recursive"):
source.autodep_recursive = action.attributes["recursive"]
else:
source.autodep_recursive = 0
# If the recipe exists and was generated recursively, the recipe depends on
# all files mentioned in it. This makes sure that the dependencies are
# updated when "foo.c" includes "common.h" and "common.h" was changed.
src_list = dictlist2depnodelist(work, [ src_dict ])
autodepend_recurse(work, recdict, src_list, update, recipe, source)
# Create a dependency to be executed below.
dep = Depend([ {"name" : recipe.get_name()} ],
action.attributes,
[ src_dict ],
work, action.rpstack,
action.commands)
# Make a new scope from the current scope and the scope of the action.
# This is a bit tricky, because some things have to be obtained from where
# the action was specified (esp. $_recipe.var), most things from the
# current scope (esp. $CFLAGS).
dep.use_recdict = get_build_recdict(recdict, action.buildrecdict,
keep_current_scope = 1, rpstack = action.rpstack,
xscope = action.attributes.get("scope"))
# Also remember the buildrecdict to use, the recdict will be set again
# when executing the commands.
dep.buildrecdict = action.buildrecdict
dep.keep_current_scope = 1
# Also execute the commands for "--nobuild" and "--touch".
Global.force_build = Global.force_build + 1
# Execute the build commands of the dependency when the recipe is outdated.
try:
res = may_exec_depend(recdict, dep, update, recipe, [ src_list ],
level, filetype = stype)
except UserError:
# Delete the target, it may be empty or invalid and the signature may
        # still be correct when it was built OK a previous time.
try_delete(recipe.get_name())
# When started with the "--nobuild" option it's possible that
# generating the auto-dependencies fails, because the input file (or a
# file it includes) was not generated. Silently skip the dependencies
# then.
if not Global.cmd_args.options.get("nobuild") and not error_continue():
raise
msg_info(recdict, _("Updating automatic dependencies failed, continuing anyway..."))
res = 0
Global.force_build = Global.force_build - 1
if res:
# Now the list of dependencies may have changed. Also store signatures
        # for newly discovered dependencies, otherwise the file will be built
# again when executing the recipe the next time.
new_src_list = []
autodepend_recurse(work, recdict, new_src_list, None, recipe, source)
for dn in new_src_list:
found = 0
for odn in src_list:
if dn.node == odn.node:
found = 1
break
if not found:
sign_updated(recdict, dn.node, dn.attributes, recipe)
return recipe
def default_targets(work, recdict):
"""Decide what the default target(s) is/are for this recipe."""
if recdict.get("TARGET"):
# Use the targets from $TARGET.
msg_depend(recdict, _('Building target(s) specified with $TARGET'))
targets = varname2dictlist(recdict, None, "TARGET")
for i in xrange(0, len(targets)):
targets[i] = work.get_node(targets[i]["name"], 0, targets[i])
elif work.find_node("all", ""):
# Use the "all" target.
msg_depend(recdict, _('Building the "all" target'))
targets = [ work.find_node("all", "") ]
elif len(recdict["_buildrule_targets"]) > 0:
# Use the targets from build rules.
msg_depend(recdict, _('Building target(s) specified with build rules'))
targets = recdict["_buildrule_targets"]
else:
msg_info(recdict,
_('No target on the command line and no $TARGET, build rules or "all" target in a recipe'))
targets = []
return targets
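# Summary of the selection order implemented in default_targets() above:
#   1. the targets listed in $TARGET, when set,
#   2. the "all" target, when it exists,
#   3. targets collected from build rules ($_buildrule_targets),
#   4. otherwise nothing to build (an informational message is given).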
def dobuild(work, target = None):
"""
Build the targets specified on the command line or the default target.
When "target" is not None build this target.
"""
#
# Execute all the "-c command" arguments.
#
if Global.cmd_args.has_option("command"):
cmdlist = Global.cmd_args.options.get("command")
else:
cmdlist = []
for cmd in cmdlist:
# Create a ParsePos object to contain the parse position in the string.
# Make a new rpstack to be able to give useful error messages.
fp = ParsePos([ RecPos(_('Command line argument "%s"') % cmd, 0) ],
string = cmd + '\n')
# Parse and execute the commands.
try:
Process(fp, work.recdict, 0)
except UserError:
if not error_continue():
raise
    # If there is a "-c command" argument and no targets we are done.
if cmdlist and not Global.cmd_args.targets:
return
#
# Build the specified or default targets.
#
if target:
# Target explicitly specified.
targets = [ work.get_node(target) ]
elif Global.cmd_args.targets:
# Targets specified as an argument.
msg_depend(work.recdict,
_('Building targets specified on command line'))
update_target = work.find_node("update", "")
targets = []
for t in Global.cmd_args.targets:
if t == "update" and (not update_target
or not update_target.dependencies):
                # An "update" target for which there is no explicit dependency
                # is turned into "fetch" followed by the default target(s).
msg_depend(work.recdict,
_('"update" builds "fetch" and the default target'))
targets.append(work.get_node("fetch", ""))
targets.extend(default_targets(work, work.recdict))
else:
# Expand variables in the target, e.g.: $BDIR/foo.o.
et = expand(0, work.recdict, t, Expand(0, Expand.quote_aap))
# Find an existing Node or create a new one.
targets.append(work.get_node(et))
else:
targets = default_targets(work, work.recdict)
# Update the toplevel targets. Catch a ":quit" or ":exit" command.
exitobj = None
try:
for target in targets:
# These targets use the toplevel scope for finding rules.
target.scope_recdict = work.recdict
res = target_update(work, work.recdict, target, 1, level = 1)
if res == updated_FAIL:
msg = _('Do not know how to build "%s"')
elif res == updated_ERROR:
msg = _('Building "%s" failed')
else:
msg = None
if msg:
msg = msg % target.short_name()
if error_continue():
# Ignoring errors: continue with further targets.
# Remember building this target failed, any other targets
# that depend on it will also fail.
target.status = Node.builderror
msg_error(work.recdict, msg)
else:
# Quit building at the first error.
raise UserError, msg
except NormalExit, n:
exitobj = n
# Update the "finally" target if it exists.
target = work.find_node("finally", "")
if target:
msg_depend(work.recdict, _('Building the "finally" target'))
target_update(work, work.recdict, target, level = 1)
if exitobj != None:
raise exitobj
# vim: set sw=4 et sts=4 tw=79 fo+=l:
|