Page Menu
Home
Search
Configure Global Search
Log In
Files
F20071
blender-cake-v1.patch
Public
Actions
View File
Edit File
Delete File
View Transforms
Subscribe
Mute Notifications
Award Token
Authored By
Stuart McMahon (stuartmcmahon)
Nov 13 2013, 4:22 PM
Size
581 KB
Subscribers
None
blender-cake-v1.patch
View Options
This file is larger than 256 KB, so syntax highlighting was skipped.
Index: args.cake
===================================================================
--- args.cake (revision 0)
+++ args.cake (working copy)
@@ -0,0 +1,88 @@
+#
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# The Original Code is Copyright (C) 2011, Blender Foundation
+# All rights reserved.
+#
+# The Original Code is: all of this file.
+#
+# Contributor(s): Stuart McMahon.
+#
+# ***** END GPL LICENSE BLOCK *****
+#
+# Command line argument parsing for the Cake build system.
+# Adds a '-p' option for building projects.
+# Splits non-option arguments into targets and keyword arguments and
+# passes them to config.cake via the Engine instance.
+#
+import os.path
+
+from cake.script import Script
+
# Get some variables from the current script instance.
engine = Script.getCurrent().engine
parser = engine.parser
rootdir = os.path.dirname(__file__)

# Add a project generation option. It will be stored in 'engine.options' which
# can later be accessed in our config.cake.
parser.add_option(
    "-p", "--projects",
    action="store_true",
    dest="createProjects",
    help="Create projects instead of building a variant.",
    default=False,
    )

# Set the path to store script cache and dependency info files.
# NOTE(review): rootdir is the directory of this args.cake, so '..' puts the
# cache directory *beside* the source checkout — confirm that is intended.
engine.scriptCachePath = os.path.join(rootdir, "..", "cache", "script")
engine.dependencyInfoPath = os.path.join(rootdir, "..", "cache", "dep")
+
def findArgsAndTargets(args):
    """Split command line arguments into SCons-style ARGUMENTS,
    COMMAND_LINE_TARGETS and leftover arguments (ENGINE_ARGS).
    """
    arguments = {}
    targets = []
    engineArgs = []

    for arg in args:
        if arg.startswith('-'):
            # Options are handled by the engine.
            engineArgs.append(arg)
        elif '=' not in arg:
            # Anything that is neither an option nor a keyword is a target.
            targets.append(arg)
        elif arg.islower():
            # An all-lowercase keyword names a variant; the engine handles it.
            engineArgs.append(arg)
        else:
            # Remaining keyword arguments are put in the ARGUMENTS dictionary
            # and handled in config.cake.
            keyword, _, value = arg.partition('=')
            arguments[keyword] = value

    # Hard-wire the path to the config.cake.
    engineArgs.append('--config=' + os.path.join(rootdir, "build_files", "cake", "config.cake"))

    return arguments, targets, engineArgs
+
# Modify engine.args by stripping off the args we want and passing them
# to config.cake via the engine instance (config.cake reads them back as
# engine.ARGUMENTS / engine.COMMAND_LINE_TARGETS).
engine.ARGUMENTS, engine.COMMAND_LINE_TARGETS, engine.args = findArgsAndTargets(engine.args)
Index: build_files/cake/caketools.py
===================================================================
--- build_files/cake/caketools.py (revision 0)
+++ build_files/cake/caketools.py (working copy)
@@ -0,0 +1,164 @@
+#
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# The Original Code is Copyright (C) 2011, Blender Foundation
+# All rights reserved.
+#
+# The Original Code is: all of this file.
+#
+# Contributor(s): Stuart McMahon.
+#
+# ***** END GPL LICENSE BLOCK *****
+#
+# Tools to help make Blender compile with Cake.
+#
+import os
+import os.path
+import re
+import sys
+
+from cake.library import waitForAsyncResult
+
def basePath(value):
    """Recursively expand paths relative to the current script's path, or relative
    to the root directory if prepended with '#'. Absolute paths will be left alone.

    Accepts a single string, a list/set/tuple/dict of paths (converted
    recursively), or any other object (e.g. a FileTarget), which is returned
    unchanged.
    """
    from cake.tools import env

    @waitForAsyncResult
    def _basePath(path):
        if isinstance(path, str):
            # Expand any environment-tool variable references first.
            path = env.expand(path)
            if path.startswith("#"):
                # '#' anchors the path at the project root.
                # NOTE(review): a bare "#" would raise IndexError on path[1]
                # — assumes callers always append a name after '#'.
                if path[1] in '\\/':  # Keep project paths relative but remove slashes.
                    return path[2:]
                else:
                    return path[1:]
            elif os.path.isabs(path):
                return path  # Keep absolute paths as found.
            else:
                return env.script.cwd(path)  # Prefix relative paths with script's dir.
        elif isinstance(path, (list, set)):
            return list(_basePath(p) for p in path)
        elif isinstance(path, tuple):
            return tuple(_basePath(p) for p in path)
        elif isinstance(path, dict):
            # NOTE(review): dict.iteritems() is Python 2 only — confirm the
            # build always runs under Python 2 (era of this code suggests so).
            return dict((k, _basePath(v)) for k, v in path.iteritems())
        else:
            return path  # Could be a FileTarget. Leave it as is.

    return _basePath(value)
+
def blenderPlatform(platform, compiler, architecture):
    """Map a Cake platform/compiler/architecture triple to a Blender platform name.

    Raises ValueError for combinations Blender does not support.
    """
    if platform == "windows":
        if compiler == "msvc":
            # MSVC supports both word sizes; anything not x86 is built 64-bit.
            return "win32-vc" if architecture == "x86" else "win64-vc"
        if compiler == "mingw":
            if architecture != "x86":
                raise ValueError("Architecture '%s' is not supported by MinGW." % architecture)
            return "win32-mingw"
        raise ValueError("Compiler '%s' is not supported under Windows." % compiler)
    if re.match('linux[0-9]+', platform):
        # Strip the kernel major version suffix, eg. 'linux2' -> 'linux'.
        return "linux"
    return platform
+
def sconsPlatform():
    """Return the platform string for our execution environment.

    The returned value should map to one of the SCons/Platform/*.py
    files. Since we're architecture independent, though, we don't
    care about the machine architecture.
    """
    osname = os.name
    if osname == 'java':
        # Jython reports os.name == 'java'; the underlying OS type is
        # stashed in the (private) os._osType attribute.
        osname = os._osType
    if osname == 'posix':
        if sys.platform == 'cygwin':
            return 'cygwin'
        # Identify well-known Unix flavours by substring match, falling
        # back to the generic 'posix' platform.
        for key, result in (
                ('irix', 'irix'),
                ('sunos', 'sunos'),
                ('hp-ux', 'hpux'),
                ('aix', 'aix'),
                ('darwin', 'darwin'),
                ):
            if key in sys.platform:
                return result
        return 'posix'
    elif osname == 'os2':
        # BUG FIX: this previously tested os.name, silently ignoring the
        # Jython 'java' -> os._osType remapping done above; test the
        # remapped osname for consistency.
        return 'os2'
    else:
        return sys.platform
+
def configureCompiler(B, compiler):
    """Configure the compiler for a particular variant.

    Installs message callbacks on the compiler so cached and freshly built
    results are reported through Blender's existing print helpers (B.my_*)
    with the usual colour codes (B.bc).
    """
    bc = B.bc

    def cachedMessage(label, path):
        # Build the coloured "<label> ==> '<basename>'" line used whenever
        # a result came from the cache.
        name = os.path.split(path)[1]
        return (bc.OKBLUE + label + bc.ENDC + " ==> '" +
                bc.OKGREEN + name + "'" + bc.ENDC + "\n")

    def libraryPrint(target, sources, cached):
        if cached:
            return cachedMessage("Cached library", target)
        return B.my_linking_print([target], sources, None) + "\n"

    def objectPrint(target, source, pch, shared, cached):
        if cached:
            return cachedMessage("Cached", source)
        return B.my_compile_print([target], [source], None) + "\n"

    def programPrint(target, sources, cached):
        if cached:
            return cachedMessage("Cached program", target)
        return B.my_program_print([target], sources, None) + "\n"

    def resourcePrint(target, source, cached):
        if cached:
            return cachedMessage("Cached", source)
        return B.my_compile_print([target], [source], None) + "\n"

    compiler.libraryMessage = libraryPrint
    compiler.objectMessage = objectPrint
    compiler.programMessage = programPrint
    compiler.resourceMessage = resourcePrint
+
def configureProjectTool(projectTool, platform, compiler, architecture, target):
    """Configure the project tool for a particular variant.

    Derives MSVC project/solution configuration names from the variant's
    platform, compiler, architecture and target strings.
    """
    platformName = '%s %s (%s)' % (
        platform.capitalize(), compiler.capitalize(), architecture)
    targetName = target.capitalize()
    projectTool.projectConfigName = platformName + ' ' + targetName
    projectTool.solutionConfigName = targetName
    projectTool.solutionPlatformName = platformName
Index: build_files/cake/config.cake
===================================================================
--- build_files/cake/config.cake (revision 0)
+++ build_files/cake/config.cake (working copy)
@@ -0,0 +1,140 @@
+#
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# The Original Code is Copyright (C) 2011, Blender Foundation
+# All rights reserved.
+#
+# The Original Code is: all of this file.
+#
+# Contributor(s): Stuart McMahon.
+#
+# ***** END GPL LICENSE BLOCK *****
+#
+# The main bootstrap/configuration file for the Cake build system.
+# Adds tool paths to sys.path.
+# Creates a ProjectTool used for building MSVC project files.
+# Adds a build variant for each supported platform/compiler/architecture
+# combination.
+#
+import os
+import os.path
+import sys
+
+import cake.system
+from cake.engine import Variant
+from cake.library.compilers import CompilerNotFoundError
+from cake.library.project import ProjectTool
+from cake.script import Script
+
# Get some variables from the current script instance.
engine = Script.getCurrent().engine
configuration = Script.getCurrent().configuration
rootDir = os.path.join(os.path.dirname(__file__), "..", "..")

# Create a project tool for project generation. It stays disabled (so it
# builds nothing) unless the user passed '-p' (see args.cake); projects
# are only written after a successful build via the callback.
projectTool = ProjectTool(configuration=configuration)
projectTool.product = ProjectTool.VS2008
projectTool.enabled = engine.options.createProjects
engine.addBuildSuccessCallback(projectTool.build)

# Many tool scripts expect the working directory to be this directory.
os.chdir(rootDir)

# Add paths to sys.path so we can import tool scripts.
# NOTE(review): inserting at position 0 lets the local SCons stub modules
# shadow any real SCons installation — confirm that is intentional.
toolpath = os.path.join(rootDir, "build_files", "scons", "tools")
sys.path.insert(0, toolpath)
cakepath = os.path.join(rootDir, "build_files", "cake")
sys.path.insert(0, cakepath)

# Import tools.
import caketools
import SCons.Script.SConscript as SConscript
import SCons.Util

# Setup the configuration. scriptGlobals are injected into every build
# script so Blender's existing SConscript files run unmodified under Cake.
configuration.baseDir = rootDir
configuration.basePath = caketools.basePath
configuration.defaultBuildScriptName = "SConscript"
configuration.scriptGlobals = {
    "BuildDir" : SConscript.BuildDir,
    "Default" : SConscript.Default,
    "Depends" : SConscript.Depends,
    "Dir" : SConscript.Dir,
    "Exit" : SConscript.Exit,
    "Import" : SConscript.Import,
    "Mkdir" : SConscript.Mkdir,
    "Return" : SConscript.Return,
    "SConscript" : SConscript.SConscript,
    "Split" : SCons.Util.Split,
    }
+
def createCompiler(platform, compilerName, architecture):
    """Create a compiler given its platform, name and architecture.

    Raises CompilerNotFoundError (from the find* helpers) when the
    requested compiler is not installed, and ValueError when the compiler
    name itself is unknown.
    """
    if compilerName == "msvc":

        from cake.library.compilers.msvc import findMsvcCompiler
        compiler = findMsvcCompiler(configuration=configuration, version='9.0', architecture=architecture)
        compiler.addDefine("WIN32")
        if compiler.architecture in ["x64", "ia64"]:
            compiler.addDefine("WIN64")

    elif compilerName == "mingw":

        from cake.library.compilers.gcc import findMinGWCompiler
        compiler = findMinGWCompiler(configuration=configuration)
        compiler.addDefine("WIN32")
        compiler.addDefine("FREE_WINDOWS")

    elif compilerName == "gcc":

        from cake.library.compilers.gcc import findGccCompiler
        compiler = findGccCompiler(configuration=configuration)

    else:
        # BUG FIX: an unknown name previously fell through to the return
        # statement and raised a confusing UnboundLocalError; fail clearly.
        raise ValueError("Unknown compiler name '%s'." % compilerName)

    return compiler
+
def makeVariant(platform, compilerName, architecture):
    """Create a build variant for a platform/compiler/architecture combination.

    Silently skips the variant when no matching compiler is installed, so
    each machine only registers the variants it can actually build.
    """
    # Attempt to create a compiler for this variant.
    try:
        compiler = createCompiler(platform, compilerName, architecture)
    except CompilerNotFoundError:
        return  # Failed to create compiler. Just skip this variant.

    variant = Variant(
        platform=platform,
        compiler=compilerName,
        architecture=architecture,
        target="release",
        )

    # Tell the variant where to find its first-use construction script.
    variant.constructionScriptPath = os.path.join(rootDir, "build_files", "cake", "variant.cake")
    # Clone the tools so per-variant tweaks don't leak into the shared instances.
    variant.tools["compiler"] = compiler.clone()
    variant.tools["projectTool"] = projectTool.clone()
    configuration.addVariant(variant)
+
# Attempt to add all variants supported by the current platform.
if cake.system.isWindows():
    # Windows can host MSVC (32 and 64 bit) and MinGW (32 bit only).
    makeVariant("windows", "msvc", "x86")
    makeVariant("windows", "msvc", "x64")
    makeVariant("windows", "mingw", "x86")
else:
    # Elsewhere, build natively with gcc for the host platform/architecture.
    platform = cake.system.platform().lower()
    architecture = cake.system.architecture().lower()
    makeVariant(platform, "gcc", architecture)
Index: build_files/cake/SCons/__init__.py
===================================================================
--- build_files/cake/SCons/__init__.py (revision 0)
+++ build_files/cake/SCons/__init__.py (working copy)
@@ -0,0 +1 @@
+# Keepme
Index: build_files/cake/SCons/Action.py
===================================================================
--- build_files/cake/SCons/Action.py (revision 0)
+++ build_files/cake/SCons/Action.py (working copy)
@@ -0,0 +1,58 @@
+#
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# The Original Code is Copyright (C) 2011, Blender Foundation
+# All rights reserved.
+#
+# The Original Code is: all of this file.
+#
+# Contributor(s): Stuart McMahon.
+#
+# ***** END GPL LICENSE BLOCK *****
+#
+# Tools that mimic SCons.Action.
+#
+import sys
+
+class _ActionAction(object):
+ """A class that describes an internal action.
+ """
+ def print_cmd_line(self, s, target, source, env):
+ pass
+
class FunctionAction(object):
    """An action that wraps a Python callable, mimicking SCons' FunctionAction.

    execfunction performs the action; strfunction, when provided, produces a
    message that is echoed to stdout before execution.
    """
    def __init__(self, execfunction, strfunction):
        """Store the execute callable and the optional description callable."""
        self.execfunction = execfunction
        self.strfunction = strfunction

    def execute(self, target, source, env):
        """Echo the action's description (if any), then run it.

        Returns whatever execfunction returns.
        """
        describe = self.strfunction
        if describe is not None:
            message = describe(target, source, env)
            if message:
                sys.stdout.write("%s\n" % message)
        return self.execfunction(target, source, env)

def Action(function, strfunction=None):
    """Factory mimicking SCons.Action.Action for plain function actions."""
    return FunctionAction(function, strfunction=strfunction)
Index: build_files/cake/SCons/Builder.py
===================================================================
--- build_files/cake/SCons/Builder.py (revision 0)
+++ build_files/cake/SCons/Builder.py (working copy)
@@ -0,0 +1,40 @@
+#
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# The Original Code is Copyright (C) 2011, Blender Foundation
+# All rights reserved.
+#
+# The Original Code is: all of this file.
+#
+# Contributor(s): Stuart McMahon.
+#
+# ***** END GPL LICENSE BLOCK *****
+#
+# Tools that mimic SCons.Builder.
+#
class BuilderBase(object):
    """A do-nothing stand-in for SCons' BuilderBase.

    Cake performs the real building, so all builder configuration and
    registered actions are simply discarded.
    """
    def __init__(self, *args, **kwargs):
        """Accept and ignore any SCons builder arguments."""
        pass

    def add_action(self, suffix, action):
        """Accept and ignore a per-suffix action registration."""
        pass

def Builder(*args, **kwargs):
    """Factory mimicking SCons.Builder.Builder; returns a no-op builder."""
    return BuilderBase(*args, **kwargs)
Index: build_files/cake/SCons/Defaults.py
===================================================================
--- build_files/cake/SCons/Defaults.py (revision 0)
+++ build_files/cake/SCons/Defaults.py (working copy)
@@ -0,0 +1,31 @@
+#
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# The Original Code is Copyright (C) 2011, Blender Foundation
+# All rights reserved.
+#
+# The Original Code is: all of this file.
+#
+# Contributor(s): Stuart McMahon.
+#
+# ***** END GPL LICENSE BLOCK *****
+#
+# Tools that mimic SCons.Defaults.
+#
# Opaque sentinel standing in for SCons' default program scanner; Blender's
# tool scripts only pass it around, so a bare object() suffices.
ProgScan = object()
"""The default program scanner.
"""
Index: build_files/cake/SCons/Options.py
===================================================================
--- build_files/cake/SCons/Options.py (revision 0)
+++ build_files/cake/SCons/Options.py (working copy)
@@ -0,0 +1,28 @@
+#
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# The Original Code is Copyright (C) 2011, Blender Foundation
+# All rights reserved.
+#
+# The Original Code is: all of this file.
+#
+# Contributor(s): Stuart McMahon.
+#
+# ***** END GPL LICENSE BLOCK *****
+#
+# Tools that mimic SCons.Options.
+#
Index: build_files/cake/SCons/Script/__init__.py
===================================================================
--- build_files/cake/SCons/Script/__init__.py (revision 0)
+++ build_files/cake/SCons/Script/__init__.py (working copy)
@@ -0,0 +1 @@
+# Keepme
Index: build_files/cake/SCons/Script/SConscript.py
===================================================================
--- build_files/cake/SCons/Script/SConscript.py (revision 0)
+++ build_files/cake/SCons/Script/SConscript.py (working copy)
@@ -0,0 +1,457 @@
+#
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# The Original Code is Copyright (C) 2011, Blender Foundation
+# All rights reserved.
+#
+# The Original Code is: all of this file.
+#
+# Contributor(s): Stuart McMahon.
+#
+# ***** END GPL LICENSE BLOCK *****
+#
+# Tools that mimic SCons.Script.SConscript.
+#
+import os.path
+import inspect
+import itertools
+import sys
+
+from cake.filesys import makeDirs
+from cake.library import FileTarget, flatten, getPath, getTasks, waitForAsyncResult
+from cake.library.env import EnvironmentTool
+from cake.library.script import ScriptTool
+from cake.library.shell import ShellTool
+from cake.library.filesys import FileSystemTool
+from cake.script import Script
+
+import SCons.Action
+
class Dir(object):
    """Information about a directory, mimicking SCons' Dir node.

    Exposes the directory's name, its project-relative path and its
    absolute path.
    """
    def __init__(self, name, directory=None):
        self.name = name
        self.directory = directory
        joined = name if directory is None else os.path.join(directory, name)
        # A leading '#' anchors the path at the project root; strip it.
        if joined[0] == '#':
            joined = joined[1:]
        self.path = joined
        self.abspath = Script.getCurrent().configuration.abspath(joined)
+
def BuildDir(build_dir, src_dir, duplicate=None):
    """Add a build directory to the list of directories to search when building
    a source directory.
    """
    from cake.tools import env
    # Register the mapping so SConscript() can redirect scripts found under
    # src_dir into build_dir; the '#' marks the source path as root-relative.
    mapped = '#' + env.configuration.basePath(src_dir)
    env._variantDirs[build_dir] = mapped
+
def Default(*targets):
    """Set the default targets if none are given on the command line.

    Cake determines its own default targets, so this SCons hook is a no-op.
    """
    return None
+
def Depends(target, dependency):
    """Add a build dependency to target.

    Cake tracks dependencies itself, so this SCons hook is a no-op.
    """
    return None
+
def Exit(value=0):
    """Exit the build immediately with the given status code."""
    raise SystemExit(value)
+
def Import(vars, *args):
    """Import named tools from the current script's toolset directly into the
    caller's local variables.

    vars may be a single name, a list of names, or several names passed as
    separate positional arguments.
    """
    frame = inspect.stack()[1]  # Get the caller's frame.
    argvalues = inspect.getargvalues(frame[0])
    localvars = argvalues[3]  # Get the caller's local variables.
    tools = Script.getCurrent().tools

    for v in itertools.chain(flatten(vars), args):
        # NOTE(review): writing to a frame's locals dict is only reliable at
        # module scope (SConscript bodies run there) — confirm Import() is
        # never called from inside a function.
        localvars[v] = tools[v]
+
def Mkdir(dir):
    """Return a function that when called will create the directories passed in.
    """
    configuration = Script.getCurrent().configuration
    # Resolve to an absolute path now; creation is deferred until the
    # returned callable is invoked.
    absPath = configuration.abspath(configuration.basePath(dir))

    def _makeDirs():
        return makeDirs(absPath)

    return _makeDirs
+
def Return(*args):
    """Export named local variables from the caller's frame into the current
    script's 'result' variable.

    A single name sets the result to that value; multiple names set it to a
    list of values in the given order.
    """
    from cake.tools import env
    frame = inspect.stack()[1]  # Get the caller's frame.
    argvalues = inspect.getargvalues(frame[0])
    localvars = argvalues[3]  # Get the caller's local variables.
    if len(args) == 1:
        env.script.setResult(result=localvars[args[0]])
    else:
        env.script.setResult(result=[localvars[a] for a in args])
+
class SConscriptProxy(object):
    """A proxy return value that allows script results to be added to other objects.

    Reading a script's 'result' forces it to finish executing, so the lookup
    is deferred until the proxy is actually combined (via '+') with a list —
    preserving build parallelism until then.
    """
    def __init__(self, scripts):
        self._scripts = scripts

    def __radd__(self, other):
        scripts = self._scripts
        if isinstance(scripts, Script):
            # Normalise the single-script case to a one-element sequence.
            scripts = [scripts]
        results = [s.getResult("result", default=None).result for s in scripts]
        return other + results
+
def SConscript(scripts):
    """Execute a script or list of scripts and return a proxy for their results.
    """
    from cake.tools import env
    if isinstance(scripts, str):
        # Redirect through a matching variant (build) dir, if one was
        # registered for this script's directory via BuildDir().
        dirName, baseName = os.path.split(scripts)
        mappedDir = env._variantDirs.get(dirName, dirName)
        scripts = os.path.join(mappedDir, baseName)
    return SConscriptProxy(env.script.execute(scripts))
+
class SConsEnvironment(EnvironmentTool):
    """A class that mimics SConsEnvironment.

    Wraps Cake's EnvironmentTool so Blender's existing SConscript files can
    drive Cake's compiler, shell, filesys and project tools through the
    SCons Environment API they already use.
    """
    def __init__(self, configuration, compiler, projectTool, module):
        """Construct the environment for one build variant.

        configuration -- the Cake configuration for this build.
        compiler -- the variant's compiler tool.
        projectTool -- the project generation tool; when it is enabled all
          other tools are disabled, so a project run builds nothing.
        module -- presumably Blender's tools module providing
          root_build_dir and the my_*_print helpers — TODO confirm.
        """
        EnvironmentTool.__init__(self, configuration)

        self.compiler = compiler
        self.compiler.enabled = not projectTool.enabled
        self.module = module
        self.filesys = FileSystemTool(configuration=configuration)
        self.filesys.enabled = not projectTool.enabled
        self.script = ScriptTool(configuration=configuration)
        self.script.enabled = not projectTool.enabled
        self.shell = ShellTool(configuration=configuration)
        self.shell.enabled = not projectTool.enabled
        self.projectTool = projectTool
        # Mapping of source dir -> build dir registered via BuildDir().
        self._variantDirs = {}
        # Lower-cased suffixes considered 'already object-like' (need no compile).
        self._objectSuffixes = [
            compiler.objectSuffix.lower(),
            compiler.librarySuffix.lower(),
            compiler.resourceSuffix.lower(),
            ]

    @waitForAsyncResult
    def _buildObjectsFromSources(self, sources):
        """Build any source files into object files and return them.

        Files whose suffix is already object-like are passed through untouched.
        """
        compiler = self._cloneCompiler()
        basePath = self.configuration.basePath
        prerequisites = self.script.execute(self['PREREQUISITES'])

        objects = []
        for s in sources:
            sourceName, sourceExt = os.path.splitext(getPath(s))
            if sourceExt.lower() in self._objectSuffixes:
                objects.append(s)  # Already an object file.
            else:
                if isinstance(s, FileTarget):
                    targetPath = sourceName  # .c is already in build dir so put obj next to it
                else:
                    targetPath = self._getTargetPath(sourceName)
                objects.append(compiler._object(targetPath, basePath(s), prerequisites=prerequisites))
        return objects

    def _cloneCompiler(self):
        """Clone and setup the compiler ready for a build.

        Applies the environment's CPPPATH/CPPDEFINES and C/C++ flag
        variables to the clone, leaving the shared compiler untouched.
        """
        compiler = self.compiler.clone()

        for p in flatten(self['CPPPATH']):
            if p:
                compiler.addIncludePath(p)

        for d in flatten(self['CPPDEFINES']):
            if d.startswith('-D'):
                d = d[2:]  # Strip '-D'
            if d:
                compiler.addDefine(d)

        for f in flatten(self['CFLAGS'] + self['CCFLAGS'] + self['CPPFLAGS']):
            if f:
                # HACK: If we find '/FR${TARGET}.sbr' use compiler.outputBrowseInfo instead.
                # To support TARGET expansion we'd need to clone the environment and compiler
                # for each object file which would double the incremental build time,
                # eg. 3->6 seconds.
                if f == '/FR${TARGET}.sbr':
                    compiler.outputBrowseInfo = True
                else:
                    compiler.addCFlag(f)

        for f in flatten(self['CXXFLAGS'] + self['CCFLAGS'] + self['CPPFLAGS']):
            if f:
                # HACK: If we find '/FR${TARGET}.sbr' replace it with compiler.outputBrowseInfo.
                if f == '/FR${TARGET}.sbr':
                    compiler.outputBrowseInfo = True
                else:
                    compiler.addCppFlag(f)
                    compiler.addMmFlag(f)

        return compiler

    def _command(self, target, source, action):
        """Execute an action or command line string.
        """
        # Check if it's just a function action.
        if isinstance(action, SCons.Action.FunctionAction):
            exitCode = action.execute(target, source, self)
            # NOTE(review): an action returning None (common for SCons-style
            # functions) passes the '!= 0' test and then breaks the '%i'
            # format — confirm all Blender actions return an explicit 0.
            if exitCode != 0:
                msg = "%s exited with code %i\n" % (action.execfunction.__name__, exitCode)
                self.engine.raiseError(msg)
            return [FileTarget(path=t, task=None) for t in flatten(target)]

        # Else, it's a command line so we need to setup our env and run it.
        shell = self.shell.clone()

        # Make sure targets are built in the build dir.
        newTargets = [self._getTargetPath(t) for t in flatten(target)]

        # Expand the $TARGET variable.
        if isinstance(action, str) and len(newTargets) == 1:
            self['TARGET'] = '"' + getPath(newTargets[0]) + '"'
            action = self.expand(action)

        # Call _run() to avoid extra basePath() calls.
        return shell._run(
            args=action,
            targets=newTargets,
            sources=self.configuration.basePath(source),
            shell=True,
            )

    def _getTargetPath(self, files):
        """Obtain the matching build directory path for given source file(s).

        Accepts a single path or a (possibly nested) collection of paths.
        """
        buildDir = self.module.root_build_dir
        basePath = self.configuration.basePath
        if isinstance(files, str):
            return os.path.join(buildDir, basePath(files))
        else:
            return [os.path.join(buildDir, basePath(f)) for f in flatten(files)]

    def _library(self, target, source):
        """Create a library.
        """
        compiler = self._cloneCompiler()
        objects = self._buildObjectsFromSources(flatten(source))
        prerequisites = self.script.execute(self['PREREQUISITES'])
        return [compiler.library(target, objects, prerequisites=prerequisites)]

    def _object(self, target, source):
        """Create an object.
        """
        compiler = self._cloneCompiler()
        sources = flatten(source)
        assert len(sources) == 1  # Only supporting one source->target at the moment.
        return [compiler.object(target, sources[0])]

    def _program(self, target, source):
        """Create a program.
        """
        compiler = self._cloneCompiler()

        for f in flatten(self['LINKFLAGS']):
            if f:
                compiler.addProgramFlag(f)

        for p in flatten(self['LIBPATH']):
            if p:
                # Note: We need to use an absolute path to match SCons' executable size.
                compiler.addLibraryPath(self.GetBuildPath(p))

        for l in flatten(self['LIBS']):
            # Strip the library flag if it has been added already (mingw config does this).
            if l.startswith('-l'):
                l = l[2:]
            # Expand any variables (needed for BF_PYTHON_VERSION under mingw).
            l = self.expand(l)
            if l:
                compiler.addLibrary(l)

        objects = self._buildObjectsFromSources(flatten(source))
        prerequisites = self.script.execute(self['PREREQUISITES'])
        return [compiler.program(target, objects, prerequisites=prerequisites)]

    def _res(self, target, source):
        """Create a resource.
        """
        compiler = self._cloneCompiler()

        for f in flatten(self['RCFLAGS']):
            if f:
                compiler.addResourceFlag(f)

        return [compiler.resource(target, source)]

    def Action(self, action, cmd=None):
        """Create a function action that can be executed later.
        """
        return SCons.Action.Action(action, cmd)

    def AddPostAction(self, target, action):
        """Add an action to be executed after target has been built.
        """
        # Don't execute actions during project generation.
        if not self.projectTool.enabled:
            def execute():
                action.execute(target, None, self)
            task = self.engine.createTask(execute)
            # Run only once all of target's build tasks have completed.
            task.startAfter(getTasks(flatten(target)))

    def Alias(self, alias, targets=None, action=None):
        """Create an alias for a build target.

        No-op: Cake has no alias concept here.
        """
        pass

    def Append(self, **kwargs):
        """Append keyword arguments to the environment.
        """
        return self.append(**kwargs)

    def Clone(self, **kwargs):
        """Clone the current environment replacing current values with the keyword
        arguments provided.
        """
        env = self.clone()
        env.update(**kwargs)  # Override cloned env with kwargs.
        return env

    def Command(self, target, source, action, **kwargs):
        """Execute an action or command line string.
        """
        lenv = self.Clone(TARGET=self._getTargetPath(target), **kwargs)
        return lenv._command(target, source, action)

    def Default(self, *targets):
        """Set the default targets if none are given on the command line.
        """
        return Default(*targets)

    def Depends(self, target, dependency):
        """Add a build dependency to target.
        """
        return Depends(target, dependency)

    def Dictionary(self, *args):
        """Obtain a dictionary of all construction variables.

        Stub: always returns an empty dict.
        """
        return dict()

    def Dir(self, *args, **kwargs):
        """Create an object containing information about a directory.
        """
        return Dir(*args, **kwargs)

    def Execute(self, action, *args, **kwargs):
        """Execute an action.

        NOTE(review): extra args/kwargs are accepted but never forwarded to
        the action — confirm no caller relies on them.
        """
        action()

    def GetBuildPath(self, files):
        """Obtain the absolute path to given file(s).
        """
        basePath = self.configuration.basePath
        absPath = self.configuration.abspath
        if isinstance(files, str):
            return absPath(basePath(files))
        else:
            return [absPath(basePath(f)) for f in flatten(files)]

    def has_key(self, key):
        """Determine whether the environment has a value for the given key.

        Python 2 dict-style API kept for SConscript compatibility.
        """
        return key in self

    def Install(self, dir, source):
        """Install files to a target directory only if they are newer.
        """
        return self.filesys.copyFiles(sources=source, targetDir=dir)

    def Library(self, target, source, **kwargs):
        """Create a library given a list of source files.
        """
        lenv = self.Clone(TARGET=self._getTargetPath(target), **kwargs)
        return lenv._library(target, source)

    def MSVSProject(self, target, srcs=None, buildtarget=None, variant=None, auto_build_solution=0):
        """Create an MSVS project file.

        variant and auto_build_solution are accepted for SCons API
        compatibility but ignored.
        """
        target = os.path.splitext(target)[0]  # Remove the existing extension.
        output = flatten(buildtarget)[0]  # program() result is probably a list.
        return self.projectTool.project(
            target=target,
            items=srcs,
            output=output,
            )

    def MSVSSolution(self, target, projects=None, variant=None):
        """Create an MSVS solution file.

        variant is accepted for SCons API compatibility but ignored.
        """
        target = os.path.splitext(target)[0]  # Remove the existing extension.
        return self.projectTool.solution(
            target=target,
            projects=projects,
            )

    def Object(self, target, source, **kwargs):
        """Create an object file given a source file.
        """
        lenv = self.Clone(TARGET=self._getTargetPath(target), **kwargs)
        return lenv._object(target, source)

    def Prepend(self, **kwargs):
        """Prepend keyword arguments to the environment.
        """
        return self.prepend(**kwargs)

    def Program(self, target, source, **kwargs):
        """Create a program/executable given a list of source files/libraries.
        """
        lenv = self.Clone(TARGET=self._getTargetPath(target), **kwargs)
        return lenv._program(target, source)

    def Replace(self, **kwargs):
        """Replace environment variables with the keyword arguments given.
        """
        return self.replace(**kwargs)

    def RES(self, target, source, **kwargs):
        """Create a resource given a source file.
        """
        lenv = self.Clone(TARGET=self._getTargetPath(target), **kwargs)
        return lenv._res(target, source)

    def subst(self, input):
        """Expand a string using the values in this environment.
        """
        return self.expand(input)
Index: build_files/cake/SCons/Tool.py
===================================================================
--- build_files/cake/SCons/Tool.py (revision 0)
+++ build_files/cake/SCons/Tool.py (working copy)
@@ -0,0 +1,38 @@
+#
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# The Original Code is Copyright (C) 2011, Blender Foundation
+# All rights reserved.
+#
+# The Original Code is: all of this file.
+#
+# Contributor(s): Stuart McMahon.
+#
+# ***** END GPL LICENSE BLOCK *****
+#
+# Tools that mimic SCons.Tool.
+#
+import SCons.Builder
+
+def createObjBuilders():
+ return SCons.Builder.Builder()
+
+def createProgBuilder():
+ return SCons.Builder.Builder()
+
+def createStaticLibBuilder():
+ return SCons.Builder.Builder()
Index: build_files/cake/SCons/Util.py
===================================================================
--- build_files/cake/SCons/Util.py (revision 0)
+++ build_files/cake/SCons/Util.py (working copy)
@@ -0,0 +1,42 @@
+#
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# The Original Code is Copyright (C) 2011, Blender Foundation
+# All rights reserved.
+#
+# The Original Code is: all of this file.
+#
+# Contributor(s): Stuart McMahon.
+#
+# ***** END GPL LICENSE BLOCK *****
+#
+# Tools that mimic SCons.Util.
+#
+from cake.library import flatten as flttn
+
+def flatten(obj):
+ """Flatten a sequence to a non-nested list.
+ """
+ return flttn(obj)
+
+def Split(arg):
+ """Split a string at whitespace separators.
+ """
+ if isinstance(arg, str):
+ return arg.split()
+ else:
+ return arg
Index: build_files/cake/SCons/Variables.py
===================================================================
--- build_files/cake/SCons/Variables.py (revision 0)
+++ build_files/cake/SCons/Variables.py (working copy)
@@ -0,0 +1,77 @@
+#
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# The Original Code is Copyright (C) 2011, Blender Foundation
+# All rights reserved.
+#
+# The Original Code is: all of this file.
+#
+# Contributor(s): Stuart McMahon.
+#
+# ***** END GPL LICENSE BLOCK *****
+#
+# Tools that mimic SCons.Variables.
+#
+from cake.library import flatten
+
+def BoolVariable(a, b, c):
+ """Create a boolean variable.
+ """
+ return a, b, c
+
+class Variable(object):
+ """A class that describes a variable.
+ """
+ def __init__(self, key, help="", default=None):
+ self.key = key
+ self.help = help
+ self.default = default
+
+class Variables(object):
+    """A class that keeps track of variables from various sources.
+ """
+ def __init__(self, files=[], args={}):
+ """Construct a Variables object from a list of files and/or a dictionary.
+ """
+ self.files = flatten(files)
+ self.args = args
+ self.options = []
+
+ def AddVariables(self, *optlist):
+ """Add arguments to the list of variables.
+ """
+ self.options.extend(Variable(*o) for o in optlist)
+
+ def Update(self, env):
+ """Update an environment with our variable values.
+ """
+ # Start with the default values.
+ values = dict((o.key, o.default) for o in self.options)
+
+ # Update with variables from the files.
+ for filename in self.files:
+ execfile(filename, values)
+
+ # Update with arguments from the command line.
+ values.update(self.args)
+
+ # Only set values from the options list.
+ for option in self.options:
+ try:
+ env[option.key] = values[option.key]
+ except KeyError:
+ pass
Index: build_files/cake/variant.cake
===================================================================
--- build_files/cake/variant.cake (revision 0)
+++ build_files/cake/variant.cake (working copy)
@@ -0,0 +1,439 @@
+#
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# The Original Code is Copyright (C) 2011, Blender Foundation
+# All rights reserved.
+#
+# The Original Code is: all of this file.
+#
+# Contributor(s): Stuart McMahon.
+#
+# ***** END GPL LICENSE BLOCK *****
+#
+# Variant definition file used by the Cake build system.
+# Describes a single build variant such as 'windows-msvc-x86' or
+# 'linux-gcc-i386'.
+# This file mirrors the setup/argument parsing section of the SConstruct
+# file (used by the SCons build system).
+#
+import os
+import os.path
+import platform as pltfrm
+import shutil
+import sys
+
+import cake.task
+from cake.script import Script
+
+import Blender
+import btools
+import caketools
+from SCons.Script.SConscript import Dir, Exit
+
+# Make sure the Blender module gets a new instance on the next import.
+del sys.modules["Blender"]
+
+# Get some variables from the current script instance.
+engine = Script.getCurrent().engine
+configuration = Script.getCurrent().configuration
+variant = Script.getCurrent().variant
+platformName = variant["platform"]
+compilerName = variant["compiler"]
+architecture = variant["architecture"]
+target = variant["target"]
+compiler = variant.tools["compiler"]
+projectTool = variant.tools["projectTool"]
+
+# Configure the compiler and project tool.
+caketools.configureCompiler(Blender, compiler)
+caketools.configureProjectTool(
+ projectTool,
+ platformName,
+ compilerName,
+ architecture,
+ target,
+ )
+
+# Grab arguments passed from args.cake.
+ARGUMENTS = engine.ARGUMENTS
+COMMAND_LINE_TARGETS = engine.COMMAND_LINE_TARGETS
+
+# Use the build architecture to determine bitness.
+if architecture in ["alpha", "amd64", "ia64", "ppc64", "s390x", "x64", "x86_64"]:
+ bitness = 64
+elif architecture in ["i386", "i486", "i586", "i686", "powerpc", "s390", "x86"]:
+ bitness = 32
+# Fall back on the bitness of the Python executable.
+# Need a better way to do this. Automagical maybe is not the best thing, maybe it is.
+elif pltfrm.architecture()[0] == '64bit':
+ bitness = 64
+else:
+ bitness = 32
+
+# Before we do anything, let's check if we have a sane os.environ
+if not btools.check_environ():
+ Exit()
+
+BlenderEnvironment = Blender.BlenderEnvironment
+B = Blender
+
+VERSION = btools.VERSION # This is used in creating the local config directories
+VERSION_RELEASE_CYCLE = btools.VERSION_RELEASE_CYCLE
+
+### globals ###
+platform = sys.platform
+quickie = None
+quickdebug = None
+
+##### BEGIN SETUP #####
+
+B.possible_types = ['core', 'player', 'player2', 'intern', 'extern']
+
+B.binarykind = ['blender' , 'blenderplayer']
+##################################
+# target and argument validation #
+##################################
+# XX cheating for BF_FANCY, we check for BF_FANCY before args are validated
+use_color = ARGUMENTS.get('BF_FANCY', '1')
+if platform=='win32':
+ use_color = None
+
+if not use_color=='1':
+ B.bc.disable()
+
+    # On a default white OS X terminal, some colors are totally illegible
+if platform=='darwin':
+ B.bc.OKGREEN = '\033[34m'
+ B.bc.WARNING = '\033[36m'
+
+# arguments
+print B.bc.HEADER+'Command-line arguments'+B.bc.ENDC
+B.arguments = btools.validate_arguments(ARGUMENTS, B.bc)
+btools.print_arguments(B.arguments, B.bc)
+
+# targets
+print B.bc.HEADER+'Command-line targets'+B.bc.ENDC
+B.targets = btools.validate_targets(COMMAND_LINE_TARGETS, B.bc)
+btools.print_targets(B.targets, B.bc)
+
+##########################
+# setting up environment #
+##########################
+
+# handling cmd line arguments & config file
+
+# bitness stuff
+tempbitness = int(B.arguments.get('BF_BITNESS', bitness)) # default to bitness found as per starting python
+if tempbitness in (32, 64): # only set if 32 or 64 has been given
+ bitness = int(tempbitness)
+
+if bitness:
+ B.bitness = bitness
+else:
+ B.bitness = tempbitness
+
+
+# first check cmdline for toolset and we create env to work on
+quickie = B.arguments.get('BF_QUICK', None)
+quickdebug = B.arguments.get('BF_QUICKDEBUG', None)
+
+if quickdebug:
+    B.quickdebug=quickdebug.split(',')  # was string.split(); the 'string' module is never imported in this file
+else:
+    B.quickdebug=[]
+
+if quickie:
+    B.quickie=quickie.split(',')  # was string.split(); the 'string' module is never imported in this file
+else:
+    B.quickie=[]
+
+env = BlenderEnvironment(configuration, compiler, projectTool, B)
+variant.tools["env"] = env
+
+if not env:
+ print "Could not create a build environment"
+ Exit()
+
+# Set some environment variables SCons would normally set.
+env['CC'] = ''
+env['CPPDEFINES'] = []
+env['CPPPATH'] = []
+env['CXX'] = ''
+env['LIBPATH'] = []
+env['LIBPREFIX'] = compiler.libraryPrefix
+env['LIBSUFFIX'] = compiler.librarySuffix
+env['LINKFLAGS'] = []
+env['RCFLAGS'] = []
+
+# Set some environment variables used by Cake.
+env['BUILD_SYSTEM'] = 'Cake'
+env['PREREQUISITES'] = []
+
+# Set the blender build platform.
+platform = caketools.blenderPlatform(platformName, compilerName, architecture)
+env['OURPLATFORM'] = platform
+
+# Beware! Scons platform is the Python execution environment, not necessarily
+# the current build platform.
+env['PLATFORM'] = caketools.sconsPlatform()
+
+configfile = os.path.join("build_files", "scons", "config", platform + "-config.py")
+
+if os.path.exists(configfile):
+ print B.bc.OKGREEN + "Using config file: " + B.bc.ENDC + configfile
+else:
+ print B.bc.FAIL + configfile + " doesn't exist" + B.bc.ENDC
+
+userconfig = B.arguments.get('BF_CONFIG', 'user-config.py')
+# first read platform config. B.arguments will override
+optfiles = [configfile]
+if os.path.exists(userconfig):
+ print B.bc.OKGREEN + "Using user-config file: " + B.bc.ENDC + userconfig
+ optfiles += [userconfig]
+else:
+ print B.bc.WARNING + userconfig + " not found, no user overrides" + B.bc.ENDC
+
+opts = btools.read_opts(env, optfiles, B.arguments)
+opts.Update(env)
+
+# Append the target to the build directory names.
+env['BF_BUILDDIR'] = env['BF_BUILDDIR'] + '-' + target
+env['BF_INSTALLDIR'] = env['BF_INSTALLDIR'] + '-' + target
+
+# Turn on BF_DEBUG?
+if target == "debug":
+ env['BF_DEBUG'] = True
+
+# Fix some library names.
+if env['OURPLATFORM'] == "win32-mingw":
+ env['BF_JPEG_LIB'] = 'libjpeg'
+elif env['OURPLATFORM'] == "darwin":
+ env['BF_PNG_LIB'] = 'png12'
+ env['BF_FFTW3_LIB'] = 'fftw3'
+
+if not env['BF_FANCY']:
+ B.bc.disable()
+
+
+# remove install dir so old and new files are not mixed.
+# NOTE: only do the scripts directory for now, otherwise is too disruptive for developers
+# TODO: perhaps we need an option (off by default) to not do this altogether...
+if not env['WITHOUT_BF_INSTALL'] and not env['WITHOUT_BF_OVERWRITE_INSTALL']:
+ scriptsDir = os.path.join(env['BF_INSTALLDIR'], VERSION, 'scripts')
+ if os.path.isdir(scriptsDir):
+ print B.bc.OKGREEN + "Clearing installation directory%s: %s" % (B.bc.ENDC, os.path.abspath(scriptsDir))
+ shutil.rmtree(scriptsDir)
+
+# Check whether we should only generate projects.
+if projectTool.enabled:
+ env["BF_MSVS"] = True
+else:
+ numJobs = cake.task.getDefaultThreadPool().numWorkers
+ print B.bc.OKGREEN + "Build with parallel jobs%s: %s" % (B.bc.ENDC, numJobs)
+ print B.bc.OKGREEN + "Build with debug symbols%s: %s" % (B.bc.ENDC, env['BF_DEBUG'])
+
+if 'blenderlite' in B.targets:
+ target_env_defs = {}
+ target_env_defs['WITH_BF_GAMEENGINE'] = False
+ target_env_defs['WITH_BF_OPENAL'] = False
+ target_env_defs['WITH_BF_OPENEXR'] = False
+ target_env_defs['WITH_BF_OPENMP'] = False
+ target_env_defs['WITH_BF_ICONV'] = False
+ target_env_defs['WITH_BF_INTERNATIONAL'] = False
+ target_env_defs['WITH_BF_OPENJPEG'] = False
+ target_env_defs['WITH_BF_FFMPEG'] = False
+ target_env_defs['WITH_BF_QUICKTIME'] = False
+ target_env_defs['WITH_BF_REDCODE'] = False
+ target_env_defs['WITH_BF_DDS'] = False
+ target_env_defs['WITH_BF_CINEON'] = False
+ target_env_defs['WITH_BF_HDR'] = False
+ target_env_defs['WITH_BF_ZLIB'] = False
+ target_env_defs['WITH_BF_SDL'] = False
+ target_env_defs['WITH_BF_JPEG'] = False
+ target_env_defs['WITH_BF_PNG'] = False
+ target_env_defs['WITH_BF_BULLET'] = False
+ target_env_defs['WITH_BF_BINRELOC'] = False
+ target_env_defs['BF_BUILDINFO'] = False
+ target_env_defs['WITH_BF_FLUID'] = False
+ target_env_defs['WITH_BF_OCEANSIM'] = False
+ target_env_defs['WITH_BF_DECIMATE'] = False
+ target_env_defs['WITH_BF_BOOLEAN'] = False
+ target_env_defs['WITH_BF_PYTHON'] = False
+ target_env_defs['WITH_BF_3DMOUSE'] = False
+
+ # Merge blenderlite, let command line to override
+ for k,v in target_env_defs.iteritems():
+ if k not in B.arguments:
+ env[k] = v
+
+# Extended OSX_SDK and 3D_CONNEXION_CLIENT_LIBRARY and JackOSX detection for OSX
+if env['OURPLATFORM']=='darwin':
+ print B.bc.OKGREEN + "Detected Xcode version: -- " + B.bc.ENDC + env['XCODE_CUR_VER'][:9] + " --"
+ print "Available " + env['MACOSX_SDK_CHECK']
+ if not 'Mac OS X 10.5' in env['MACOSX_SDK_CHECK']:
+ print B.bc.OKGREEN + "MacOSX10.5.sdk not available:" + B.bc.ENDC + " using MacOSX10.6.sdk"
+ else:
+ print B.bc.OKGREEN + "Found recommended sdk :" + B.bc.ENDC + " using MacOSX10.5.sdk"
+
+ # for now, Mac builders must download and install the 3DxWare 10 Beta 4 driver framework from 3Dconnexion
+ # necessary header file lives here when installed:
+ # /Library/Frameworks/3DconnexionClient.framework/Versions/Current/Headers/ConnexionClientAPI.h
+ if env['WITH_BF_3DMOUSE'] == 1:
+ if not os.path.exists('/Library/Frameworks/3DconnexionClient.framework'):
+ print "3D_CONNEXION_CLIENT_LIBRARY not found, disabling WITH_BF_3DMOUSE" # avoid build errors !
+ env['WITH_BF_3DMOUSE'] = 0
+ else:
+ env.Append(LINKFLAGS=['-Xlinker','-weak_framework','-Xlinker','3DconnexionClient'])
+
+ # for now, Mac builders must download and install the JackOSX framework
+ # necessary header file lives here when installed:
+ # /Library/Frameworks/Jackmp.framework/Versions/A/Headers/jack.h
+ if env['WITH_BF_JACK'] == 1:
+ if not os.path.exists('/Library/Frameworks/Jackmp.framework'):
+ print "JackOSX install not found, disabling WITH_BF_JACK" # avoid build errors !
+ env['WITH_BF_JACK'] = 0
+ else:
+ env.Append(LINKFLAGS=['-Xlinker','-weak_framework','-Xlinker','Jackmp'])
+
+if env['WITH_BF_OPENMP'] == 1:
+ if env['OURPLATFORM'] in ('win32-vc', 'win64-vc'):
+ env['CCFLAGS'].append('/openmp')
+ else:
+ if env['CC'].endswith('icc'): # to be able to handle CC=/opt/bla/icc case
+ env.Append(LINKFLAGS=['-openmp', '-static-intel'])
+ env['CCFLAGS'].append('-openmp')
+ else:
+ env.Append(CCFLAGS=['-fopenmp'])
+
+if env['WITH_GHOST_COCOA'] == True:
+ env.Append(CPPFLAGS=['-DGHOST_COCOA'])
+
+if env['USE_QTKIT'] == True:
+ env.Append(CPPFLAGS=['-DUSE_QTKIT'])
+
+#check for additional debug libnames
+
+if env.has_key('BF_DEBUG_LIBS'):
+ B.quickdebug += env['BF_DEBUG_LIBS']
+
+printdebug = B.arguments.get('BF_LISTDEBUG', 0)
+
+if len(B.quickdebug) > 0 and printdebug != 0:
+    print B.bc.OKGREEN + "Building these libs with debug symbols:" + B.bc.ENDC
+    for l in B.quickdebug:
+        print "\t" + l
+
+# remove stdc++ from LLIBS if we are building a statically linked C++
+if env['WITH_BF_STATICCXX']:
+ if 'stdc++' in env['LLIBS']:
+ env['LLIBS'].remove('stdc++')
+ else:
+ print '\tcould not remove stdc++ library from LLIBS, WITH_BF_STATICCXX may not work for your platform'
+
+# check target for blenderplayer. Set WITH_BF_PLAYER if found on cmdline
+if 'blenderplayer' in B.targets:
+ env['WITH_BF_PLAYER'] = True
+
+if 'blendernogame' in B.targets:
+ env['WITH_BF_GAMEENGINE'] = False
+
+# build without elbeem (fluidsim)?
+if env['WITH_BF_FLUID'] == 1:
+ env['CPPFLAGS'].append('-DWITH_MOD_FLUID')
+
+# build with ocean sim?
+if env['WITH_BF_OCEANSIM'] == 1:
+ env['WITH_BF_FFTW3'] = 1 # ocean needs fftw3 so enable it
+ env['CPPFLAGS'].append('-DWITH_MOD_OCEANSIM')
+
+
+if btools.ENDIAN == "big":
+ env['CPPFLAGS'].append('-D__BIG_ENDIAN__')
+else:
+ env['CPPFLAGS'].append('-D__LITTLE_ENDIAN__')
+
+# TODO, make optional
+env['CPPFLAGS'].append('-DWITH_AUDASPACE')
+
+# lastly we check for root_build_dir (we should not do this earlier, otherwise we might use the wrong builddir)
+B.root_build_dir = env['BF_BUILDDIR']
+B.doc_build_dir = os.path.join(env['BF_INSTALLDIR'], 'doc')
+if not B.root_build_dir[-1]==os.sep:
+ B.root_build_dir += os.sep
+if not B.doc_build_dir[-1]==os.sep:
+ B.doc_build_dir += os.sep
+
+# We do a shortcut for clean when no quicklist is given: just delete
+# builddir without reading in SConscripts
+do_clean = None
+if 'clean' in B.targets:
+ do_clean = True
+
+if not quickie and do_clean:
+    if os.path.exists(B.doc_build_dir):
+        print B.bc.HEADER+'Cleaning doc dir...'+B.bc.ENDC
+        dirs = os.listdir(B.doc_build_dir)
+        for entry in dirs:
+            if os.path.isdir(B.doc_build_dir + entry) == 1:
+                print "clean dir %s"%(B.doc_build_dir+entry)
+                shutil.rmtree(B.doc_build_dir+entry)
+            else: # remove file
+                print "remove file %s"%(B.doc_build_dir+entry)
+                os.remove(B.doc_build_dir+entry)  # was root_build_dir: deleted files from the wrong directory
+ if os.path.exists(B.root_build_dir):
+ print B.bc.HEADER+'Cleaning build dir...'+B.bc.ENDC
+ dirs = os.listdir(B.root_build_dir)
+ for entry in dirs:
+ if os.path.isdir(B.root_build_dir + entry) == 1:
+ print "clean dir %s"%(B.root_build_dir+entry)
+ shutil.rmtree(B.root_build_dir+entry)
+ else: # remove file
+ print "remove file %s"%(B.root_build_dir+entry)
+ os.remove(B.root_build_dir+entry)
+ for confile in ['extern/ffmpeg/config.mak', 'extern/x264/config.mak',
+ 'extern/xvidcore/build/generic/platform.inc', 'extern/ffmpeg/include']:
+ if os.path.exists(confile):
+ print "clean file %s"%confile
+ if os.path.isdir(confile):
+ for root, dirs, files in os.walk(confile):
+ for name in files:
+ os.remove(os.path.join(root, name))
+ else:
+ os.remove(confile)
+ print B.bc.OKGREEN+'...done'+B.bc.ENDC
+ else:
+ print B.bc.HEADER+'Already Clean, nothing to do.'+B.bc.ENDC
+ Exit()
+
+
+# ensure python header is found since detection can fail, this could happen
+# with _any_ library but since we used a fixed python version this tends to
+# be most problematic.
+if env['WITH_BF_PYTHON']:
+ py_h = os.path.join(Dir(env.subst('${BF_PYTHON_INC}')).abspath, "Python.h")
+
+ if not os.path.exists(py_h):
+ print("\nMissing: \"" + env.subst('${BF_PYTHON_INC}') + os.sep + "Python.h\",\n"
+ " Set 'BF_PYTHON_INC' to point "
+ "to a valid python include path.\n Containing "
+ "Python.h for python version \"" + env.subst('${BF_PYTHON_VERSION}') + "\"")
+
+ Exit()
+ del py_h
+
+print B.bc.HEADER+'Building in: ' + B.bc.ENDC + os.path.abspath(B.root_build_dir)
+B.init_lib_dict()
Index: build_files/scons/tools/Blender.py
===================================================================
--- build_files/scons/tools/Blender.py (revision 42633)
+++ build_files/scons/tools/Blender.py (working copy)
@@ -365,6 +365,10 @@
build_type = "Release"
build_cflags = ' '.join(lenv['CFLAGS'] + lenv['CCFLAGS'] + lenv['REL_CFLAGS'] + lenv['REL_CCFLAGS'] + lenv['CPPFLAGS'])
build_cxxflags = ' '.join(lenv['CCFLAGS'] + lenv['CXXFLAGS'] + lenv['REL_CXXFLAGS'] + lenv['REL_CCFLAGS'] + lenv['CPPFLAGS'])
+ if 'BUILD_SYSTEM' in lenv:
+ build_system = lenv['BUILD_SYSTEM']
+ else:
+ build_system = "SCons"
build_linkflags = ' '.join(lenv['PLATFORM_LINKFLAGS'])
@@ -379,7 +383,7 @@
'BUILD_CFLAGS=\\"%s\\"'%(build_cflags),
'BUILD_CXXFLAGS=\\"%s\\"'%(build_cxxflags),
'BUILD_LINKFLAGS=\\"%s\\"'%(build_linkflags),
- 'BUILD_SYSTEM=\\"SCons\\"'
+ 'BUILD_SYSTEM=\\"%s\\"'%(build_system)
])
lenv.Append (CPPPATH = [root_build_dir+'source/blender/blenkernel'])
@@ -739,7 +743,7 @@
SConsEnvironment.Default(self, res)
resources.append(res)
- def BlenderLib(self=None, libname=None, sources=None, includes=[], defines=[], libtype='common', priority = 100, compileflags=None, cc_compileflags=None, cxx_compileflags=None, cc_compilerchange=None, cxx_compilerchange=None):
+ def BlenderLib(self=None, libname=None, sources=None, includes=[], defines=[], libtype='common', priority = 100, compileflags=None, cc_compileflags=None, cxx_compileflags=None, cc_compilerchange=None, cxx_compilerchange=None, **kwargs):
global vcp
if not self or not libname or not sources:
print bc.FAIL+'Cannot continue. Missing argument for BuildBlenderLib '+libname+bc.ENDC
@@ -797,14 +801,13 @@
targetdir = root_build_dir+'lib/' + libname
if not (root_build_dir[0]==os.sep or root_build_dir[1]==':'):
targetdir = '#'+targetdir
- lib = lenv.Library(target= targetdir, source=sources)
+ lib = lenv.Library(target= targetdir, source=sources, **kwargs)
SConsEnvironment.Default(self, lib) # we add to default target, because this way we get some kind of progress info during build
- if self['BF_MSVS'] and self['OURPLATFORM'] in ('win32-vc', 'win64-vc'):
- #if targetdir[0] == '#':
- # targetdir = targetdir[1:-1]
- print "! ",targetdir+ '.vcproj' # + self['MSVSPROJECTSUFFIX']
- vcproject = self.MSVSProject(target = targetdir + '.vcproj', # + self['MSVSPROJECTSUFFIX'],
- srcs = sources,
+ if self['BF_MSVS'] and self['OURPLATFORM'] in ('win32-vc', 'win64-vc', 'win32-mingw'):
+ projectpath = '#../build/project/lib/' + libname + '.vcproj' # + self['MSVSPROJECTSUFFIX']
+ print "! ",projectpath
+ vcproject = self.MSVSProject(target = projectpath,
+ srcs = [str(s) for s in SCons.Util.flatten(sources)], # Scons doesn't like program targets here.
buildtarget = lib,
variant = 'Release',
auto_build_solution=0)
@@ -850,11 +853,21 @@
brs = lenv.Command(f, prog, [bsc])
SConsEnvironment.Default(self, brs)
SConsEnvironment.Default(self, prog)
- if self['BF_MSVS'] and self['OURPLATFORM'] in ('win32-vc', 'win64-vc') and progname == 'blender':
- print "! ",builddir + "/" + progname + '.sln'
- sln = self.MSVSProject(target = builddir + "/" + progname + '.sln',
- projects= vcp,
- variant = 'Release')
+ if self['BF_MSVS'] and self['OURPLATFORM'] in ('win32-vc', 'win64-vc', 'win32-mingw') and progname == 'blender':
+ projectpath = '#../build/project/lib/' + progname + '.vcproj' # + self['MSVSPROJECTSUFFIX']
+ print "! ",projectpath
+ vcproject = self.MSVSProject(target = projectpath,
+ #srcs = sources, # Note: These are only obj's and lib's.
+ buildtarget = prog,
+ variant = 'Release',
+ auto_build_solution=0)
+ vcp.append(vcproject)
+ SConsEnvironment.Default(self, vcproject)
+ solutionpath = '#../build/project/' + progname + '.sln'
+ print "! ",solutionpath
+ sln = self.MSVSSolution(target = solutionpath,
+ projects = vcp,
+ variant = 'Release')
SConsEnvironment.Default(self, sln)
program_list.append(prog)
if lenv['OURPLATFORM']=='darwin':
Index: cake/cake/__init__.py
===================================================================
--- cake/cake/__init__.py (revision 0)
+++ cake/cake/__init__.py (working copy)
@@ -0,0 +1,46 @@
+"""Cake Build System.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import sys
+
+# We want the 'cake.tools' module to have contents based on the current
+# thread's Script.
+# Cake scripts can get access to their tools using standard python import
+# statements.
+
+class ToolsProxy(object):
+
+ def __getattribute__(self, key):
+ from cake.script import Script
+ script = Script.getCurrent()
+ if script is None:
+ raise AttributeError("No current script.")
+ else:
+ try:
+ return script.tools[key]
+ except KeyError:
+ raise AttributeError("No such tool '%s'" % key)
+
+ def __setattr__(self, key, value):
+ from cake.script import Script
+ script = Script.getCurrent()
+ if script is None:
+ raise AttributeError("No current script.")
+ else:
+ script.tools[key] = value
+
+tools = ToolsProxy()
+"""Cake tools module.
+
+This is the main module for Cake tools. It allows users to import tools
+using the standard Python import statement, eg::
+
+ from cake.tools import compiler
+
+ compiler.library(target="myLibrary", sources=myObjects)
+"""
+sys.modules['cake.tools'] = tools
Index: cake/cake/bytecode.py
===================================================================
--- cake/cake/bytecode.py (revision 0)
+++ cake/cake/bytecode.py (working copy)
@@ -0,0 +1,110 @@
+"""Utilities for loading Byte-Compiled Scripts.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import __builtin__
+import imp
+import marshal
+import os
+import struct
+import platform
+
+# Magic header written at start of file
+_MAGIC = imp.get_magic()
+_MAGIC_LEN = len(_MAGIC)
+_NOTMAGIC = '\0' * _MAGIC_LEN
+
+assert _MAGIC != _NOTMAGIC
+
+# Define an internal helper according to the platform
+if platform.system() in ['Darwin']:
+ import MacOS
+ def _setCreatorType(file):
+ MacOS.SetCreatorAndType(file, 'Pyth', 'PYC ')
+else:
+ def _setCreatorType(file):
+ pass
+
+def loadCode(file, cfile=None, dfile=None, cached=True):
+ """Load the code object for the specified python file.
+
+ Uses the bytecode cache file if it exists and matches the timestamp of
+ the source file. These files have the same format as the .pyc/.pyo files
+ used by Python module import logic.
+
+ @param file: Path of the source file to load.
+
+ @param cfile: If specified, the path of the bytecode cache file.
+ Defaults to path of the original file with either 'c' or 'o' appended.
+
+ @param dfile: If specified, the path of the file to show in error
+ messages. Defaults to C{file}.
+
+ @param cached: True if the byte code should be cached to a separate
+ file for quicker loading next time.
+ @type cached: bool
+
+ @return: The code object resulting from compiling the python source file.
+ This can be executed by the 'exec' statement/function.
+ """
+ if cfile is None:
+ cfile = file + (__debug__ and 'c' or 'o')
+
+ timestamp = None
+
+ if cached:
+ # Try to load the cache file if possible, don't sweat if we can't
+ try:
+ f = open(cfile, 'rb')
+ try:
+ if f.read(_MAGIC_LEN) == _MAGIC:
+ cacheTimestamp = struct.unpack('<I', f.read(4))[0]
+ timestamp = long(os.stat(file).st_mtime)
+ if timestamp == cacheTimestamp:
+ return marshal.load(f)
+ finally:
+ f.close()
+ except Exception:
+ # Failed to load the cache file
+ pass
+
+ # Load the source file
+ f = open(file, 'rU')
+ try:
+ if timestamp is None:
+ try:
+ timestamp = long(os.fstat(f.fileno()).st_mtime)
+ except AttributeError:
+ timestamp = long(os.stat(file).st_mtime)
+ codestring = f.read()
+ finally:
+ f.close()
+
+ # Source needs a trailing newline to compile correctly
+ if not codestring.endswith('\n'):
+ codestring = codestring + '\n'
+
+ # Compile the source
+ codeobject = __builtin__.compile(codestring, dfile or file, 'exec')
+
+ if cached:
+ # Try to save the cache file if possible, don't sweat if we can't
+ try:
+ f = open(cfile, 'wb')
+ try:
+ f.write(_NOTMAGIC)
+ f.write(struct.pack('<I', timestamp))
+ marshal.dump(codeobject, f)
+ f.flush()
+ f.seek(0, 0)
+ f.write(_MAGIC)
+ finally:
+ f.close()
+ _setCreatorType(cfile)
+ except Exception:
+ pass
+
+ return codeobject
Index: cake/cake/config.cake
===================================================================
--- cake/cake/config.cake (revision 0)
+++ cake/cake/config.cake (working copy)
@@ -0,0 +1,144 @@
+#-------------------------------------------------------------------------------
+# Default configuration used if none is passed on the command line or
+# found by searching up from the working directory.
+#-------------------------------------------------------------------------------
+from cake.engine import Variant
+from cake.library import waitForAsyncResult
+from cake.library.compilers import CompilerNotFoundError
+from cake.library.compilers.dummy import DummyCompiler
+from cake.library.env import EnvironmentTool
+from cake.library.filesys import FileSystemTool
+from cake.library.logging import LoggingTool
+from cake.library.project import ProjectTool
+from cake.library.script import ScriptTool
+from cake.library.shell import ShellTool
+from cake.library.variant import VariantTool
+from cake.library.zipping import ZipTool
+from cake.script import Script
+
+import cake.path
+import cake.system
+
+platform = cake.system.platform().lower()
+hostArchitecture = cake.system.architecture().lower()
+configuration = Script.getCurrent().configuration
+engine = Script.getCurrent().engine
+
+# Override the configuration basePath() function.
+def basePath(value):
+ from cake.tools import script, env
+
+ @waitForAsyncResult
+ def _basePath(path):
+ if isinstance(path, basestring):
+ path = env.expand(path)
+ if path.startswith("#"):
+ if path[1] in '\\/': # Keep project paths relative but remove slashes.
+ return path[2:]
+ else:
+ return path[1:]
+ elif cake.path.isAbs(path):
+ return path # Keep absolute paths as found.
+ else:
+ return script.cwd(path) # Prefix relative paths with scripts dir.
+ elif isinstance(path, (list, set)): # Convert set->list in case of valid duplicates.
+ return list(_basePath(p) for p in path)
+ elif isinstance(path, tuple):
+ return tuple(_basePath(p) for p in path)
+ elif isinstance(path, dict):
+ return dict((k, _basePath(v)) for k, v in path.iteritems())
+ else:
+ return path # Could be a FileTarget. Leave it as is.
+
+ return _basePath(value)
+
+configuration.basePath = basePath
+
+# Create the project tool, only enabled during project generation.
+projectTool = ProjectTool(configuration=configuration)
+projectTool.product = ProjectTool.VS2010 # Build projects for VS2010.
+projectTool.enabled = hasattr(engine.options, "createProjects") and engine.options.createProjects
+
+# Add a build success callback that will do the actual project generation.
+engine.addBuildSuccessCallback(projectTool.build)
+
+def createVariants(platform, architecture, compiler):
+ for target in ["debug", "release"]:
+ variant = Variant(
+ platform=platform,
+ architecture=architecture,
+ compiler=compiler.name,
+ target=target,
+ )
+ variant.tools["env"] = env = EnvironmentTool(configuration=configuration)
+ variant.tools["script"] = ScriptTool(configuration=configuration)
+ variant.tools["logging"] = LoggingTool(configuration=configuration)
+ variant.tools["variant"] = VariantTool(configuration=configuration)
+ variant.tools["shell"] = ShellTool(configuration=configuration)
+ variant.tools["filesys"] = FileSystemTool(configuration=configuration)
+ variant.tools["zipping"] = ZipTool(configuration=configuration)
+ variant.tools["compiler"] = compilerClone = compiler.clone()
+ variant.tools["project"] = projectClone = projectTool.clone()
+
+ # Set a build directory specific to this variant.
+ env["VARIANT"] = "-".join([platform, compiler.name, architecture, target])
+
+ # Turn on debug symbols for the debug target.
+ compilerClone.debugSymbols = target == "debug"
+
+ # Set the project config and platform names for this variant. Note that if
+ # these are not set a default will be used that is based on the variants
+ # keywords.
+ projectClone.projectConfigName = '%s %s (%s) %s' % (
+ platform.capitalize(),
+ compiler.name.capitalize(),
+ architecture,
+ target.capitalize(),
+ )
+ projectClone.solutionConfigName = target.capitalize()
+ projectClone.solutionPlatformName = '%s %s (%s)' % (
+ platform.capitalize(),
+ compiler.name.capitalize(),
+ architecture,
+ )
+
+ # Disable all other tools if the project tool is enabled.
+ if projectTool.enabled:
+ for tool in variant.tools.itervalues():
+ if not isinstance(tool, ProjectTool):
+ tool.enabled = False
+
+ configuration.addVariant(variant)
+
+# Create Dummy Compiler.
+compiler = DummyCompiler(configuration=configuration)
+createVariants(platform, "none", compiler)
+
+# Create GCC Compiler.
+try:
+ from cake.library.compilers.gcc import findGccCompiler
+ compiler = findGccCompiler(configuration=configuration)
+ compiler.addLibrary("stdc++")
+ createVariants(platform, hostArchitecture, compiler)
+except CompilerNotFoundError:
+ pass
+
+if cake.system.isWindows():
+ # Create MinGW Compiler.
+ try:
+ from cake.library.compilers.gcc import findMinGWCompiler
+ compiler = findMinGWCompiler(configuration=configuration)
+ createVariants(platform, hostArchitecture, compiler)
+ except CompilerNotFoundError:
+ pass
+ # Create MSVC Compilers.
+ try:
+ from cake.library.compilers.msvc import findMsvcCompiler
+ for architecture in ["x86", "amd64", "ia64"]:
+ compiler = findMsvcCompiler(configuration=configuration, architecture=architecture)
+ compiler.addDefine("WIN32")
+ if architecture in ["amd64", "ia64"]:
+ compiler.addDefine("WIN64")
+ createVariants(platform, architecture, compiler)
+ except CompilerNotFoundError:
+ pass
Index: cake/cake/engine.py
===================================================================
--- cake/cake/engine.py (revision 0)
+++ cake/cake/engine.py (working copy)
@@ -0,0 +1,1112 @@
+"""Engine-Level Classes and Utilities.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import codecs
+import threading
+import traceback
+import sys
+import os
+import os.path
+import time
+
+import math
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
+import cake.bytecode
+import cake.task
+import cake.path
+import cake.hash
+import cake.filesys
+import cake.threadpool
+
+from cake.script import Script as _Script
+
+class BuildError(Exception):
+ """Exception raised when a build fails.
+
+ This exception is treated as expected by the Cake build system as it won't
+ output the stack-trace if raised by a task.
+ """
+ pass
+
+class DependencyInfoError(Exception):
+ """Exception raised when a dependency info file fails to load.
+ """
+ pass
+
+class Variant(object):
+ """A container for build configuration information.
+
+ @ivar tools: The available tools for this variant.
+ @type tools: dict
+ """
+
+ constructionScriptPath = None
+ """Path to the script used to construct this variant before it is used for the first time.
+
+ @type: string or None
+ """
+
+ def __init__(self, **keywords):
+ """Construct an empty variant.
+ """
+ self.keywords = keywords
+ self.tools = {}
+ self._constructionLock = threading.Lock()
+ self._isConstructed = False
+
+ def __repr__(self):
+ keywords = ", ".join('%s=%r' % (k, v) for k, v in self.keywords.iteritems())
+ return "Variant(%s)" % keywords
+
+ def __getitem__(self, key):
+ """Return a keywords value given its key.
+
+ @param key: The key of the keyword variable to get.
+ @return: The value of the keyword variable.
+ """
+ return self.keywords[key]
+
+ def _construct(self, configuration):
+ # Do an initial check without acquiring the lock (which is slow).
+ if self._isConstructed:
+ return
+
+ self._constructionLock.acquire()
+ try:
+ # Check again in case someone else got here first.
+ if not self._isConstructed:
+ if self.constructionScriptPath is not None:
+ script = _Script(
+ path=self.constructionScriptPath,
+ configuration=configuration,
+ variant=self,
+ task=None,
+ engine=configuration.engine,
+ )
+ script.execute()
+ self._isConstructed = True
+ finally:
+ self._constructionLock.release()
+
+ def matches(*args, **keywords):
+ """Query if this variant matches the specified keywords.
+ """
+ # Don't use self in signature in case the user wants a keyword of
+ # self.
+ self, = args
+ variantKeywords = self.keywords
+ for key, value in keywords.iteritems():
+ variantValue = variantKeywords.get(key, None)
+ if isinstance(value, (list, tuple)):
+ for v in value:
+ if variantValue == v:
+ break
+ else:
+ return False
+ elif value == "all" and variantValue is not None:
+ continue
+ elif variantValue != value:
+ return False
+ else:
+ return True
+
+ def clone(self, **keywords):
+ """Create an independent copy of this variant.
+
+ @param keywords: The name/value pairs that define the new variant.
+ @type keywords: dict of string->string
+
+ @return: The new Variant.
+ """
+ newKeywords = self.keywords.copy()
+ newKeywords.update(keywords)
+ v = Variant(**newKeywords)
+ v.tools = dict((name, tool.clone()) for name, tool in self.tools.iteritems())
+ return v
+
+class Engine(object):
+ """Main object that holds all of the singleton resources for a build.
+
+ @ivar scriptThreadPool: The scriptThreadPool is a single-threaded thread
+ pool that is used to speed up incremental builds on multi-core platforms.
+ It is used to execute scripts and check dependencies, both of which
+ mainly use Python code. Threaded Python code executes under a
+ notoriously slow GIL (Global Interpreter Lock). By executing most
+ Python code on the same thread we can avoid the expensive GIL locking.
+ @type scriptThreadPool: L{ThreadPool}
+
+ @ivar logger: The object used to output build messages.
+ @type logger: L{Logger}
+
+ @ivar parser: The object used to parse command line arguments.
+ @type parser: L{OptionParser}
+
+ @ivar args: The command line arguments.
+ @type args: list of string
+
+ @ivar options: The options found after parsing command line arguments.
+ @type options: L{Option}
+
+ @ivar oscwd: The initial working directory when Cake was first started.
+ @type oscwd: string
+ """
+
+ scriptCachePath = None
+ """Path to the script cache files.
+
+ The absolute path to the directory that should store
+ script cache files. If None the script cache files will be put next to the
+ script files themselves with a different extension (usually .cakec).
+ @type: string or None
+ """
+ dependencyInfoPath = None
+ """Path to store dependency info files.
+
+ The absolute path to the directory that should store
+ dependency info files. If None the dependency info files will be put next to the
+ target files themselves with a different extension (usually .dep).
+ @type: string or None
+ """
+
+ forceBuild = False
+ defaultConfigScriptName = "config.cake"
+ maximumErrorCount = None
+
+ def __init__(self, logger, parser, args):
+ """Default Constructor.
+ """
+ self._byteCodeCache = {}
+ self._timestampCache = {}
+ self._digestCache = {}
+ self._dependencyInfoCache = {}
+ self._searchUpCache = {}
+ self._configurations = {}
+ self.scriptThreadPool = cake.threadpool.ThreadPool(1)
+ self.errors = []
+ self.warnings = []
+ self.logger = logger
+ self.parser = parser
+ self.args = args
+ self.options = None
+ self.oscwd = os.getcwd() # Save original cwd in case someone changes it.
+ self.buildSuccessCallbacks = []
+ self.buildFailureCallbacks = []
+
+ @property
+ def errorCount(self):
+ return len(self.errors)
+
+ @property
+ def warningCount(self):
+ return len(self.warnings)
+
+ def searchUpForFile(self, path, fileName):
+ """Attempt to find a file in a particular path or any of its parent
+ directories.
+
+ Caches previous search results for efficiency.
+
+ @param path: The path to search for the file.
+ @type path: string
+
+ @param fileName: The name of the file to search for.
+ @type fileName: string
+
+ @return: Absolute path of the file found in the path or its nearest
+ ancestor that contains the file, otherwise None if the file wasn't
+ found.
+ @rtype: string or None
+ """
+
+ searchUpCache = self._searchUpCache.get(fileName, None)
+ if searchUpCache is None:
+ searchUpCache = self._searchUpCache.setdefault(fileName, {})
+
+ undefined = object()
+ undefinedPaths = []
+ path = os.path.normcase(os.path.abspath(path))
+ while True:
+ configPath = searchUpCache.get(path, undefined)
+ if configPath is not undefined:
+ break
+
+ undefinedPaths.append(path)
+
+ candidate = cake.path.join(path, fileName)
+ if cake.filesys.isFile(candidate):
+ configPath = cake.path.fileSystemPath(candidate)
+ break
+
+ parent = cake.path.dirName(path)
+ if parent == path:
+ configPath = None
+ break
+
+ path = parent
+
+ for undefinedPath in undefinedPaths:
+ searchUpCache[undefinedPath] = configPath
+
+ return configPath
+
+ def findConfigScriptPath(self, path, configScriptName=None):
+ """Attempt to find the path of the config script to use for building
+ a particular path.
+
+ @param path: Absolute path to start searching for the config script file.
+ @type path: string
+
+ @param configScriptName: Name of the config script file to search for
+ or None to use the default configScriptName.
+ @type configScriptName: string or None
+
+ @return: Path to the config script file if found otherwise None.
+ @rtype: string or None
+ """
+ if configScriptName is None:
+ configScriptName = self.defaultConfigScriptName
+
+ return self.searchUpForFile(path, configScriptName)
+
+ def getConfiguration(self, path):
+ """Get the configuration for a specified config script path.
+
+ Executes the config script if not already executed.
+
+ @param path: Absolute path of the config script used to
+ populate the configuration.
+ @type path: string
+
+ @return: The Configuration that has been configured with the
+ specified config script.
+ @rtype: L{Configuration}
+ """
+ configuration = self._configurations.get(path, None)
+ if configuration is None:
+ configuration = Configuration(path=path, engine=self)
+ script = _Script(
+ path=cake.path.baseName(path),
+ configuration=configuration,
+ variant=None,
+ engine=self,
+ task=None,
+ parent=None,
+ )
+ script.execute()
+ configuration = self._configurations.setdefault(path, configuration)
+ return configuration
+
+ def findConfiguration(self, path, configScriptName=None):
+ """Find the configuration for a particular path.
+
+ @param path: Absolute path to start searching for a config script.
+ @type path: string
+
+ @param configScriptName: Name of the config script to search for.
+ If not supplied then self.defaultConfigScriptName is used.
+ @type configScriptName: string or None
+
+ @return: The initialised Configuration object corresponding
+ to the found config script.
+ @rtype: L{Configuration}
+
+ @raise BuildError: If the config script could not be found.
+ """
+ configScript = self.findConfigScriptPath(path, configScriptName)
+ # Fall back on the default config script located in this file's directory
+ if configScript is None:
+ configScript = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)),
+ self.defaultConfigScriptName,
+ )
+ return self.getConfiguration(configScript)
+
+ def execute(self, path, configScript=None, configScriptName=None, keywords={}):
+ """Execute a script at specified path with all matching variants.
+
+ The variants the script is executed with are determined by the
+ keywords specified here.
+
+ @param path: Absolute path of the script to execute.
+ @type path: string.
+
+ @param configScript: Absolute path of the config script to execute the
+ script with, pass None to search for the config script.
+ @type configScript: string or None
+
+ @param configScriptName: Name of the config script file to search for
+ if configScript was passed as None. If None then use the engine's
+ default config script name.
+ @type configScriptName: string or None
+
+ @param keywords: Keywords used to filter the set of variants the
+ script will be executed with.
+ @type keywords: dictionary of string -> string or list of string
+
+ @return: A task that will complete when the script and any tasks
+ it spawns finishes executing.
+ @rtype: L{Task}
+ """
+ if configScript is None:
+ configuration = self.findConfiguration(path, configScriptName)
+ else:
+ configuration = self.getConfiguration(configScript)
+
+ path = cake.path.relativePath(path, configuration.baseDir)
+
+ tasks = []
+ for variant in configuration.findAllVariants(keywords):
+ task = configuration.execute(path, variant).task
+ tasks.append(task)
+
+ if not tasks:
+ if keywords:
+ args = " ".join("%s=%s" % (k, ",".join(v)) for k, v in keywords.items())
+ self.raiseError(
+ "No build variants found in '%s' that match the keywords '%s'.\n" % (configuration.path, args)
+ )
+ else:
+ self.raiseError(
+ "No build variants found in '%s'.\n" % configuration.path
+ )
+ elif len(tasks) > 1:
+ task = self.createTask()
+ task.completeAfter(tasks)
+ task.start()
+ return task
+ else:
+ return tasks[0]
+
+ def addBuildSuccessCallback(self, callback):
+ """Register a callback to be run if the build completes successfully.
+
+ @param callback: The callback to run when the build completes
+ successfully.
+ @type callback: any callable
+ """
+ self.buildSuccessCallbacks.append(callback)
+
+ def addBuildFailureCallback(self, callback):
+ """Register a callback to be run if the build fails.
+
+ @param callback: The callback to run when the build fails.
+ @type callback: any callable
+ """
+ self.buildFailureCallbacks.append(callback)
+
+ def onBuildSucceeded(self):
+ """Execute build success callbacks.
+ """
+ for callback in self.buildSuccessCallbacks:
+ callback()
+
+ def onBuildFailed(self):
+ """Execute build failure callbacks.
+ """
+ for callback in self.buildFailureCallbacks:
+ callback()
+
+ def createTask(self, func=None):
+ """Construct a new task that will call the specified function.
+
+ This function wraps the function in an exception handler that prints out
+ the stacktrace and exception details if an exception is raised by the
+ function.
+
+ @param func: The function that will be called with no args by the task once
+ the task has been started.
+ @type func: any callable
+
+ @return: The newly created Task.
+ @rtype: L{Task}
+ """
+ if func is None:
+ return cake.task.Task()
+
+ # Save the script that created the task so that the task
+ # inherits that same script when executed.
+ currentScript = _Script.getCurrent()
+
+ def _wrapper():
+ if self.maximumErrorCount and self.errorCount >= self.maximumErrorCount:
+ # TODO: Output some sort of message saying the build is being terminated
+ # because of too many errors. But only output it once. Perhaps just set
+ # a flag and check that in the runner.
+ raise BuildError()
+
+ try:
+ # Make the creating script current; the old one is restored in the finally
+ oldScript = _Script.getCurrent()
+ _Script._current.value = currentScript
+ try:
+ return func()
+ finally:
+ _Script._current.value = oldScript
+ except BuildError:
+ # Assume build errors have already been reported
+ raise
+ except Exception, e:
+ tbs = [traceback.extract_tb(sys.exc_info()[2])]
+
+ t = task
+ while t is not None:
+ tb = getattr(t, "traceback", None)
+ if tb is not None:
+ tbs.append(t.traceback)
+ t = t.parent
+
+ tracebackString = ''.join(
+ ''.join(traceback.format_list(tb)) for tb in reversed(tbs)
+ )
+ exceptionString = ''.join(traceback.format_exception_only(e.__class__, e))
+ message = 'Unhandled Task Exception:\n%s%s' % (tracebackString, exceptionString)
+ if not self.logger.debugEnabled("stack"):
+ message += "Pass '--debug=stack' if you require a more complete stack trace.\n"
+ self.logger.outputError(message)
+ self.errors.append(message)
+ raise
+
+ task = cake.task.Task(_wrapper)
+
+ # Set a traceback for the parent script task
+ if self.logger.debugEnabled("stack"):
+ if currentScript is not None:
+ task.traceback = traceback.extract_stack()[:-1]
+
+ return task
+
+ def raiseError(self, message):
+ """Log an error and raise the BuildError exception.
+
+ @param message: The error message to output.
+ @type message: string
+
+ @raise BuildError: Raises a build error that should cause the current
+ task to fail.
+ """
+ self.logger.outputError(message)
+ self.errors.append(message)
+ raise BuildError(message)
+
+ def getByteCode(self, path, cached=True):
+ """Load a python file and return the compiled byte-code.
+
+ @param path: The path of the python file to load.
+ @type path: string
+
+ @param cached: True if the byte code should be cached to a separate
+ file for quicker loading next time.
+ @type cached: bool
+
+ @return: A code object that can be executed with the python 'exec'
+ statement.
+ @rtype: C{types.CodeType}
+ """
+ byteCode = self._byteCodeCache.get(path, None)
+ if byteCode is None:
+ # Cache the code in a user-supplied directory if provided.
+ if self.scriptCachePath is not None:
+ assert cake.path.isAbs(path) # Need an absolute path to get a unique hash.
+ pathDigest = cake.hash.sha1(path.encode("utf8")).digest()
+ pathDigestStr = cake.hash.hexlify(pathDigest)
+ cacheFilePath = cake.path.join(
+ self.scriptCachePath,
+ pathDigestStr[0],
+ pathDigestStr[1],
+ pathDigestStr[2],
+ pathDigestStr
+ )
+ cake.filesys.makeDirs(cake.path.dirName(cacheFilePath))
+ else:
+ cacheFilePath = None
+ byteCode = cake.bytecode.loadCode(path, cfile=cacheFilePath, cached=cached)
+ self._byteCodeCache[path] = byteCode
+ return byteCode
+
+ def notifyFileChanged(self, path):
+ """Let the engine know a file has changed.
+
+ This allows the engine to invalidate any information about the file
+ it may have previously cached.
+
+ @param path: The path of the file that has changed.
+ @type path: string
+ """
+ self._timestampCache.pop(path, None)
+
+ def getTimestamp(self, path):
+ """Get the timestamp of the file at the specified path.
+
+ @param path: Path of the file whose timestamp you want.
+ @type path: string
+
+ @return: The timestamp in seconds since 1 Jan, 1970 UTC.
+ @rtype: float
+ """
+ timestamp = self._timestampCache.get(path, None)
+ if timestamp is None:
+ stat = os.stat(path)
+ timestamp = time.mktime(time.gmtime(stat.st_mtime))
+ # The above calculation truncates to the nearest second so we need to
+ # re-add the fractional part back to the timestamp, otherwise sub-second precision is lost.
+ timestamp += math.fmod(stat.st_mtime, 1)
+ self._timestampCache[path] = timestamp
+ return timestamp
+
+ def updateFileDigestCache(self, path, timestamp, digest):
+ """Update the internal cache of file digests with a new entry.
+
+ @param path: The path of the file.
+ @param timestamp: The timestamp of the file at the time the digest
+ was calculated.
+ @param digest: The digest of the contents of the file.
+ """
+ key = (path, timestamp)
+ self._digestCache[key] = digest
+
+ def getFileDigest(self, path):
+ """Get the SHA1 digest of a file's contents.
+
+ @param path: Path of the file to digest.
+ @type path: string
+
+ @return: The SHA1 digest of the file's contents.
+ @rtype: string of 20 bytes
+ """
+ timestamp = self.getTimestamp(path)
+ key = (path, timestamp)
+ digest = self._digestCache.get(key, None)
+ if digest is None:
+ hasher = cake.hash.sha1()
+ f = open(path, 'rb')
+ try:
+ blockSize = 512 * 1024
+ data = f.read(blockSize)
+ while data:
+ hasher.update(data)
+ data = f.read(blockSize)
+ finally:
+ f.close()
+ digest = hasher.digest()
+ self._digestCache[key] = digest
+
+ return digest
+
+ def getDependencyInfo(self, target):
+ """Load the dependency info for the specified target.
+
+ The dependency info contains information about the parameters and
+ dependencies of a target at the time it was last built.
+
+ @param target: The absolute path of the target.
+ @type target: string
+
+ @return: A DependencyInfo object for the target.
+ @rtype: L{DependencyInfo}
+
+ @raise DependencyInfoError: if the dependency info could not be retrieved.
+ """
+ dependencyInfo = self._dependencyInfoCache.get(target, None)
+ if dependencyInfo is None:
+ depPath = self.getDependencyInfoPath(target)
+
+ # Read entire file at once otherwise thread-switching will kill performance.
+ try:
+ fileContents = cake.filesys.readFile(depPath)
+ except EnvironmentError:
+ raise DependencyInfoError("doesn't exist")
+
+ # Split magic signature from the pickled dependency info.
+ magicLength = len(DependencyInfo.MAGIC)
+ dependencyString = fileContents[:-magicLength]
+ dependencyMagic = fileContents[-magicLength:]
+
+ if dependencyMagic != DependencyInfo.MAGIC:
+ raise DependencyInfoError("has an invalid signature")
+
+ try:
+ dependencyInfo = pickle.loads(dependencyString)
+ except:
+ raise DependencyInfoError("could not be understood")
+
+ # Check that the dependency info is valid
+ if not isinstance(dependencyInfo, DependencyInfo):
+ raise DependencyInfoError("has an invalid instance")
+
+ if dependencyInfo.version != DependencyInfo.VERSION:
+ raise DependencyInfoError("version has changed")
+
+ self._dependencyInfoCache[target] = dependencyInfo
+
+ return dependencyInfo
+
+ def getDependencyInfoPath(self, target):
+ """Get the path of a dependency info file given its associated target.
+ """
+ # We need an absolute path to generate a unique hash.
+ assert cake.path.isAbs(target)
+ if self.dependencyInfoPath is not None:
+ pathDigest = cake.hash.sha1(target.encode("utf8")).digest()
+ pathDigestStr = cake.hash.hexlify(pathDigest)
+ return cake.path.join(
+ self.dependencyInfoPath,
+ pathDigestStr[0],
+ pathDigestStr[1],
+ pathDigestStr[2],
+ pathDigestStr[3],
+ pathDigestStr
+ )
+ else:
+ return target + '.dep'
+
+ def storeDependencyInfo(self, target, dependencyInfo):
+ """Store dependency info for the specified target.
+
+ @param target: Absolute path of the target.
+ @type target: string
+
+ @param dependencyInfo: The dependency info object to store.
+ @type dependencyInfo: L{DependencyInfo}
+ """
+ depPath = self.getDependencyInfoPath(target)
+
+ dependencyString = pickle.dumps(dependencyInfo, pickle.HIGHEST_PROTOCOL)
+
+ cake.filesys.writeFile(depPath, dependencyString + DependencyInfo.MAGIC)
+
+ self._dependencyInfoCache[target] = dependencyInfo
+
+class DependencyInfo(object):
+ """Object that holds the dependency info for a target.
+
+ @ivar version: The version of this dependency info.
+ @type version: int
+ @ivar targets: A list of target file paths.
+ @type targets: list of strings
+ @ivar args: The arguments used for the build.
+ @type args: usually a list of strings
+ """
+
+ VERSION = 3
+ """The most recent DependencyInfo version.
+
+ @type: int
+ """
+
+ MAGIC = "CKDP"
+ """A magic value stored in dependency files to ensure they are valid.
+
+ This value is written to the end of the dependency file. If the power goes
+ off or the computer stops while the dependency file is being written it will
+ be regarded as invalid unless this value has been written.
+ @type: string
+ """
+
+ def __init__(self, targets, args):
+ self.version = self.VERSION
+ self.targets = targets
+ self.args = args
+ self.depPaths = None
+ self.depTimestamps = None
+ self.depDigests = None
+
+class Configuration(object):
+ """A configuration is a collection of related Variants.
+
+ It is typically populated by a config.cake script.
+
+ @ivar engine: The Engine this configuration object belongs to.
+ @type engine: L{Engine}
+
+ @ivar path: The absolute path of the config script that was used to
+ initialise this configuration.
+ @type path: string
+
+ @ivar dir: The absolute path of the directory containing the config
+ script.
+ @type dir: string
+
+ @ivar baseDir: The absolute path of the directory that all relative
+ paths will be assumed to be relative to. Defaults to the directory
+ of the config script but may be overridden by the config script.
+ @type baseDir: string
+
+ @ivar scriptGlobals: A dictionary that will provide the initial
+ values of each scripts global variables.
+ @type scriptGlobals: dict
+ """
+
+ defaultBuildScriptName = 'build.cake'
+ """The name of the build script to execute if the user asked to
+ build a directory.
+ """
+
+ def __init__(self, path, engine):
+ """Construct a new Configuration.
+
+ @param path: Absolute path of the config script that will be
+ used to initialise this configuration.
+ @type path: string
+
+ @param engine: The Engine object this configuration belongs to.
+ @type engine: L{Engine}
+ """
+ self.engine = engine
+ self.path = path
+ self.dir = cake.path.dirName(path)
+ self.baseDir = self.dir
+ self.scriptGlobals = {}
+ self._variants = {}
+ self._executed = {}
+ self._executedLock = threading.Lock()
+
+ def basePath(self, path):
+ """Allows user-supplied conversion of a path passed to a Tool.
+
+ @param path: The path to convert.
+ @type path: string
+
+ @return: The path converted via a user-supplied function. If this
+ function hasn't been overridden by a user-supplied function the path
+ is returned as is.
+ @rtype: string
+ """
+ return path
+
+ def abspath(self, path):
+ """Convert a path to be absolute.
+
+ @param path: The path to convert to an absolute path.
+ @type path: string
+
+ @return: If the path was a relative path then returns the path
+ appended to self.baseDir, otherwise returns the path unchanged.
+ @rtype: string
+ """
+ if not os.path.isabs(path):
+ path = os.path.join(self.baseDir, path)
+ return path
+
+ def addVariant(self, variant):
+ """Register a new variant with this engine.
+
+ @param variant: The Variant object to register.
+ @type variant: L{Variant}
+
+ @raise KeyError: If a variant with the same keywords as an
+ existing variant is added (keywords must uniquely identify
+ a variant within a configuration).
+ """
+ key = frozenset(variant.keywords.iteritems())
+ if key in self._variants:
+ raise KeyError("Already added variant with these keywords: %r" % variant)
+
+ self._variants[key] = variant
+
+ def findAllVariants(self, keywords={}):
+ """Find all variants that match the specified keywords.
+
+ @param keywords: A collection of keywords to match against.
+ @type keywords: dictionary of string -> string or list of string
+
+ @return: Sequence of Variant objects that match the keywords.
+ @rtype: sequence of L{Variant}
+ """
+ for variant in self._variants.itervalues():
+ if variant.matches(**keywords):
+ yield variant
+
+ def findVariant(self, keywords, baseVariant=None):
+ """Find the variant that matches the specified keywords.
+
+ @param keywords: A dictionary of key/value pairs the variant needs
+ to match. The value can be either a string, "all", a list of
+ strings or None.
+
+ @param baseVariant: If specified then attempts to find the variant
+ that has the same keywords as this variant when the keyword is
+ not specified in 'keywords'.
+ @type baseVariant: L{Variant} or C{None}
+
+ @return: The variant that matches the keywords.
+ @rtype: L{Variant}
+
+ @raise LookupError: If no variants matched or more than one variant
+ matched the criteria.
+ """
+ if baseVariant is None:
+ results = list(self.findAllVariants(keywords))
+ else:
+ results = []
+ getBaseValue = baseVariant.keywords.get
+ for variant in self.findAllVariants(keywords):
+ for key, value in variant.keywords.iteritems():
+ if key not in keywords:
+ baseValue = getBaseValue(key, None)
+ if value != baseValue:
+ break
+ else:
+ results.append(variant)
+
+ if not results:
+ raise LookupError("No variants matched criteria.")
+ elif len(results) > 1:
+ msg = "Found %i variants that matched criteria.\n" % len(results)
+ msg += "".join("- %r\n" % v for v in results)
+ raise LookupError(msg)
+
+ return results[0]
+
+ def execute(self, path, variant):
+ """Execute a build script.
+
+ Uses this configuration with specified build variant.
+
+ @param path: Path of the build script.
+ @param variant: The variant to execute the script with.
+
+ @return: The Script object representing the script that will
+ be executed. Use the returned script's .task to wait for the
+ script to finish executing.
+ """
+ absPath = self.abspath(path)
+
+ if cake.filesys.isDir(absPath):
+ absPath = cake.path.join(absPath, self.defaultBuildScriptName)
+
+ absPath = os.path.normpath(absPath)
+
+ path = cake.path.relativePath(absPath, self.baseDir)
+
+ key = (os.path.normcase(path), variant)
+
+ currentScript = _Script.getCurrent()
+ if currentScript:
+ currentVariant = currentScript.variant
+ currentConfiguration = currentScript.configuration
+ else:
+ currentVariant = None
+ currentConfiguration = None
+
+ # Make sure the variant is constructed and ready for use.
+ variant._construct(self)
+
+ self._executedLock.acquire()
+ try:
+ script = self._executed.get(key, None)
+ if script is None:
+ tools = {}
+ for name, tool in variant.tools.items():
+ tools[name] = tool.clone()
+
+ def execute():
+ if self is not currentConfiguration:
+ self.engine.logger.outputInfo("Building with %s - %s\n" % (self.path, variant))
+ elif variant is not currentVariant:
+ self.engine.logger.outputInfo("Building with %s\n" % str(variant))
+ self.engine.logger.outputDebug(
+ "script",
+ "Executing %s\n" % script.path,
+ )
+ script.execute()
+ task = self.engine.createTask(execute)
+ script = _Script(
+ path=path,
+ configuration=self,
+ variant=variant,
+ task=task,
+ tools=tools,
+ engine=self.engine,
+ )
+ self._executed[key] = script
+ task.addCallback(
+ lambda: self.engine.logger.outputDebug(
+ "script",
+ "Finished %s\n" % script.path,
+ )
+ )
+ task.start(threadPool=self.engine.scriptThreadPool)
+ finally:
+ self._executedLock.release()
+
+ return script
+
+ def createDependencyInfo(self, targets, args, dependencies, calculateDigests=False):
+ """Construct a new DependencyInfo object.
+
+ @param targets: A list of file paths of targets.
+ @type targets: list of string
+ @param args: A value representing the parameters of the build.
+ @type args: object
+ @param dependencies: A list of file paths of dependencies.
+ @type dependencies: list of string
+ @param calculateDigests: Whether or not to store the digests of
+ dependencies in the DependencyInfo.
+ @type calculateDigests: bool
+
+ @return: A DependencyInfo object.
+ """
+ dependencyInfo = DependencyInfo(targets=list(targets), args=args)
+ paths = dependencyInfo.depPaths = list(dependencies)
+ abspath = self.abspath
+ paths = [abspath(p) for p in paths]
+ getTimestamp = self.engine.getTimestamp
+ dependencyInfo.depTimestamps = [getTimestamp(p) for p in paths]
+ if calculateDigests:
+ getFileDigest = self.engine.getFileDigest
+ dependencyInfo.depDigests = [getFileDigest(p) for p in paths]
+ return dependencyInfo
+
+ def storeDependencyInfo(self, dependencyInfo):
+ """Call this method after a target was built to save the
+ dependencies of the target.
+
+ @param dependencyInfo: The dependency info object to be stored.
+ @type dependencyInfo: L{DependencyInfo}
+ """
+ absTargetPath = self.abspath(dependencyInfo.targets[0])
+ self.engine.storeDependencyInfo(absTargetPath, dependencyInfo)
+
+ def checkDependencyInfo(self, targetPath, args):
+ """Check dependency info to see if the target is up to date.
+
+ The dependency info contains information about the parameters and
+ dependencies of a target at the time it was last built.
+
+ @param targetPath: The path of the target.
+ @type targetPath: string
+ @param args: The current arguments.
+ @type args: list of string
+
+ @return: A tuple containing the previous DependencyInfo or None if not
+ found, and the string reason to build or None if the target is up
+ to date.
+ @rtype: tuple of (L{DependencyInfo} or None, string or None)
+ """
+ abspath = self.abspath
+ absTargetPath = abspath(targetPath)
+ try:
+ dependencyInfo = self.engine.getDependencyInfo(absTargetPath)
+ except DependencyInfoError, e:
+ return None, "'" + targetPath + ".dep' " + str(e)
+
+ if self.engine.forceBuild:
+ return dependencyInfo, "rebuild has been forced"
+
+ if args != dependencyInfo.args:
+ return dependencyInfo, "'" + repr(args) + "' != '" + repr(dependencyInfo.args) + "'"
+
+ isFile = cake.filesys.isFile
+ for target in dependencyInfo.targets:
+ if not isFile(abspath(target)):
+ return dependencyInfo, "'" + target + "' doesn't exist"
+
+ getTimestamp = self.engine.getTimestamp
+ paths = dependencyInfo.depPaths
+ timestamps = dependencyInfo.depTimestamps
+ assert len(paths) == len(timestamps)
+ for i in xrange(len(paths)):
+ path = paths[i]
+ try:
+ if getTimestamp(abspath(path)) != timestamps[i]:
+ return dependencyInfo, "'" + path + "' has been changed"
+ except EnvironmentError:
+ return dependencyInfo, "'" + path + "' no longer exists"
+
+ return dependencyInfo, None
+
+ def checkReasonToBuild(self, targets, sources):
+ """Check for a reason to build given a list of targets and sources.
+
+ @param targets: A list of target files.
+ @type targets: list of string
+ @param sources: A list of source files.
+ @type sources: list of string
+
+ @return: A reason to build if a rebuild is required, otherwise None.
+ @rtype: string or None
+ """
+
+ abspath = self.abspath
+
+ if self.engine.forceBuild:
+ return "rebuild has been forced"
+
+ getTimestamp = self.engine.getTimestamp
+
+ oldestTimestamp = None
+ for t in targets:
+ try:
+ timestamp = getTimestamp(abspath(t))
+ except EnvironmentError:
+ return "'" + t + "' doesn't exist"
+ if oldestTimestamp is None or timestamp < oldestTimestamp:
+ oldestTimestamp = timestamp
+
+ newestTimestamp = None
+ for s in sources:
+ try:
+ timestamp = getTimestamp(abspath(s))
+ except EnvironmentError:
+ return "'" + s + "' doesn't exist"
+ if newestTimestamp is None or timestamp > newestTimestamp:
+ newestTimestamp = timestamp
+ newestSource = s
+
+ if newestTimestamp is not None and oldestTimestamp is not None and newestTimestamp > oldestTimestamp:
+ return "'" + newestSource + "' has been changed"
+
+ return None
+
+ def primeFileDigestCache(self, dependencyInfo):
+ """Prime the engine's file-digest cache using any cached
+ information stored in this dependency info.
+ """
+ if dependencyInfo.depDigests and dependencyInfo.depTimestamps:
+ paths = dependencyInfo.depPaths
+ timestamps = dependencyInfo.depTimestamps
+ digests = dependencyInfo.depDigests
+ assert len(digests) == len(paths)
+ assert len(timestamps) == len(paths)
+ updateFileDigestCache = self.engine.updateFileDigestCache
+ abspath = self.abspath
+ for i in xrange(len(paths)):
+ updateFileDigestCache(abspath(paths[i]), timestamps[i], digests[i])
+
+ def calculateDigest(self, dependencyInfo):
+ """Calculate the digest of the sources/dependencies.
+
+ @return: The current digest of the dependency info.
+ @rtype: string of 20 bytes
+ """
+ self.primeFileDigestCache(dependencyInfo)
+
+ hasher = cake.hash.sha1()
+ addToDigest = hasher.update
+
+ encodeToUtf8 = lambda value, encode=codecs.utf_8_encode: encode(value)[0]
+ getFileDigest = self.engine.getFileDigest
+
+ # Include the paths of the targets in the digest
+ for target in dependencyInfo.targets:
+ addToDigest(encodeToUtf8(target))
+
+ # Include parameters of the build
+ addToDigest(encodeToUtf8(repr(dependencyInfo.args)))
+
+ abspath = self.abspath
+ for path in dependencyInfo.depPaths:
+ # Include the dependency file's path and content digest in
+ # this digest.
+ addToDigest(encodeToUtf8(path))
+ addToDigest(getFileDigest(abspath(path)))
+
+ return hasher.digest()
Index: cake/cake/filesys.py
===================================================================
--- cake/cake/filesys.py (revision 0)
+++ cake/cake/filesys.py (working copy)
@@ -0,0 +1,203 @@
+"""File System Utilities.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import shutil
+import os
+import os.path
+import time
+
+import cake.path
+
+def toUtc(timestamp):
+  """Convert a timestamp from local time-zone to UTC.
+
+  @param timestamp: Seconds since the epoch, interpreted in local time.
+  @return: The equivalent timestamp shifted to UTC.
+
+  NOTE(review): mktime(gmtime(t)) applies the current UTC offset and is
+  DST-sensitive around transitions -- confirm this is acceptable here.
+  """
+  return time.mktime(time.gmtime(timestamp))
+
+def exists(path):
+  """Check if a file or directory exists at the path.
+
+  Delegates to L{cake.path.exists}.
+
+  @param path: The path to check for.
+  @type path: string
+
+  @return: True if a file or directory exists, otherwise False.
+  @rtype: bool
+  """
+  return cake.path.exists(path)
+
+def isFile(path):
+  """Check if a file exists at the path.
+
+  Delegates to L{cake.path.isFile}.
+
+  @param path: The path of the file to check for.
+  @type path: string
+
+  @return: True if the file exists, otherwise False.
+  @rtype: bool
+  """
+  return cake.path.isFile(path)
+
+def isDir(path):
+  """Check if a directory exists at the path.
+
+  Delegates to L{cake.path.isDir}.
+
+  @param path: The path of the directory to check for.
+  @type path: string
+
+  @return: True if the directory exists, otherwise False.
+  @rtype: bool
+  """
+  return cake.path.isDir(path)
+
+def remove(path):
+  """Remove a file.
+
+  Unlike os.remove() this function fails silently if the
+  file does not exist.
+
+  @param path: The path of the file to remove.
+  @type path: string
+  @raise EnvironmentError: If the path exists but could not be
+  removed (eg. it is a directory or is locked).
+  """
+  try:
+    os.remove(path)
+  except EnvironmentError:
+    # Ignore failure if file doesn't exist. Fail if it's a directory.
+    if os.path.exists(path):
+      raise
+
+def removeTree(path):
+  """Recursively delete all files and directories at the specified path.
+
+  Unlike os.removedirs() this function stops deleting entries when
+  the specified path and all its children have been deleted.
+
+  os.removedirs() will continue deleting parent directories if they are
+  empty.
+
+  @param path: Path to the directory containing the tree to remove
+  @raise OSError: If 'path' itself does not exist or a directory
+  could not be removed.
+  """
+  # Walk bottom-up so each directory is empty by the time it is removed.
+  for root, dirs, files in os.walk(path, topdown=False):
+    for name in files:
+      p = os.path.join(root, name)
+      remove(p)
+    for name in dirs:
+      p = os.path.join(root, name)
+      os.rmdir(p)
+  # Finally remove the root directory itself.
+  os.rmdir(path)
+
+def copyFile(source, target):
+  """Copy a file from source path to target path.
+
+  Overwrites the target path if it exists and is writeable.
+
+  Note that unlike writeFile() this does not create the target
+  directory; it must already exist.
+
+  @param source: The path of the source file.
+  @type source: string
+  @param target: The path of the target file.
+  @type target: string
+  """
+  shutil.copyfile(source, target)
+
+def makeDirs(path):
+  """Recursively create directories.
+
+  Unlike os.makedirs(), it does not throw an exception if the
+  directory already exists.
+
+  @param path: The path of the directory to create.
+  @type path: string
+  """
+  # Don't try to create an empty directory, eg. if someone calls
+  # makeDirs(os.path.dirname("somefile.txt")).
+  if not path:
+    return
+
+  # Don't try to create directory at the root level, eg: 'C:\\'.
+  if cake.path.isMount(path):
+    return
+
+  # Mirrors the os.makedirs() algorithm: create missing parents first.
+  head, tail = os.path.split(path)
+  if not tail: # Trailing slash, eg. 'xxx/yyy/' splits to ('xxx/yyy', '').
+    head, tail = os.path.split(head)
+  if head and tail and not os.path.exists(head):
+    makeDirs(head)
+    if tail == os.curdir: # xxx/newdir/. exists if xxx/newdir exists.
+      return
+
+  try:
+    os.mkdir(path)
+  except EnvironmentError:
+    # Ignore failure due to directory already existing.
+    if not os.path.isdir(path):
+      raise
+
+def walkTree(path, recursive=True, includeMatch=None):
+  """Walk a directory for file and directory names.
+
+  @param path: The path of the directory to search under.
+  @type path: string
+
+  @param recursive: Whether or not to recursively walk through
+  sub-directories.
+  @type recursive: bool
+
+  @param includeMatch: A callable used to decide whether to include
+  certain files in the result. This could be a python callable that
+  returns True to include the file or False to exclude it, or a regular
+  expression function such as re.compile().match or re.match.
+  @type includeMatch: any callable
+
+  @return: A sequence of file and directory paths relative
+  to the specified directory path.
+
+  NOTE(review): in the recursive branch includeMatch is passed paths
+  relative to 'path', but in the non-recursive branch it is passed bare
+  entry names -- confirm callers expect this asymmetry.
+  """
+  if recursive:
+    firstChar = len(path) + 1
+    for dirPath, dirNames, fileNames in os.walk(path):
+      dirPath = dirPath[firstChar:] # Make dirPath relative to path
+
+      # NOTE(review): 'path' is re-bound below, shadowing the parameter.
+      # Harmless since os.walk() has already captured it, but renaming
+      # the loop variable would aid clarity.
+      newDirNames = []
+      for name in dirNames:
+        path = os.path.join(dirPath, name)
+        if includeMatch is None or includeMatch(path):
+          newDirNames.append(name)
+          yield path
+      dirNames[:] = newDirNames # Modify dirNames so we don't recurse into excluded directories.
+
+      for name in fileNames:
+        path = os.path.join(dirPath, name)
+        if includeMatch is None or includeMatch(path):
+          yield path
+  else:
+    for name in os.listdir(path):
+      if includeMatch is None or includeMatch(name):
+        yield name
+
+def readFile(path):
+  """Read data from a file.
+
+  The file is opened in binary mode and is always closed, even if the
+  read fails.
+
+  @param path: The path of the file to read.
+  @type path: string
+
+  @return: The data read from the file.
+  @rtype: string
+  """
+  f = open(path, "rb")
+  try:
+    return f.read()
+  finally:
+    f.close()
+
+def writeFile(path, data):
+  """Write data to a file.
+
+  Creates any missing parent directories before writing. The file is
+  opened in binary mode and is always closed, even if the write fails.
+
+  @param path: The path of the file to write.
+  @type path: string
+  @param data: The data to write to the file.
+  @type data: string
+  """
+  makeDirs(os.path.dirname(path))
+  f = open(path, "wb")
+  try:
+    f.write(data)
+  finally:
+    f.close()
Index: cake/cake/gnu.py
===================================================================
--- cake/cake/gnu.py (revision 0)
+++ cake/cake/gnu.py (working copy)
@@ -0,0 +1,55 @@
+"""Utilities for dealing with GNU tools.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+def parseDependencyFile(path, targetSuffix):
+  """Parse a .d file and return the list of dependencies.
+
+  The file is expected to be in GNU make rule format, ie.
+  'target: dep1 dep2 \\<newline> dep3 ...'.
+
+  @param path: The path to the dependency file.
+  @type path: string
+  @param targetSuffix: Suffix used by targets.
+  @type targetSuffix: string
+  @return: A list of unique dependencies, with escaped spaces
+  unescaped and in first-seen order.
+  @rtype: list of string
+  """
+  dependencies = []
+  uniqueDeps = set()
+
+  def addPath(path):
+    # Record each dependency once, preserving first-seen order.
+    if path and path not in uniqueDeps:
+      uniqueDeps.add(path)
+      path = path.replace('\\ ', ' ') # fix escaped spaces
+      dependencies.append(path)
+
+  f = open(path, 'rt')
+  try:
+    text = f.read()
+    text = text.replace('\\\n', ' ') # join escaped lines
+    text = text.replace('\n', ' ') # join other lines
+    text = text.lstrip() # strip leading whitespace
+
+    # Find the 'target:' rule
+    # NOTE(review): this matches the first occurrence of suffix + ':'
+    # anywhere in the text, which could also match inside a dependency
+    # path -- confirm target always appears first.
+    i = text.find(targetSuffix + ':')
+    if i != -1:
+      text = text[i+len(targetSuffix)+1:] # strip target + ':'
+
+    # Split the remainder on unescaped spaces; each token is a path.
+    while True:
+      text = text.lstrip() # strip leading whitespace
+
+      i = text.find(' ')
+      while i != -1 and text[i-1] == '\\': # Skip escaped spaces
+        i = text.find(' ', i+1)
+
+      if i == -1:
+        addPath(text) # Last (or only) path.
+        break
+      else:
+        addPath(text[:i])
+        text = text[i:]
+  finally:
+    f.close()
+
+  return dependencies
Index: cake/cake/hash.py
===================================================================
--- cake/cake/hash.py (revision 0)
+++ cake/cake/hash.py (working copy)
@@ -0,0 +1,35 @@
+"""Utilities for creating hashes.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import binascii
+
+# hashlib is available on Python 2.5+; fall back to the deprecated
+# 'sha' and 'md5' modules on older interpreters. Both branches expose
+# the same sha1()/md5() factory interface.
+try:
+  import hashlib
+  def sha1(*args, **kwargs):
+    """Return a new SHA-1 hash object."""
+    return hashlib.sha1(*args, **kwargs)
+  def md5(*args, **kwargs):
+    """Return a new MD5 hash object."""
+    return hashlib.md5(*args, **kwargs)
+except ImportError:
+  import sha
+  def sha1(*args, **kwargs):
+    """Return a new SHA-1 hash object."""
+    return sha.new(*args, **kwargs)
+  import md5 as md5lib
+  def md5(*args, **kwargs):
+    """Return a new MD5 hash object."""
+    return md5lib.new(*args, **kwargs)
+
+def hexlify(digest):
+  """Get the hex-string representation of a digest.
+
+  @param digest: A series of bytes comprising the digest.
+  eg. A SHA-1 digest will be 20 bytes.
+  @type digest: str/bytes
+
+  @return: A string containing the hexadecimal string representation of the
+  digest.
+  @rtype: unicode
+  """
+  # binascii.hexlify() returns bytes; decode for a unicode string.
+  return binascii.hexlify(digest).decode("utf8")
Index: cake/cake/library/__init__.py
===================================================================
--- cake/cake/library/__init__.py (revision 0)
+++ cake/cake/library/__init__.py (working copy)
@@ -0,0 +1,349 @@
+"""Base Class and Utilities for Cake Tools.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+from cake.task import Task
+from cake.script import AsyncResult, DeferredResult, Script, ScriptResult
+
+# These classes are also exported from this module.
+AsyncResult = AsyncResult
+DeferredResult = DeferredResult
+ScriptResult = ScriptResult
+
+class ToolMetaclass(type):
+  """This metaclass ensures that new instance variables can only be added to
+  an instance during its __init__.
+  """
+
+  def __init__(cls, name, bases, dct):
+    super(ToolMetaclass, cls).__init__(name, bases, dct)
+
+    # Depth counter: non-zero only while an __init__ somewhere in the
+    # class hierarchy is executing, which is the only time new
+    # attributes may be introduced.
+    cls._initCount = 0
+
+    # Wrap __init__ to maintain the depth counter around construction.
+    oldInit = cls.__init__
+    def __init__(self, *args, **kwargs):
+      self._initCount = self._initCount + 1
+      oldInit(self, *args, **kwargs)
+      self._initCount = self._initCount - 1
+    cls.__init__ = __init__
+
+    # Wrap __setattr__ to reject new attributes outside of __init__.
+    oldSetattr = cls.__setattr__
+    def __setattr__(self, name, value):
+      if not self._initCount and not hasattr(self, name):
+        raise AttributeError(name)
+      oldSetattr(self, name, value)
+    cls.__setattr__ = __setattr__
+
+def memoise(func):
+ """Decorator that can be placed on Tool methods to memoise the result.
+
+ The result cache is invalidated whenever an attribute is set on the
+ instance.
+
+ @param func: The function to memoise.
+ @type func: function
+ """
+
+ undefined = object()
+ def run(*args, **kwargs):
+ kwargsTuple = tuple((k,v) for k, v in kwargs.iteritems())
+
+ self = args[0]
+ key = (func, args[1:], kwargsTuple)
+
+ cache = self._Tool__memoise
+ result = cache.get(key, undefined)
+ if result is undefined:
+ result = func(*args, **kwargs)
+ cache[key] = result
+ return result
+
+ try:
+ run.func_name = func.func_name
+ run.func_doc = func.func_doc
+ except AttributeError:
+ pass
+
+ return run
+
+class Tool(object):
+  """Base class for user-defined Cake tools.
+  """
+
+  __metaclass__ = ToolMetaclass
+
+  enabled = True
+  """Enable/disable this tool.
+
+  If the tool is disabled it should not produce any output files but
+  it should still return the paths to those potential output files so
+  other tools can use them.
+
+  @type: bool
+  """
+
+  def __init__(self, configuration):
+    # Per-instance cache used by the memoise() decorator.
+    self.__memoise = {}
+    self.configuration = configuration
+    self.engine = configuration.engine
+
+  def __setattr__(self, name, value):
+    # Setting any attribute (other than the cache itself, or before the
+    # cache exists during construction) invalidates all memoised
+    # results, since they may depend on tool settings.
+    if name != '_Tool__memoise' and hasattr(self, '_Tool__memoise'):
+      self._clearCache()
+    super(Tool, self).__setattr__(name, value)
+
+  def _clearCache(self):
+    """Clear the memoise cache due to some change.
+    """
+    self.__memoise.clear()
+
+  def clone(self):
+    """Return an independent clone of this tool.
+
+    The default clone behaviour performs a deep copy of any builtin
+    types, and a clone of any Tool-derived objects. Everything else
+    will be shallow copied. You should override this method if you
+    need a more sophisticated clone.
+    """
+    new = object.__new__(self.__class__)
+    new.__dict__ = cloneTools(self.__dict__)
+    return new
+
+class DirectoryTarget(object):
+  """A class returned by tools that produce a directory result.
+
+  @ivar path: The path to the target directory.
+  @type path: string
+  @ivar task: A task that completes when the target directory has been written.
+  @type task: L{Task}
+  """
+
+  def __init__(self, path, task):
+    """Construct a DirectoryTarget from a path and task.
+    """
+    self.path = path
+    self.task = task
+
+  def __str__(self):
+    """Return the string representation of this object.
+
+    Returns the target's path so the object can be used directly in
+    string formatting.
+    """
+    return self.path
+
+class FileTarget(object):
+  """A class returned by tools that produce a file result.
+
+  @ivar path: The path to the target file.
+  @type path: string
+  @ivar task: A task that completes when the target file has been written.
+  @type task: L{Task}
+  """
+
+  def __init__(self, path, task):
+    """Construct a FileTarget from a path and task.
+    """
+    self.path = path
+    self.task = task
+
+  def __str__(self):
+    """Return the string representation of this object.
+
+    Returns the target's path so the object can be used directly in
+    string formatting.
+    """
+    return self.path
+
+def _findAsyncResults(value):
+  """Return a sequence of AsyncResult objects found in the specified value.
+
+  Recursively searches builtin types 'list', 'tuple' and 'dict'.
+  For dicts both keys and values are searched.
+
+  @param value: The value to search; may itself be an AsyncResult.
+  @return: A generator yielding each AsyncResult found.
+  """
+  if isinstance(value, AsyncResult):
+    yield value
+  elif isinstance(value, (list, tuple)):
+    for item in value:
+      for result in _findAsyncResults(item):
+        yield result
+  elif isinstance(value, dict):
+    for k, v in value.iteritems():
+      for result in _findAsyncResults(k):
+        yield result
+      for result in _findAsyncResults(v):
+        yield result
+
+def _resolveAsyncResults(value):
+  """Return the equivalent value with all AsyncResults resolved with their
+  actual results.
+
+  Caller must ensure that all AsyncResult values have completed before calling this.
+
+  @param value: The value to resolve; lists, tuples and dicts are
+  rebuilt with their contents resolved recursively.
+  """
+  # An AsyncResult's result may itself be an AsyncResult; unwrap until
+  # a concrete value is reached.
+  while isinstance(value, AsyncResult):
+    assert value.task.completed
+    value = value.result
+
+  if isinstance(value, (tuple, list)):
+    return type(value)(_resolveAsyncResults(x) for x in value)
+  elif isinstance(value, dict):
+    return type(value)(
+      (_resolveAsyncResults(k), _resolveAsyncResults(v)) for k, v in value.iteritems()
+      )
+  else:
+    return value
+
+def waitForAsyncResult(func):
+  """Decorator to be used with functions that need to
+  wait for its argument values to become available before
+  calling the function.
+
+  eg.
+  @waitForAsyncResult
+  def someFunction(source):
+    return source + '.obj'
+
+  Calling above someFunction() with an AsyncResult will return an AsyncResult
+  whose result is the return value of the function
+  """
+  def call(*args, **kwargs):
+
+    asyncResults = list(_findAsyncResults(args))
+    asyncResults.extend(_findAsyncResults(kwargs))
+
+    # Fast path: no asynchronous arguments, call the function directly.
+    if not asyncResults:
+      return func(*args, **kwargs)
+
+    # Prefer the engine's task factory when running inside a script so
+    # created tasks are associated with that engine.
+    currentScript = Script.getCurrent()
+    if currentScript is not None:
+      engine = currentScript.engine
+      createTask = engine.createTask
+    else:
+      createTask = Task
+
+    def onAsyncResultReady(asyncResult):
+      """Called when an AsyncResult is ready.
+
+      Recurse on the result to see if it contains any nested AsyncResult objects.
+      If so then the task for this callback will only complete after those nested
+      AsyncResult values are available.
+      """
+      for result in _findAsyncResults(asyncResult.result):
+        waitTask = createTask(lambda r=result: onAsyncResultReady(r))
+        waitTask.startAfter(result.task)
+
+    # One wait-task per AsyncResult argument; each starts only after
+    # its AsyncResult's task has completed.
+    waitTasks = []
+    for asyncResult in asyncResults:
+      waitTask = createTask(lambda r=asyncResult: onAsyncResultReady(r))
+      waitTask.startAfter(asyncResult.task)
+      waitTasks.append(waitTask)
+
+    def run():
+      # All AsyncResults have completed by the time this task runs, so
+      # it is safe to substitute their final values into the arguments.
+      newArgs = _resolveAsyncResults(args)
+      newKwargs = _resolveAsyncResults(kwargs)
+      return func(*newArgs, **newKwargs)
+
+    runTask = createTask(run)
+    runTask.startAfter(waitTasks)
+
+    return DeferredResult(runTask)
+
+  return call
+
+@waitForAsyncResult
+def flatten(value):
+  """Flattens lists/tuples recursively to a single flat list of items.
+
+  Sets are also flattened. Any AsyncResult values are resolved first
+  by the waitForAsyncResult decorator.
+
+  @param value: A potentially nested list of items, potentially containing
+  AsyncResult values.
+
+  @return: The flattened list or if any of the items are AsyncResult values then
+  an AsyncResult value that results in the flattened items.
+  """
+  sequenceTypes = (list, tuple, set)
+
+  def _flatten(value):
+    # Depth-first traversal preserving left-to-right item order.
+    if isinstance(value, sequenceTypes):
+      for item in value:
+        for x in _flatten(item):
+          yield x
+    else:
+      yield value
+
+  return list(_flatten(value))
+
+def getTask(value):
+  """Get the task that builds this file.
+
+  @param value: The ScriptResult, FileTarget, Task or string
+  representing the value.
+
+  @return: The Task associated with the value, or None if the value
+  has no associated task (eg. a plain path string).
+  """
+  if isinstance(value, Task):
+    return value
+  elif hasattr(value, "task") and isinstance(value.task, Task):
+    return value.task
+  else:
+    return None
+
+def getTasks(files):
+  """Get the set of all tasks that build these files.
+
+  @param files: A list of ScriptResult, FileTarget, Task or string
+  representing the sources of some operation.
+
+  @return: A list of the Tasks that build the given files. Values
+  with no associated task are skipped.
+  """
+  tasks = []
+  for f in files:
+    task = getTask(f)
+    if task is not None:
+      tasks.append(task)
+  return tasks
+
+def getResult(value):
+  """Get the result of a value that may be a ScriptResult.
+
+  Unwraps nested AsyncResults until a concrete value is reached.
+  """
+  while isinstance(value, AsyncResult):
+    value = value.result
+  return value
+
+def getResults(values):
+  """Get the results of a list of values that may be ScriptResult
+  objects.
+
+  @return: A generator yielding each value's resolved result.
+  """
+  for value in values:
+    yield getResult(value)
+
+def getPath(file):
+  """Get the path of a single file value.
+
+  @param file: A FileTarget, Task, AsyncResult or path string.
+  @return: The file's path, or None if the value is a bare Task
+  (which has no path). Plain values are returned unchanged.
+  """
+  file = getResult(file)
+
+  if isinstance(file, FileTarget):
+    return file.path
+  elif isinstance(file, Task):
+    return None
+  else:
+    return file
+
+def getPaths(files):
+  """Get the paths of a list of file values.
+
+  @param files: A list of FileTarget, Task, AsyncResult or string values.
+  @return: A list of paths; values with no path (bare Tasks) are skipped.
+  """
+  paths = []
+  for f in files:
+    path = getPath(f)
+    if path is not None:
+      paths.append(path)
+  return paths
+
+def cloneTools(obj):
+  """Return a deep copy of any Tool-derived objects or builtin types.
+
+  @param obj: The given object to copy.
+  @return: A copy of the given object for Tool-derived or builtin
+  container types (dict, list, tuple, set), and a reference to the
+  same object for anything else.
+  """
+  if isinstance(obj, Tool):
+    return obj.clone()
+  elif isinstance(obj, dict):
+    return dict((cloneTools(k), cloneTools(v)) for k, v in obj.iteritems())
+  elif isinstance(obj, (list, tuple, set)):
+    return type(obj)(cloneTools(i) for i in obj)
+  else:
+    return obj
Index: cake/cake/library/compilers/__init__.py
===================================================================
--- cake/cake/library/compilers/__init__.py (revision 0)
+++ cake/cake/library/compilers/__init__.py (working copy)
@@ -0,0 +1,2537 @@
+"""Base Class and Utilities for C/C++ Compiler Tools.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import sys
+import weakref
+import os
+import os.path
+import tempfile
+import subprocess
+import itertools
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
+import cake.filesys
+import cake.hash
+import cake.path
+import cake.system
+import cake.zipping
+
+from cake.gnu import parseDependencyFile
+from cake.library import (
+ Tool, FileTarget, AsyncResult,
+ memoise, waitForAsyncResult, flatten,
+ getPaths, getPath, getResult, getResults, getTasks, getTask,
+ )
+from cake.task import Task
+
+class CompilerNotFoundError(Exception):
+  """Exception raised when a compiler cannot be found.
+
+  This exception may be raised by the findCompiler() group of
+  functions such as L{cake.library.compilers.msvc.findMsvcCompiler}
+  and L{cake.library.compilers.gcc.findGccCompiler}.
+
+  Callers may catch this to fall back to an alternative compiler.
+  """
+  pass
+
+class CompilerTarget(FileTarget):
+  """Base class for compiler targets.
+
+  @ivar compiler: The compiler used to build the target.
+  @type compiler: L{Compiler}
+  """
+  def __init__(self, path, task, compiler):
+    """Construct a CompilerTarget from a path, task and compiler.
+    """
+    FileTarget.__init__(self, path, task)
+    self.compiler = compiler
+
+class PchTarget(CompilerTarget):
+  """A precompiled header target.
+
+  @ivar pch: The pch file target.
+  @type pch: L{FileTarget}
+  @ivar object: The object file target, or None if the compiler
+  produces no separate object file for the pch.
+  @type object: L{FileTarget} or None
+  @ivar header: The #include used to build the pch.
+  @type header: string
+  """
+  def __init__(self, path, task, compiler, header, object):
+    """Construct a PchTarget.
+
+    @param object: Path of the side-effect object file, or None.
+    """
+    CompilerTarget.__init__(self, path, task, compiler)
+    self.pch = FileTarget(path, task)
+    if object is None:
+      self.object = None
+    else:
+      self.object = FileTarget(object, task)
+    self.header = header
+
+class ObjectTarget(CompilerTarget):
+  """An object target.
+
+  @ivar object: The object file target (same path/task as this target).
+  @type object: L{FileTarget}
+  """
+  def __init__(self, path, task, compiler):
+    """Construct an ObjectTarget from a path, task and compiler.
+    """
+    CompilerTarget.__init__(self, path, task, compiler)
+    self.object = FileTarget(path, task)
+
+class LibraryTarget(CompilerTarget):
+  """A library target.
+
+  @ivar library: The library file target (same path/task as this target).
+  @type library: L{FileTarget}
+  """
+  def __init__(self, path, task, compiler):
+    """Construct a LibraryTarget from a path, task and compiler.
+    """
+    CompilerTarget.__init__(self, path, task, compiler)
+    self.library = FileTarget(path, task)
+
+class ModuleTarget(CompilerTarget):
+  """A module target.
+
+  @ivar module: The module file target.
+  @type module: L{FileTarget}
+  @ivar library: An optional import library file target, or None.
+  @type library: L{FileTarget} or None
+  @ivar manifest: An optional manifest file target, or None.
+  @type manifest: L{FileTarget} or None
+  """
+  def __init__(self, path, task, compiler, library, manifest):
+    """Construct a ModuleTarget.
+
+    @param library: Path of the import library, or None.
+    @param manifest: Path of the manifest file, or None.
+    """
+    CompilerTarget.__init__(self, path, task, compiler)
+    self.module = FileTarget(path, task)
+    if library is None:
+      self.library = None
+    else:
+      self.library = FileTarget(library, task)
+    if manifest is None:
+      self.manifest = None
+    else:
+      self.manifest = FileTarget(manifest, task)
+
+class ProgramTarget(CompilerTarget):
+  """A program target.
+
+  @ivar program: The program file target.
+  @type program: L{FileTarget}
+  @ivar manifest: An optional manifest file target, or None.
+  @type manifest: L{FileTarget} or None
+  """
+  def __init__(self, path, task, compiler, manifest):
+    """Construct a ProgramTarget.
+
+    @param manifest: Path of the manifest file, or None.
+    """
+    CompilerTarget.__init__(self, path, task, compiler)
+    self.program = FileTarget(path, task)
+    if manifest is None:
+      self.manifest = None
+    else:
+      self.manifest = FileTarget(manifest, task)
+
+class ResourceTarget(CompilerTarget):
+  """A resource target.
+
+  @ivar resource: The resource file target (same path/task as this target).
+  @type resource: L{FileTarget}
+  """
+  def __init__(self, path, task, compiler):
+    """Construct a ResourceTarget from a path, task and compiler.
+    """
+    CompilerTarget.__init__(self, path, task, compiler)
+    self.resource = FileTarget(path, task)
+
+def getLinkPaths(files):
+  """Get the paths to pass to the linker for a list of file values.
+
+  For PchTargets the side-effect object file (if any) is used; plain
+  FileTargets contribute their path; other values pass through as-is.
+  AsyncResults are resolved first.
+
+  @param files: A list of targets, AsyncResults or path strings.
+  @return: A list of path strings.
+  """
+  paths = []
+  for f in files:
+    while isinstance(f, AsyncResult):
+      f = f.result
+    if isinstance(f, PchTarget):
+      # A pch contributes its object file to the link, if it has one.
+      if f.object is not None:
+        paths.append(f.object.path)
+    elif isinstance(f, FileTarget):
+      paths.append(f.path)
+    else:
+      paths.append(f)
+  return paths
+
+def getLibraryPaths(files):
+  """Get the library paths for a list of file values.
+
+  Nested lists/sets/tuples are flattened recursively. For
+  ModuleTargets the import library's path is used; plain FileTargets
+  contribute their path; other values pass through as-is.
+  AsyncResults are resolved first.
+
+  @param files: A (possibly nested) list of targets, AsyncResults or
+  path strings.
+  @return: A flat list of path strings.
+  """
+  paths = []
+  for f in files:
+    while isinstance(f, AsyncResult):
+      f = f.result
+    if isinstance(f, (list, set, tuple)):
+      paths.extend(getLibraryPaths(f))
+    elif isinstance(f, ModuleTarget):
+      paths.append(f.library.path)
+    elif isinstance(f, FileTarget):
+      paths.append(f.path)
+    else:
+      paths.append(f)
+  return paths
+
+class Command(object):
+  """A callable that pairs a command-line argument list with the
+  function that executes it.
+
+  repr() of a Command is the repr() of its argument list, allowing the
+  arguments to be included in digests/log output while remaining
+  callable.
+  """
+
+  def __init__(self, args, func):
+    self.args = args
+    self.func = func
+
+  def __repr__(self):
+    return repr(self.args)
+
+  def __call__(self, *args):
+    # Delegate the actual execution to the wrapped function.
+    return self.func(*args)
+
+def makeCommand(args):
+  """Decorator factory that wraps a function as a L{Command} carrying
+  the given argument list.
+
+  eg.
+  @makeCommand(["gcc", "-c", source])
+  def compile():
+    ...
+  """
+  def run(func):
+    return Command(args, func)
+  return run
+
+def _escapeArg(arg):
+  """Wrap an argument in double quotes if it contains a space.
+
+  NOTE(review): embedded double quotes are not escaped -- confirm
+  arguments never contain quotes.
+  """
+  if ' ' in arg:
+    return '"' + arg + '"'
+  else:
+    return arg
+
+def _escapeArgs(args):
+  """Return a new list with each argument escaped via _escapeArg()."""
+  return [_escapeArg(arg) for arg in args]
+
+class Compiler(Tool):
+ """Base class for C/C++ compiler tools.
+ """
+
+ NO_OPTIMISATION = 0
+ """No optimisation.
+
+ Your code should run slowest at this level, but debugging should
+ be easiest. The code you step through with a debugger should closely
+ match the original source.
+
+ Related compiler options::
+ GCC: -O0
+ MSVC: /Od
+ MWCW: -opt off
+ """
+ PARTIAL_OPTIMISATION = 1
+ """Code is partially optimised.
+
+ Depending on the compiler this may include everything up to but
+ not including link-time code generation.
+
+ Related compiler options::
+ GCC: -O2
+ MSVC: /Ox
+ MWCW: -opt level=2
+ """
+ FULL_OPTIMISATION = 2
+ """Code is fully optimised.
+
+ This may include link-time code generation for compilers that
+ support it.
+
+ Related compiler options::
+ GCC: -O4
+ MSVC: /GL
+ MWCW: -opt level=4
+ """
+ MSVS_CLICKABLE = 0
+ """Messages are clickable in Microsoft Visual Studio.
+
+  When this option is chosen compiler warnings and error messages
+ will be formatted to be clickable in Microsoft Visual Studio.
+
+ The format of each message will be as follows::
+ sourceFile(lineNumber) : message
+
+ Note that if 'MsvcCompiler.outputFullPath' is set to False this
+ option may need to be enabled so that relative source file paths
+ are converted to clickable absolute paths.
+ """
+ debugSymbols = None
+ """Enable debug symbols.
+
+ Enabling debug symbols will allow you to debug your code, but will
+ significantly increase the size of the executable.
+
+ Related compiler options::
+ GCC: -g
+ MSVC: /Z7
+ MWCW: -sym dwarf-2
+ @type: bool
+ """
+ keepDependencyFile = False
+ """Whether to keep the compiler generated dependency file.
+
+ If the value is set then Cake will keep the compiler generated dependency
+ file after a build. The dependency file is used by Cake to obtain a list
+ of source files an object file is dependent on. It will be located next to
+ the target object file with a '.d' extension. This switch is only relevant
+ for compilers that use a dependency file (eg. GCC/MWCW).
+
+ Related compiler options::
+ GCC/MWCW: -MD
+ @type: bool
+ """
+ optimisation = None
+ """Set the optimisation level.
+
+ Available enum values are: L{NO_OPTIMISATION} L{PARTIAL_OPTIMISATION}
+ L{FULL_OPTIMISATION}
+
+ If the value is None the compiler default is used.
+ @type: enum or None
+ """
+ messageStyle = None
+ """Set the message style.
+
+ Available enum values are: L{MSVS_CLICKABLE}
+
+ If the value is None the compiler default output is used.
+ @type: enum or None
+ """
+ enableRtti = None
+ """Enable Run-Time Type Information for C++ compilation.
+
+ Disabling RTTI can reduce the executable size, but will prevent you from
+ using dynamic_cast to downcast between classes, or typeid() to determine
+ the type of a class or struct.
+
+ If the value is None the compiler default is used.
+
+ Related compiler options::
+ GCC: -frtti
+ MSVC: /GR
+ MWCW: -RTTI on
+ @type: bool or None
+ """
+ enableExceptions = None
+ """Enable exception handling.
+
+ Disabling exceptions can significantly reduce the size of the executable.
+
+ If the value is None the compiler default is used.
+
+ Related compiler options::
+ GCC: -fexceptions
+ MSVC: /EHsc
+ MWCW: -cpp_exceptions on
+ @type: bool or None
+ """
+ warningLevel = None
+ """Set the warning level.
+
+ What the warning level does may depend on the compiler, but in general
+ setting it to 0 will disable all warnings, and setting it to 4 will
+ enable all warnings.
+
+ If the value is None the compiler default is used.
+
+ Related compiler options (warning level 0)::
+ GCC: -w
+ MSVC: /W0
+
+ Related compiler options (warning level 4)::
+ GCC: -Wall
+ MSVC: /W4
+ @type: int or None
+ """
+ warningsAsErrors = None
+ """Treat warnings as errors.
+
+ If enabled warnings will be treated as errors and may prevent compilation
+ from succeeding.
+
+ Related compiler options::
+ GCC: -Werror
+ MSVC: /WX
+ MWCW: -w error
+ @type: bool
+ """
+ linkObjectsInLibrary = None
+ """Link objects rather than libraries.
+
+ Linking objects can provide faster program/module links, especially
+ if incremental linking is also enabled.
+
+ Note that libraries will still be built, but only the object files will
+ be passed to the compilers link line.
+
+ If the linker you're using doesn't support response files then linking
+ objects may quickly cause the link command line to go over the command
+ line limit, causing your link to fail with unexpected results.
+ @type: bool
+ """
+ outputMapFile = None
+ """Output a map file.
+
+ If enabled the compiler will output a map file that matches the name of
+ the executable with an appropriate extension (usually .map). The map file
+ will contain a list of symbols used in the program or module and their
+ addresses.
+
+ Related compiler options::
+ GCC: -Map=<target>.map
+ MSVC: /MAP:<target>.map
+ MWCW: -map <target>.map
+ @type: bool
+ """
+ useResponseFile = None
+ """Use a response file.
+
+ If enabled a response file will be generated containing the compiler
+ command line options, and this file will be passed to the compiler
+ rather than the options themselves.
+
+ This enables you to compile large projects on systems that have
+ restrictive command line length limits.
+
+ Note that not all compiler versions will support response files, so
+ turning it on may prevent compilation from succeeding.
+ @type: bool
+ """
+ useIncrementalLinking = None
+ """Use incremental linking.
+
+ Incremental linking may speed up linking, but will also increase the size
+ of the program or module.
+
+ If the value is None the compiler default is used.
+
+ Related compiler options::
+ MSVC: /INCREMENTAL
+ @type: bool
+ """
+ useFunctionLevelLinking = None
+ """Use function-level linking.
+
+ When function-level linking is enabled the linker will strip out any unused
+ functions. For some compilers this option will also strip out any unused
+ data.
+
+ If the value is None the compiler default is used.
+
+ Related compiler options::
+ GCC: -ffunction-sections, -fdata-sections, --gc-sections
+ MSVC: /Gy, /OPT:REF, /OPT:ICF
+ @type: bool
+ """
+ stackSize = None
+ """Set the stack size of a program or module.
+
+  If the value is None the compiler will use its default stack sizes.
+ If the value is a single int then the value is the stack reserve size.
+ If the value is a tuple(int, int) then the first value is the reserve
+ size and the second value is the commit size.
+
+ Note that some compilers may require you to set the stack size in the linker
+ script instead (see L{linkerScript}).
+
+ Related compiler options::
+ MSVC: /STACK
+ @type: None or int or tuple(int, int)
+ """
+ heapSize = None
+ """Set the heap size of a program or module.
+
+  If the value is None the compiler will use its default heap sizes.
+ If the value is a single int then the value is the heap reserve size.
+ If the value is a tuple(int, int) then the first value is the reserve
+ size and the second value is the commit size.
+
+ Related compiler options::
+ MSVC: /HEAP
+ @type: None or int or tuple(int, int)
+ """
+ linkerScript = None
+ """Set the linker script for a program or module.
+
+ This should be set to the path of linker script file.
+
+ Related compiler options::
+ MWCW: -lcf <linkerScript>
+ @type: string or None
+ """
+ objectCachePath = None
+ """Set the path to the object cache.
+
+ Setting this to a path will enable caching of object files for
+ compilers that support it. If an object file with the same checksum of
+ dependencies exists in the cache then it will be copied from the cache
+ rather than being compiled.
+
+ You can share an object cache with others by putting the object cache
+ on a network share. You will also have to make sure all of your project
+ paths match. This could be done by using a virtual drive. An alternative
+ is to set a workspace root, but this can be problematic for debugging
+ (see L{objectCacheWorkspaceRoot}).
+
+ If the value is None then object caching will be turned off.
+ @type: string or None
+ """
objectCacheWorkspaceRoot = None
"""Set the object cache workspace root.

Set this if the object cache is to be shared across workspaces.
This will cause objects and their dependencies under this directory
to be stored as paths relative to this directory. This allows
workspaces at different paths to reuse object files with the
potential danger of debug information embedded in the object
files referring to paths in the wrong workspace.
@type: string or None
"""
language = None
"""Set the compilation language.

If the value is set then the compiler will compile all source files
using the specified language. Example languages are 'c', 'c++'.
If the value is None then the language is determined automatically
based on the extension of each source file.

Related compiler options::
  GCC: -x <language>
  MSVC: /Tc or /Tp
  MWCW: -lang <language>
@type: string or None
"""
pdbFile = None
"""Set the path to the program database file.

If set to a string path the program database file will be generated
at the given path.

If set to None a program database may still be generated with the
name of the executable and the extension .pdb.

Related compiler options::
  MSVC: /PDB
@type: string or None
"""
strippedPdbFile = None
"""Set the path to the stripped program database file.

If set to a string path a stripped version of the PDB file will be
generated at the given path. The stripped version will only include
public symbols. It will not contain type information or line number
information.

If set to None a stripped PDB file will not be generated.

Related compiler options::
  MSVC: /PDBSTRIPPED
@type: string or None
"""
subSystem = None
"""Set the sub-system.

Set the sub-system for a Windows executable build. Possible values
are CONSOLE, NATIVE, POSIX, WINDOWS, WINDOWSCE. The optional values
[,major[.minor]] can be appended that specify the minimum required
version of the sub-system.

If set to None and WinMain or wWinMain is defined, WINDOWS will
be the default.
If set to None and main or wmain is defined, CONSOLE will be the
default.

Related compiler options::
  MSVC: /SUBSYSTEM
@type: string or None
"""
embedManifest = None
"""Embed the manifest in the executable.

If True the manifest file is embedded within the executable, otherwise
no manifest file is generated.

Related compiler options::
  MSVC: /MANIFESTFILE
@type: bool
"""
useSse = None
"""Use Streaming SIMD Extensions.

If SSE is turned on the compiler may choose to optimise scalar floating
point math by using SSE instructions and registers that can perform
multiple operations in parallel.

Note that if this value is turned on it is up to you to make sure the
architecture you are compiling for supports SSE instructions.

Related compiler options::
  GCC: -msse
@type: bool
"""
cSuffixes = frozenset(['.c'])
"""A collection of valid c file suffixes.

@type: set of string
"""
cppSuffixes = frozenset(['.C', '.cc', '.cp', '.cpp', '.CPP', '.cxx', '.c++'])
"""A collection of valid c++ file suffixes.

@type: set of string
"""
mSuffixes = frozenset(['.m'])
"""A collection of valid objective c file suffixes.

@type: set of string
"""
mmSuffixes = frozenset(['.M', '.mm'])
"""A collection of valid objective c++ file suffixes.

@type: set of string
"""
sSuffixes = frozenset(['.s'])
"""A collection of valid assembler file suffixes.

@type: set of string
"""
objectSuffix = '.o'
"""The suffix to use for object files.

@type: string
"""
libraryPrefixSuffixes = [('lib', '.a')]
"""A collection of valid library file prefixes and suffixes.

The first prefix and suffix in the collection will be used as the
default prefix/suffix.

@type: list of tuple(string, string)
"""
modulePrefixSuffixes = [('lib', '.so')]
"""A collection of valid module file prefixes and suffixes.

The first prefix and suffix in the collection will be used as the
default prefix/suffix.

@type: list of tuple(string, string)
"""
programSuffix = ''
"""The suffix to use for program files.

@type: string
"""
pchSuffix = '.gch'
"""The suffix to use for precompiled header files.

@type: string
"""
pchObjectSuffix = None
"""The suffix to use for precompiled header object files.

@type: string or None
"""
manifestSuffix = None
"""The suffix to use for manifest files.

@type: string or None
"""
resourceSuffix = '.o'
"""The suffix to use for resource files.

@type: string or None
"""

# The name of this compiler
_name = 'unknown'

# Map of engine to map of library path to list of object paths.
# WeakKeyDictionary so an engine's entries disappear when it is collected.
__libraryObjects = weakref.WeakKeyDictionary()
+
def __init__(
  self,
  configuration,
  binPaths=None,
  includePaths=None,
  libraryPaths=None,
  ):
  """Create a compiler with empty flag, define and library settings.

  @param configuration: The configuration this compiler belongs to.
  @param binPaths: Optional search paths for compiler binaries.
  @param includePaths: Optional initial include search paths.
  @param libraryPaths: Optional initial library search paths.
  """
  super(Compiler, self).__init__(configuration)
  # Per-language compile flags and per-target link flags start empty.
  self.cFlags = []
  self.cppFlags = []
  self.mFlags = []
  self.mmFlags = []
  self.libraryFlags = []
  self.moduleFlags = []
  self.programFlags = []
  self.resourceFlags = []
  # Keep the caller-supplied list objects when given (shared aliasing
  # is intentional); otherwise start with fresh empty lists.
  if includePaths is None:
    includePaths = []
  self.includePaths = includePaths
  self.defines = []
  self.forcedIncludes = []
  if libraryPaths is None:
    libraryPaths = []
  self.libraryPaths = libraryPaths
  self.libraries = []
  self.modules = []
  self.objectPrerequisites = []
  self.__binPaths = binPaths
+
@property
def name(self):
  """The compiler's short name, eg. 'gcc' or 'msvc'.
  """
  return self._name
+
@property
def libraryPrefix(self):
  """The default prefix for library files.
  """
  prefix, suffix = self.libraryPrefixSuffixes[0]
  return prefix
+
@property
def librarySuffix(self):
  """The default suffix for library files.
  """
  prefix, suffix = self.libraryPrefixSuffixes[0]
  return suffix
+
def addCFlag(self, flag):
  """Append a command-line flag used when compiling .c files.

  @param flag: The flag to append.
  @type flag: string
  """
  flags = self.cFlags
  flags.append(flag)
  self._clearCache()
+
def addCppFlag(self, flag):
  """Append a command-line flag used when compiling .cpp files.

  @param flag: The flag to append.
  @type flag: string
  """
  flags = self.cppFlags
  flags.append(flag)
  self._clearCache()
+
def addMFlag(self, flag):
  """Append a command-line flag used when compiling Objective C files.

  @param flag: The flag to append.
  @type flag: string
  """
  flags = self.mFlags
  flags.append(flag)
  self._clearCache()
+
def addMmFlag(self, flag):
  """Append a command-line flag used when compiling Objective C++ files.

  @param flag: The flag to append.
  @type flag: string
  """
  flags = self.mmFlags
  flags.append(flag)
  self._clearCache()
+
def addLibraryFlag(self, flag):
  """Append a command-line flag used when archiving libraries.

  @param flag: The flag to append.
  @type flag: string
  """
  flags = self.libraryFlags
  flags.append(flag)
  self._clearCache()
+
def addModuleFlag(self, flag):
  """Append a command-line flag used when linking modules.

  @param flag: The flag to append.
  @type flag: string
  """
  flags = self.moduleFlags
  flags.append(flag)
  self._clearCache()
+
def addProgramFlag(self, flag):
  """Append a command-line flag used when linking programs.

  @param flag: The flag to append.
  @type flag: string
  """
  flags = self.programFlags
  flags.append(flag)
  self._clearCache()
+
def addResourceFlag(self, flag):
  """Append a command-line flag used when compiling resources.

  @param flag: The flag to append.
  @type flag: string
  """
  flags = self.resourceFlags
  flags.append(flag)
  self._clearCache()
+
def addIncludePath(self, path):
  """Add a preprocessor include search path.

  Later additions take search precedence over earlier ones.

  @param path: The path to add, resolved against the configuration's
  base path before being stored.
  @type path: string
  """
  resolved = self.configuration.basePath(path)
  self.includePaths.append(resolved)
  self._clearCache()
+
def insertIncludePath(self, index, path):
  """Insert a preprocessor include search path at a given position.

  Paths nearer the back of the list take precedence over those
  nearer the front.

  @param index: List position to insert at.
  @type index: int
  @param path: The path to add, resolved against the configuration's
  base path before being stored.
  @type path: string
  """
  resolved = self.configuration.basePath(path)
  self.includePaths.insert(index, resolved)
  self._clearCache()
+
def getIncludePaths(self):
  """Return the include paths in the order they should be searched.
  """
  return self.includePaths
+
def addDefine(self, name, value=None):
  """Add a preprocessor define.

  A define added later takes precedence over an earlier define of the
  same name.

  @param name: The name of the define to set.
  @type name: string
  @param value: An optional value for the define.
  @type value: string or None
  """
  if value is None:
    entry = name
  else:
    entry = "%s=%s" % (name, value)
  self.defines.append(entry)
  self._clearCache()
+
def insertDefine(self, index, name, value=None):
  """Insert a preprocessor define at a given position.

  Defines nearer the back of the list take precedence over those
  nearer the front.

  @param index: List position to insert at.
  @type index: int
  @param name: The name of the define to set.
  @type name: string
  @param value: An optional value for the define.
  @type value: string or None
  """
  if value is None:
    entry = name
  else:
    entry = "%s=%s" % (name, value)
  self.defines.insert(index, entry)
  self._clearCache()
+
def getDefines(self):
  """Return the defines in the order they should be set.

  Defines appearing later take precedence over earlier ones.
  """
  return self.defines
+
def addForcedInclude(self, path):
  """Add a file to be force-included on the command-line.

  Forced includes are included in the order they were added.

  @param path: Path of the forced include file; may need to be
  relative to a previously defined include path. Resolved against
  the configuration's base path before being stored.
  @type path: string
  """
  resolved = self.configuration.basePath(path)
  self.forcedIncludes.append(resolved)
  self._clearCache()
+
def insertForcedInclude(self, index, path):
  """Insert a force-included file at a given position.

  Forced includes are included in list order.

  @param index: List position to insert at.
  @type index: int
  @param path: Path of the forced include file; may need to be
  relative to a previously defined include path. Resolved against
  the configuration's base path before being stored.
  @type path: string
  """
  resolved = self.configuration.basePath(path)
  self.forcedIncludes.insert(index, resolved)
  self._clearCache()
+
def getForcedIncludes(self):
  """Return the forced includes in the order they should be included.
  """
  return self.forcedIncludes
+
def addObjectPrerequisites(self, prerequisites):
  """Register work that must finish before any object file is built.

  Use this for prerequisites such as generated headers that must exist
  before C/C++ source files are compiled; Cake cannot discover
  dependencies on generated headers automatically.

  @param prerequisites: A Task/FileTarget/AsyncResult or a sequence of
  these. Object files will not build until all associated tasks have
  completed successfully.
  """
  # Note: the argument is appended as a single entry; flattening
  # happens later when the prerequisites are consumed.
  self.objectPrerequisites.append(prerequisites)
+
def addLibrary(self, name):
  """Add a library to link with.

  Libraries added later take search precedence over earlier ones.

  @param name: Name/path of the library to link with.
  @type name: string
  """
  libs = self.libraries
  libs.append(name)
  self._clearCache()
+
def insertLibrary(self, index, name):
  """Insert a library to link with at a given position.

  Libraries nearer the back of the list take precedence over those
  nearer the front.

  @param index: List position to insert at.
  @type index: int
  @param name: Name/path of the library to link with.
  @type name: string
  """
  libs = self.libraries
  libs.insert(index, name)
  self._clearCache()
+
def getLibraries(self):
  """Return the libraries in the order they should be searched.
  """
  return self.libraries
+
def addLibraryPath(self, path):
  """Add a library search path.

  Paths added later take search precedence over earlier ones.

  @param path: The path to add, resolved against the configuration's
  base path before being stored.
  @type path: string
  """
  resolved = self.configuration.basePath(path)
  self.libraryPaths.append(resolved)
  self._clearCache()
+
def insertLibraryPath(self, index, path):
  """Insert a library search path at a given position.

  Paths nearer the back of the list take precedence over those
  nearer the front.

  @param index: List position to insert at.
  @type index: int
  @param path: The path to add, resolved against the configuration's
  base path before being stored.
  @type path: string
  """
  resolved = self.configuration.basePath(path)
  self.libraryPaths.insert(index, resolved)
  self._clearCache()
+
def getLibraryPaths(self):
  """Return the library paths in the order they should be searched.
  """
  return self.libraryPaths
+
def addModule(self, path):
  """Register a module to be copied by copyModulesTo().

  @param path: Path of the module to copy, resolved against the
  configuration's base path before being stored.
  @type path: string
  """
  resolved = self.configuration.basePath(path)
  self.modules.append(resolved)
  self._clearCache()
+
def copyModulesTo(self, targetDir, **kwargs):
  """Copy modules to the given target directory.

  The modules copied are those previously registered with addModule().

  @param targetDir: The directory to copy modules to.
  @type targetDir: string
  @param kwargs: Compiler attribute overrides applied to a snapshot of
  this compiler for the duration of the copy.

  @return: A list of FileTarget objects, one for each module being
  copied.
  @rtype: list of L{FileTarget}
  """
  # Take a snapshot of the current settings so later mutations of this
  # compiler don't affect the queued copies.
  compiler = self.clone()
  # items() rather than the Python-2-only iteritems(): behaviourally
  # identical here and portable.
  for k, v in kwargs.items():
    setattr(compiler, k, v)

  return compiler._copyModulesTo(self.configuration.basePath(targetDir))
+
def _copyModulesTo(self, targetDir):
  """Queue copy tasks for each registered module into targetDir.

  Internal implementation of copyModulesTo(); targetDir is already
  base-path resolved. Copies are skipped when the target exists and is
  at least as new as the source.
  """

  def doCopy(source, target):
    # Runs inside a task, possibly much later than the enclosing call.

    abspath = self.configuration.abspath
    engine = self.engine

    targetAbsPath = abspath(target)
    sourceAbsPath = abspath(source)

    # Decide whether the copy is needed, and remember why for logging.
    if engine.forceBuild:
      reasonToBuild = "rebuild has been forced"
    elif not cake.filesys.isFile(targetAbsPath):
      reasonToBuild = "it doesn't exist"
    elif engine.getTimestamp(sourceAbsPath) > engine.getTimestamp(targetAbsPath):
      reasonToBuild = "'%s' has been changed" % source
    else:
      # up-to-date
      return

    engine.logger.outputDebug(
      "reason",
      "Rebuilding '%s' because %s.\n" % (target, reasonToBuild),
      )
    engine.logger.outputInfo("Copying %s to %s\n" % (source, target))

    try:
      cake.filesys.makeDirs(cake.path.dirName(targetAbsPath))
      cake.filesys.copyFile(sourceAbsPath, targetAbsPath)
    except EnvironmentError, e:
      engine.raiseError("%s: %s\n" % (target, str(e)))

    engine.notifyFileChanged(targetAbsPath)

  @waitForAsyncResult
  def run(sources, targetDir):
    results = []

    for source in sources:
      sourcePath = getPath(source)
      targetPath = os.path.join(targetDir, os.path.basename(sourcePath))

      if self.enabled:
        sourceTask = getTask(source)
        # Default arguments bind the current loop values; without them the
        # lambda would see only the last iteration's paths when it runs.
        copyTask = self.engine.createTask(lambda s=sourcePath, t=targetPath: doCopy(s, t))
        copyTask.startAfter(sourceTask)
      else:
        copyTask = None

      results.append(FileTarget(path=targetPath, task=copyTask))

    return results

  # TODO: Handle copying .manifest files if present for MSVC
  # built DLLs.

  return run(self.modules, targetDir)
+
def pch(self, target, source, header, prerequisites=[],
        forceExtension=True, **kwargs):
  """Compile an individual header to a pch file.

  @param target: Path to the target pch file.
  @type target: string

  @param source: Path of the source file used to build the header.
  @type source: string

  @param header: Path to the header as it would be included
  by other source files.
  @type header: string

  @param prerequisites: An optional list of extra prerequisites that should
  complete building before building this pch.
  @type prerequisites: list of Task or FileTarget

  @param forceExtension: If true then the target path will have
  the default pch file extension appended if it doesn't already
  have it.
  @type forceExtension: bool

  @return: A PchTarget containing the path of the pch file
  that will be built and the task that will build it.
  @rtype: L{PchTarget}
  """
  # Take a snapshot of the build settings at this point and use that.
  compiler = self.clone()
  # items() rather than the Python-2-only iteritems(): behaviourally
  # identical here and portable.
  for k, v in kwargs.items():
    setattr(compiler, k, v)

  basePath = self.configuration.basePath

  return compiler._pch(basePath(target), basePath(source), header,
                       prerequisites, forceExtension)
+
def pchMessage(self, target, source, header, cached=False):
  """Build the message shown when compiling a precompiled header.

  Override to customise the output for precompiled header builds.

  @param target: Path of the target object file.
  @type target: string
  @param source: Path of the source file.
  @type source: string
  @param header: Path to the header as it would be included by other
  source files.
  @type header: string
  @param cached: True when the target is restored from the object cache
  rather than compiled.
  @type cached: bool

  @return: The message to display.
  @rtype: string
  """
  fmt = "Compiling %s\n"
  if cached:
    fmt = "Cached %s\n"
  return fmt % source
+
def _pch(self, target, source, header, prerequisites=[],
         forceExtension=True):
  """Internal implementation of pch(); paths are already base-resolved.

  Returns a PchTarget immediately; actual compilation is deferred to a
  task that starts once the source and all prerequisites are ready.
  """

  @waitForAsyncResult
  def run(target, source, header, prerequisites):
    if forceExtension:
      target = cake.path.forceExtension(target, self.pchSuffix)

    # When pchObjectSuffix is set, derive a sibling object file path
    # from the target; otherwise no pch object file is produced.
    if self.pchObjectSuffix is None:
      object = None
    else:
      object = cake.path.stripExtension(target) + self.pchObjectSuffix

    if self.enabled:
      tasks = getTasks([source])
      tasks.extend(getTasks(prerequisites))
      # Default arguments bind the current values; the lambda runs later.
      pchTask = self.engine.createTask(
        lambda t=target, s=source, h=header, o=object, c=self:
          c.buildPch(t, getPath(s), h, o)
        )
      pchTask.startAfter(tasks, threadPool=self.engine.scriptThreadPool)
    else:
      pchTask = None

    return PchTarget(
      path=target,
      task=pchTask,
      compiler=self,
      header=header,
      object=object,
      )

  # Combine caller prerequisites with compiler-wide object prerequisites
  # and the tasks backing any forced includes.
  allPrerequisites = flatten([
    prerequisites,
    self.objectPrerequisites,
    self._getObjectPrerequisiteTasks(),
    ])

  return run(target, source, header, allPrerequisites)
+
def object(self, target, source, pch=None, prerequisites=[],
           forceExtension=True, **kwargs):
  """Compile an individual source to an object file.

  @param target: Path of the target object file.
  @type target: string

  @param source: Path of the source file.
  @type source: string or FileTarget

  @param pch: A precompiled header file to use. This file can be built
  with the pch() function.
  @type pch: L{PchTarget}

  @param prerequisites: An optional list of extra prerequisites that should
  complete building before building this object.
  @type prerequisites: list of Task or FileTarget

  @param forceExtension: If true then the target path will have
  the default object file extension appended if it doesn't already
  have it.
  @type forceExtension: bool

  @return: A FileTarget containing the path of the object file
  that will be built and the task that will build it.
  @rtype: L{ObjectTarget}
  """
  # Take a snapshot of the build settings at this point and use that.
  compiler = self.clone()
  # items() rather than the Python-2-only iteritems(): behaviourally
  # identical here and portable.
  for k, v in kwargs.items():
    setattr(compiler, k, v)

  basePath = self.configuration.basePath

  return compiler._object(basePath(target), basePath(source), pch,
                          prerequisites, forceExtension)
+
def objectMessage(self, target, source, pch=None, shared=False, cached=False):
  """Build the message shown when compiling an object file.

  Override to customise the output for object file builds.

  @param target: Path of the target object file.
  @type target: string
  @param source: Path of the source file.
  @type source: string
  @param pch: Path of a precompiled header file in use, or None.
  @type pch: string or None
  @param shared: True when the object is built for a shared
  library/module.
  @type shared: bool
  @param cached: True when the target is restored from the object cache
  rather than compiled.
  @type cached: bool

  @return: The message to display.
  @rtype: string
  """
  fmt = "Compiling %s\n"
  if cached:
    fmt = "Cached %s\n"
  return fmt % source
+
def _object(self, target, source, pch=None, prerequisites=[],
            forceExtension=True, shared=False):
  """Internal implementation of object()/objects(); paths are base-resolved.

  Returns an ObjectTarget immediately; actual compilation is deferred to
  a task that starts once the source, pch and prerequisites are ready.
  """

  @waitForAsyncResult
  def run(target, source, pch, prerequisites):
    if forceExtension:
      target = cake.path.forceExtension(target, self.objectSuffix)

    if self.enabled:
      tasks = getTasks([source])
      tasks.extend(getTasks([pch]))
      tasks.extend(getTasks(prerequisites))
      # Default arguments bind the current values; the lambda runs later.
      objectTask = self.engine.createTask(
        lambda t=target, s=source, p=pch, h=shared, c=self:
          c.buildObject(t, getPath(s), getResult(p), h)
        )
      objectTask.startAfter(tasks, threadPool=self.engine.scriptThreadPool)
    else:
      objectTask = None

    return ObjectTarget(
      path=target,
      task=objectTask,
      compiler=self,
      )

  # Combine caller prerequisites with compiler-wide object prerequisites
  # and the tasks backing any forced includes.
  allPrerequisites = flatten([
    prerequisites,
    self.objectPrerequisites,
    self._getObjectPrerequisiteTasks(),
    ])

  return run(target, source, pch, allPrerequisites)
+
@memoise
def _getObjectPrerequisiteTasks(self):
  """Return a list of the tasks that are prerequisites for
  building an object file.

  NOTE(review): @memoise presumably caches the result per compiler
  snapshot (decorator defined elsewhere in this module) — confirm.
  """
  return getTasks(self.forcedIncludes)
+
def objects(self, targetDir, sources, pch=None, prerequisites=[], **kwargs):
  """Build a collection of objects to a target directory.

  @param targetDir: Path to the target directory where the built objects
  will be placed.
  @type targetDir: string

  @param sources: A list of source files to compile to object files.
  @type sources: sequence of string or FileTarget objects

  @param pch: A precompiled header file to use. This file can be built
  with the pch() function.
  @type pch: L{PchTarget}

  @param prerequisites: An optional list of extra prerequisites that should
  complete building before building these objects.
  @type prerequisites: list of Task or FileTarget

  @return: A list of FileTarget objects, one for each object being
  built.
  """
  # Take a snapshot of the build settings at this point and use that.
  compiler = self.clone()
  # items() rather than the Python-2-only iteritems(): behaviourally
  # identical here and portable.
  for k, v in kwargs.items():
    setattr(compiler, k, v)

  @waitForAsyncResult
  def run(targetDir, sources, prerequisites):
    results = []
    for source in sources:
      sourcePath = getPath(source)
      # Each object is named after its source, placed in targetDir.
      sourceName = cake.path.baseNameWithoutExtension(sourcePath)
      targetPath = cake.path.join(targetDir, sourceName)
      results.append(compiler._object(targetPath, source,
                                      pch=pch, prerequisites=prerequisites))
    return results

  basePath = self.configuration.basePath

  return run(basePath(targetDir), basePath(flatten(sources)), prerequisites)
+
def sharedObjects(self, targetDir, sources, pch=None, prerequisites=[],
                  **kwargs):
  """Build a collection of objects used by a shared library/module to a target directory.

  @param targetDir: Path to the target directory where the built objects
  will be placed.
  @type targetDir: string

  @param sources: A list of source files to compile to object files.
  @type sources: sequence of string or FileTarget objects

  @param pch: A precompiled header file to use. This file can be built
  with the pch() function.
  @type pch: L{PchTarget}

  @param prerequisites: An optional list of extra prerequisites that should
  complete building before building these objects.
  @type prerequisites: list of Task or FileTarget

  @return: A list of FileTarget objects, one for each object being
  built.
  """
  # Take a snapshot of the build settings at this point and use that.
  compiler = self.clone()
  # items() rather than the Python-2-only iteritems(): behaviourally
  # identical here and portable.
  for k, v in kwargs.items():
    setattr(compiler, k, v)

  @waitForAsyncResult
  def run(targetDir, sources, prerequisites):
    results = []
    for source in sources:
      sourcePath = getPath(source)
      # Each object is named after its source, placed in targetDir.
      sourceName = cake.path.baseNameWithoutExtension(sourcePath)
      targetPath = cake.path.join(targetDir, sourceName)
      results.append(compiler._object(
        targetPath,
        source,
        pch=pch,
        prerequisites=prerequisites,
        shared=True
        ))
    return results

  basePath = self.configuration.basePath

  return run(basePath(targetDir), basePath(sources), prerequisites)
+
def library(self, target, sources, prerequisites=[], forceExtension=True, **kwargs):
  """Build a library from a collection of objects.

  @param target: Path of the library file to build.
  @type target: string

  @param sources: A list of object files to archive.
  @type sources: list of string or FileTarget

  @param prerequisites: An optional list of extra prerequisites that should
  complete building before building the library.
  @type prerequisites: list of Task or FileTarget

  @param forceExtension: If True then the target path will have
  the default library prefix/suffix applied if not already present.

  @return: A FileTarget object representing the library that will
  be built and the task that will build it.
  @rtype: L{LibraryTarget}
  """
  # Take a snapshot of the current compiler settings.
  compiler = self.clone()
  # items() rather than the Python-2-only iteritems(): behaviourally
  # identical here and portable.
  for k, v in kwargs.items():
    setattr(compiler, k, v)

  basePath = self.configuration.basePath

  return compiler._library(
    basePath(target),
    basePath(sources),
    prerequisites,
    forceExtension
    )
+
def libraryMessage(self, target, sources, cached=False):
  """Build the message shown when archiving a library file.

  Override to customise the output for library builds.

  @param target: Path of the target library file.
  @type target: string
  @param sources: Paths to the source files.
  @type sources: list(string)
  @param cached: True when the target is restored from the object cache
  rather than built.
  @type cached: bool

  @return: The message to display.
  @rtype: string
  """
  fmt = "Archiving %s\n"
  if cached:
    fmt = "Cached %s\n"
  return fmt % target
+
def _library(self, target, sources, prerequisites=[], forceExtension=True):
  """Internal implementation of library(); paths are base-resolved.

  Returns a LibraryTarget immediately; archiving is deferred to a task
  that starts once the sources and prerequisites are ready.
  """

  @waitForAsyncResult
  def run(target, sources, prerequisites):
    if forceExtension:
      prefix, suffix = self.libraryPrefix, self.librarySuffix
      target = cake.path.forcePrefixSuffix(target, prefix, suffix)

    if self.enabled:
      def build():
        paths = getLinkPaths(sources)
        # Record the objects archived into this library before building;
        # _setObjectsInLibrary is defined elsewhere in this class.
        self._setObjectsInLibrary(target, paths)
        self.buildLibrary(target, paths)

      tasks = getTasks(sources)
      tasks.extend(getTasks(prerequisites))
      libraryTask = self.engine.createTask(build)
      libraryTask.startAfter(tasks, threadPool=self.engine.scriptThreadPool)
    else:
      libraryTask = None

    return LibraryTarget(
      path=target,
      task=libraryTask,
      compiler=self,
      )

  return run(target, flatten(sources), flatten(prerequisites))
+
def module(self, target, sources, importLibrary=None, installName=None, prerequisites=[], forceExtension=True, **kwargs):
  """Build a module/dynamic-library.

  Modules are executable code that can be dynamically loaded at
  runtime. On some platforms they are referred to as shared-libraries
  or dynamically-linked-libraries (DLLs).

  @param target: Path of the module file to build.
  @type target: string

  @param sources: A list of source objects/libraries to be linked
  into the module.
  @type sources: sequence of string/FileTarget

  @param importLibrary: Optional path to an import library that should be
  built. Programs can link against the import library to use the module's
  functions.

  Related compiler options::
    GCC: --out-implib
    MSVC: /IMPLIB
  @type importLibrary: string or None

  @param installName: Optional dyld install_name for a shared library.
  @type installName: string or None

  @param prerequisites: An optional list of extra prerequisites that should
  complete building before building the module.
  @type prerequisites: list of Task or FileTarget

  @param forceExtension: If True then the target path will have
  the default module prefix/suffix applied if not already present.

  @return: A FileTarget object representing the module that will
  be built and the task that will build it.
  @rtype: L{ModuleTarget}
  """
  # Take a snapshot of the current compiler settings.
  compiler = self.clone()
  # items() rather than the Python-2-only iteritems(): behaviourally
  # identical here and portable.
  for k, v in kwargs.items():
    setattr(compiler, k, v)

  basePath = self.configuration.basePath

  return compiler._module(
    basePath(target),
    basePath(sources),
    basePath(importLibrary),
    installName,
    prerequisites,
    forceExtension,
    )
+
def moduleMessage(self, target, sources, cached=False):
  """Build the message shown when linking a module file.

  Override to customise the output for module builds.

  @param target: Path of the target module file.
  @type target: string
  @param sources: Paths to the source files.
  @type sources: list(string)
  @param cached: True when the target is restored from the object cache
  rather than linked.
  @type cached: bool

  @return: The message to display.
  @rtype: string
  """
  fmt = "Linking %s\n"
  if cached:
    fmt = "Cached %s\n"
  return fmt % target
+
def _module(self, target, sources, importLibrary=None, installName=None, prerequisites=[], forceExtension=True):
  """Internal implementation of module(); paths are base-resolved.

  Returns a ModuleTarget immediately; linking is deferred to a task that
  starts once the sources, prerequisites and libraries are ready.
  """

  @waitForAsyncResult
  def run(target, sources, importLibrary, installName, prerequisites):
    if forceExtension:
      # Apply the default module prefix/suffix to the target and, when
      # given, the import library and install name as well.
      prefix, suffix = self.modulePrefixSuffixes[0]
      target = cake.path.forcePrefixSuffix(target, prefix, suffix)
      if importLibrary:
        prefix, suffix = self.libraryPrefix, self.librarySuffix
        importLibrary = cake.path.forcePrefixSuffix(
          importLibrary,
          prefix,
          suffix,
          )
      if installName:
        prefix, suffix = self.modulePrefixSuffixes[0]
        installName = cake.path.forcePrefixSuffix(
          installName,
          prefix,
          suffix,
          )

    # A manifest path is only produced when the compiler defines a
    # manifest suffix (eg. MSVC-style toolchains).
    if self.manifestSuffix is None:
      manifest = None
    else:
      manifest = target + self.manifestSuffix

    if self.enabled:
      def build():
        paths = getLinkPaths(sources)
        self.buildModule(target, paths, importLibrary, installName)

      tasks = getTasks(sources)
      tasks.extend(getTasks(prerequisites))
      tasks.extend(getTasks(self.getLibraries()))
      moduleTask = self.engine.createTask(build)
      moduleTask.startAfter(tasks, threadPool=self.engine.scriptThreadPool)
    else:
      moduleTask = None

    return ModuleTarget(
      path=target,
      task=moduleTask,
      compiler=self,
      library=importLibrary,
      manifest=manifest,
      )

  return run(target, flatten(sources), importLibrary, installName, flatten(prerequisites))
+
def program(self, target, sources, prerequisites=[], forceExtension=True, **kwargs):
  """Build an executable program.

  @param target: Path to the target executable.
  @type target: string

  @param sources: A list of source objects/libraries to be linked
  into the executable.
  @type sources: sequence of string/FileTarget

  @param prerequisites: An optional list of extra prerequisites that should
  complete building before building the program.
  @type prerequisites: list of Task or FileTarget

  @param forceExtension: If True then target path will have the
  default executable extension appended if it doesn't already have
  it.

  @return: A FileTarget object representing the executable that will
  be built and the task that will build it.
  @rtype: L{ProgramTarget}
  """
  # Take a snapshot of the current compiler settings.
  compiler = self.clone()
  # items() rather than the Python-2-only iteritems(): behaviourally
  # identical here and portable.
  for name, value in kwargs.items():
    setattr(compiler, name, value)

  basePath = self.configuration.basePath

  return compiler._program(basePath(target), basePath(sources),
                           prerequisites, forceExtension)
+
def programMessage(self, target, sources, cached=False):
  """Build the message shown when linking a program file.

  Override to customise the output for program builds.

  @param target: Path of the target program file.
  @type target: string
  @param sources: Paths to the source files.
  @type sources: list(string)
  @param cached: True when the target is restored from the object cache
  rather than linked.
  @type cached: bool

  @return: The message to display.
  @rtype: string
  """
  fmt = "Linking %s\n"
  if cached:
    fmt = "Cached %s\n"
  return fmt % target
+
def _program(self, target, sources, prerequisites=[], forceExtension=True, **kwargs):
  """Internal implementation of program(); paths are base-resolved.

  Returns a ProgramTarget immediately; linking is deferred to a task
  that starts once the sources, prerequisites and libraries are ready.

  NOTE(review): **kwargs is accepted but never used here, unlike the
  public program() wrapper — looks vestigial; confirm before removing.
  """

  @waitForAsyncResult
  def run(target, sources, prerequisites):

    if forceExtension:
      target = cake.path.forceExtension(target, self.programSuffix)

    # A manifest path is only produced when the compiler defines a
    # manifest suffix (eg. MSVC-style toolchains).
    if self.manifestSuffix is None:
      manifest = None
    else:
      manifest = target + self.manifestSuffix

    if self.enabled:
      def build():
        paths = getLinkPaths(sources)
        self.buildProgram(target, paths)

      tasks = getTasks(sources)
      tasks.extend(getTasks(prerequisites))
      tasks.extend(getTasks(self.getLibraries()))
      programTask = self.engine.createTask(build)
      programTask.startAfter(tasks, threadPool=self.engine.scriptThreadPool)
    else:
      programTask = None

    return ProgramTarget(
      path=target,
      task=programTask,
      compiler=self,
      manifest=manifest,
      )

  return run(target, flatten(sources), flatten(prerequisites))
+
def resource(self, target, source, prerequisites=[], forceExtension=True, **kwargs):
  """Build a resource file from a source file.

  @param target: Path of the resource file to build.
  @type target: string

  @param source: Path of the source file to compile.
  @type source: string

  @param prerequisites: An optional list of extra prerequisites that should
  complete building before building the resource.
  @type prerequisites: list of Task or FileTarget

  @param forceExtension: If True then the target path will have
  the default resource extension appended to it if it not already
  present.

  @return: A FileTarget object representing the resource that will
  be built and the task that will build it.
  @rtype: L{ResourceTarget}
  """
  # Take a snapshot of the current compiler settings.
  compiler = self.clone()
  # items() rather than the Python-2-only iteritems(): behaviourally
  # identical here and portable.
  for k, v in kwargs.items():
    setattr(compiler, k, v)

  basePath = self.configuration.basePath

  return compiler._resource(basePath(target), basePath(source),
                            prerequisites, forceExtension)
+
def resourceMessage(self, target, source, cached=False):
  """Return the message to display when compiling a resource file.

  Override this to customise the message printed for resource compiles.

  @param target: Path of the target resource file.
  @type target: string

  @param source: Path of the source file.
  @type source: string

  @param cached: True if the target was copied from the cache rather
  than compiled.
  @type cached: bool

  @return: The message to display.
  @rtype: string
  """
  if not cached:
    return "Compiling %s\n" % source
  return "Cached %s\n" % source
+
def _resource(self, target, source, prerequisites=[], forceExtension=True):
  """Internal implementation of resource(): schedule a resource compile.

  Expects 'target' and 'source' to have already been resolved via
  configuration.basePath() and 'self' to be a cloned compiler snapshot.

  @param target: Path of the resource file to build.
  @param source: Path (or AsyncResult) of the source file to compile.
  @param prerequisites: Extra tasks that must complete before compiling.
  NOTE(review): the default [] is a shared mutable default; it is only
  read here so this is safe, but it must never be mutated.
  @param forceExtension: If True append self.resourceSuffix when missing.
  @return: A L{ResourceTarget} for the resource that will be built.
  """

  # Defer execution until any AsyncResult arguments have produced values.
  @waitForAsyncResult
  def run(target, source, prerequisites):
    if forceExtension:
      target = cake.path.forceExtension(target, self.resourceSuffix)

    if self.enabled:
      def build():
        path = getPath(source)
        self.buildResource(target, path)

      tasks = getTasks([source])
      tasks.extend(getTasks(prerequisites))
      resourceTask = self.engine.createTask(build)
      resourceTask.startAfter(tasks, threadPool=self.engine.scriptThreadPool)
    else:
      # Compiler disabled: return a target with no task to perform.
      resourceTask = None

    return ResourceTarget(
      path=target,
      task=resourceTask,
      compiler=self,
      )

  return run(target, source, flatten(prerequisites))
+
def resources(self, targetDir, sources, prerequisites=[], **kwargs):
  """Build a collection of resources to a target directory.

  Each source is compiled to a resource named after the source file
  (without its extension) inside targetDir.

  @param targetDir: Path to the target directory where the built
  resources will be placed.
  @type targetDir: string

  @param sources: Source files to compile to resource files.
  @type sources: sequence of string or FileTarget objects

  @param prerequisites: Optional extra prerequisites that must complete
  before these resources are built.
  @type prerequisites: list of Task or FileTarget

  @return: A list of FileTarget objects, one for each resource being
  built.
  """
  # Snapshot the current compiler settings, applying keyword overrides.
  compiler = self.clone()
  for name in kwargs:
    setattr(compiler, name, kwargs[name])

  @waitForAsyncResult
  def run(targetDir, sources, prerequisites):
    def buildOne(source):
      sourceName = cake.path.baseNameWithoutExtension(getPath(source))
      return compiler._resource(
        cake.path.join(targetDir, sourceName),
        source,
        prerequisites,
        )
    return [buildOne(source) for source in sources]

  basePath = self.configuration.basePath

  return run(basePath(targetDir), basePath(sources), prerequisites)
+
+ ###########################
+ # Internal methods not part of public API
+
def _generateDependencyFile(self, target):
  """Return the path to use for a compiler-generated dependency file.

  When keepDependencyFile is set the '.d' file is placed beside the
  target (absolute path); otherwise a throwaway temporary file is
  created and its handle closed immediately.
  """
  if not self.keepDependencyFile:
    handle, tempPath = tempfile.mkstemp(prefix='CakeGccDep')
    os.close(handle)
    return tempPath
  return self.configuration.abspath(cake.path.stripExtension(target) + '.d')
+
def _getObjectsInLibrary(self, path):
  """Get a list of the paths of object files in the specified library.

  @param path: Path of the library previously built by a call to library().

  @return: A tuple of the paths of objects in the library, or None if
  the library is unknown for the current configuration.
  """
  # Normalise so lookups match the keys stored by _setObjectsInLibrary().
  key = os.path.normcase(os.path.normpath(path))
  perConfig = self.__libraryObjects.get(self.configuration, None)
  if not perConfig:
    return None
  return perConfig.get(key, None)
+
@memoise
def _getProcessEnv(self):
  """Return the minimal environment used to launch compiler processes.

  Only a handful of variables are propagated from the parent
  environment; PATH is rebuilt from '.', the compiler's bin paths and
  (when available) the Windows system directories.
  """
  temp = os.environ.get('TMP', os.environ.get('TEMP', os.getcwd()))
  systemRoot = os.environ.get('SYSTEMROOT', '')

  # PATH search order: current dir, compiler bin paths, system dirs.
  pathParts = ['.']
  if self.__binPaths is not None:
    pathParts.extend(self.__binPaths)
  if systemRoot:
    pathParts.append(os.path.join(systemRoot, 'System32'))
    pathParts.append(systemRoot)

  return {
    'COMSPEC' : os.environ.get('COMSPEC', ''),
    'PATH' : os.path.pathsep.join(pathParts),
    'PATHEXT' : ".COM;.EXE;.BAT;.CMD",
    'SYSTEMROOT' : systemRoot,
    'TEMP' : temp,
    'TMP' : temp,
    'TMPDIR' : temp,
    }
+
def _outputStderr(self, text):
  """Forward a tool's stderr output to the engine's error log."""
  # Normalise Windows line endings before logging.
  self.engine.logger.outputError(text.replace("\r\n", "\n"))
+
def _outputStdout(self, text):
  """Forward a tool's stdout output to the engine's error log.

  Stdout is deliberately routed to the error log: some compilers write
  errors to stdout, and all unexpected output should be treated as an
  error or handled/output by client code. An example is Msvc's link
  error, "LINK : fatal error LNK1104: cannot open file '<filename>'".
  """
  self.engine.logger.outputError(text.replace("\r\n", "\n"))
+
def _resolveObjects(self):
  """Resolve the list of library names to object file paths.

  When linkObjectsInLibrary is enabled, libraries that were built by
  this build (and whose member objects are known) are replaced by their
  constituent object files.

  @return: A tuple containing a list of paths to resolved objects,
  followed by a list of unresolved libraries.
  @rtype: tuple of (list of string, list of string)
  """
  libraries = getLibraryPaths(self.getLibraries())

  if not self.linkObjectsInLibrary:
    return [], libraries

  objects = []
  unresolved = []
  resolvedPaths = self._scanForLibraries(libraries, True)

  # flagMissing=True guarantees one (possibly None) entry per library.
  for library, resolved in zip(libraries, resolvedPaths):
    libraryObjects = None
    if resolved is not None:
      libraryObjects = self._getObjectsInLibrary(resolved)
    if libraryObjects is None:
      unresolved.append(library)
    else:
      objects.extend(libraryObjects)

  return objects, unresolved
+
def _runProcess(
  self,
  args,
  target=None,
  processStdout=None,
  processStderr=None,
  processExitCode=None,
  allowResponseFile=True,
  ):
  """Run an external tool, capturing and dispatching its output.

  @param args: The command line as a list; args[0] is the executable.
  @param target: Optional target path; its parent directories are
  created before the tool is launched.
  @param processStdout: Optional callback given the captured stdout
  text; if omitted stdout is logged via _outputStdout().
  @param processStderr: Optional callback given the captured stderr
  text; if omitted stderr is logged via _outputStderr().
  @param processExitCode: Optional callback given the exit code; if
  omitted a non-zero exit code raises a build error.
  @param allowResponseFile: If True (and self.useResponseFile is set)
  all arguments after the executable are written to a temporary
  response file that is passed as '@<path>'.
  @return: A list of files used while running the tool (currently just
  the executable).
  """

  if target is not None:
    absTarget = self.configuration.abspath(target)
    cake.filesys.makeDirs(cake.path.dirName(absTarget))

  stdout = None
  stderr = None
  argsPath = None
  try:
    # Capture output in temp files rather than pipes so large outputs
    # cannot deadlock the child process.
    stdout = tempfile.TemporaryFile(mode="w+t")
    stderr = tempfile.TemporaryFile(mode="w+t")

    if allowResponseFile and self.useResponseFile:
      argsTemp, argsPath = tempfile.mkstemp(text=True)
      argsFileString = " ".join(_escapeArgs(args[1:]))
      argsFile = os.fdopen(argsTemp, "wt")
      argsFile.write(argsFileString)
      argsFile.close()
      args = [args[0], '@' + argsPath]

    argsString = " ".join(_escapeArgs(args))

    debugString = "run: %s\n" % argsString
    if argsPath is not None:
      debugString += "contents of %s: %s\n" % (argsPath, argsFileString)
    self.engine.logger.outputDebug(
      "run",
      debugString,
      )

    if cake.system.isWindows():
      # Use shell=False to avoid command line length limits.
      executable = self.configuration.abspath(args[0])
      shell = False
    else:
      # Use shell=True to allow arguments to be escaped exactly as they
      # would be on the command line.
      executable = None
      shell = True

    try:
      p = subprocess.Popen(
        args=argsString,
        executable=executable,
        shell=shell,
        cwd=self.configuration.baseDir,
        env=self._getProcessEnv(),
        stdin=subprocess.PIPE,
        stdout=stdout,
        stderr=stderr,
        )
    except EnvironmentError, e:
      # NOTE(review): assumes engine.raiseError() raises and never
      # returns — otherwise 'p' below would be undefined. TODO confirm.
      self.engine.raiseError(
        "cake: failed to launch %s: %s\n" % (args[0], str(e))
        )
    # The tool is given no stdin input.
    p.stdin.close()

    exitCode = p.wait()

    stdout.seek(0)
    stderr.seek(0)

    stdoutText = stdout.read()
    stderrText = stderr.read()
  finally:
    # Clean up the temp files even if launching or reading failed.
    if stdout is not None:
      stdout.close()
    if stderr is not None:
      stderr.close()
    if argsPath is not None:
      os.remove(argsPath)

  if stdoutText:
    if processStdout is not None:
      processStdout(stdoutText)
    else:
      self._outputStdout(stdoutText)

  if stderrText:
    if processStderr is not None:
      processStderr(stderrText)
    else:
      self._outputStderr(stderrText)

  if processExitCode is not None:
    processExitCode(exitCode)
  elif exitCode != 0:
    self.engine.raiseError(
      "%s: failed with exit code %i\n" % (args[0], exitCode)
      )

  # TODO: Return DLL's/EXE's used by gcc.exe or MSVC as well.
  return [args[0]]
+
def _scanDependencyFile(self, depPath, target):
  """Parse a compiler-generated dependency file.

  @param depPath: Path of the dependency file to parse.
  @param target: Target path whose extension selects the parse mode.
  @return: The dependencies read from the file.
  """
  self.engine.logger.outputDebug(
    "scan",
    "scan: %s\n" % depPath,
    )

  dependencies = parseDependencyFile(depPath, cake.path.extension(target))

  if not self.keepDependencyFile:
    # Sometimes file removal will fail, perhaps because the compiler
    # or a file watcher has the file open. Because it's a temp file
    # this is OK, just let the system delete it later.
    try:
      os.remove(depPath)
    except Exception:
      self.engine.logger.outputDebug(
        "scan",
        "Unable to remove dependency file: %s\n" % depPath,
        )

  return dependencies
+
def _scanForLibraries(self, libraries, flagMissing=False):
  """Find the on-disk paths of the given library names.

  For each library, candidate file names are generated from the
  compiler's libraryPrefixSuffixes and searched for in each library
  search path.

  @param libraries: Library names/paths to resolve.
  @param flagMissing: If True a None placeholder is appended for each
  library that cannot be found, so the result has exactly one entry per
  input library. If False missing libraries are silently skipped (with
  a debug message).
  @return: List of resolved paths (with None entries when flagMissing
  is True and a library was not found).
  """
  paths = []
  for library in libraries:
    fileNames = [library]

    # Also consider decorated names (e.g. prefix 'lib' + suffix '.a'),
    # unless the name already carries a known library suffix.
    libraryExtension = os.path.normcase(cake.path.extension(library))
    for prefix, suffix in self.libraryPrefixSuffixes:
      if libraryExtension != os.path.normcase(suffix):
        fileNames.append(cake.path.addPrefix(library, prefix) + suffix)

    # Add [""] so we search for the full path first
    libraryPaths = itertools.chain([""], self.getLibraryPaths())
    for candidate in cake.path.join(libraryPaths, fileNames):
      absCandidate = self.configuration.abspath(candidate)
      if cake.filesys.isFile(absCandidate):
        paths.append(candidate)
        break
    else:
      # for/else: no candidate file existed on disk.
      if flagMissing:
        paths.append(None)
      else:
        self.engine.logger.outputDebug(
          "scan",
          "scan: Ignoring missing library '" + library + "'\n",
          )
  return paths
+
def _setObjectsInLibrary(self, path, objectPaths):
  """Record which object files were archived into a library.

  @param path: Path of the library previously built by a call to
  library().
  @type path: string

  @param objectPaths: The objects built by that library() call.
  @type objectPaths: list of strings
  """
  # Normalise the key so _getObjectsInLibrary() lookups match.
  key = os.path.normcase(os.path.normpath(path))
  perConfig = self.__libraryObjects.setdefault(self.configuration, {})
  perConfig[key] = tuple(objectPaths)
+
def buildPch(self, target, source, header, object):
  """Perform the actual build of a precompiled header.

  @param target: Path of the target precompiled header file.
  @type target: string
  @param source: Path of the source file to compile.
  @type source: string
  @param header: Header name passed to the compiler (e.g. '/H<header>'
  in the dummy compiler).
  @param object: Optional path of an object file also produced by the
  compile, or None.
  """
  compile, args, _ = self.getPchCommands(
    target,
    source,
    header,
    object,
    )

  # Check if the target needs building
  _, reasonToBuild = self.configuration.checkDependencyInfo(target, args)
  if not reasonToBuild:
    return # Target is up to date
  self.engine.logger.outputDebug(
    "reason",
    "Rebuilding '" + target + "' because " + reasonToBuild + ".\n",
    )

  # NOTE(review): 'targets' is built but never used below —
  # createDependencyInfo() stores [target] only, even when 'object' is
  # also produced. Confirm whether 'object' should be tracked too.
  targets = [target]
  if object is not None:
    targets.append(object)

  def command():
    message = self.pchMessage(target, source, header=header, cached=False)
    self.engine.logger.outputInfo(message)
    return compile()

  compileTask = self.engine.createTask(command)
  compileTask.start(immediate=True)

  def storeDependencyInfo():
    # The compile task's result is the list of dependency paths.
    abspath = self.configuration.abspath
    normpath = os.path.normpath
    dependencies = [
      normpath(abspath(p))
      for p in compileTask.result
      ]
    newDependencyInfo = self.configuration.createDependencyInfo(
      targets=[target],
      args=args,
      dependencies=dependencies,
      calculateDigests=False,
      )
    self.configuration.storeDependencyInfo(newDependencyInfo)

  storeDependencyTask = self.engine.createTask(storeDependencyInfo)
  storeDependencyTask.startAfter(compileTask, immediate=True)
+
def buildObject(self, target, source, pch, shared):
  """Perform the actual build of an object.

  Checks dependency info first and returns early if the target is up to
  date. If an object cache is configured (and the compiler reports the
  result as cacheable) the cache is consulted before compiling, and
  updated after a successful compile.

  @param target: Path of the target object file.
  @type target: string

  @param source: Path of the source file.
  @type source: string

  @param pch: Optional precompiled-header target used by this compile,
  or None.

  @param shared: True when compiling for a shared library/module.
  """
  compile, args, canBeCached = self.getObjectCommands(
    target,
    source,
    pch,
    shared
    )

  configuration = self.configuration

  # Check if the target needs building
  oldDependencyInfo, reasonToBuild = configuration.checkDependencyInfo(target, args)
  if reasonToBuild is None:
    return # Target is up to date
  self.engine.logger.outputDebug(
    "reason",
    "Rebuilding '" + target + "' because " + reasonToBuild + ".\n",
    )

  useCacheForThisObject = canBeCached and self.objectCachePath is not None
  # Signature appended to cached dependency files to detect corruption.
  cacheDepMagic = "CKCH"

  if useCacheForThisObject:
    #######################
    # USING OBJECT CACHE
    #######################

    # Prime the file digest cache from previous run so we don't have
    # to recalculate file digests for files that haven't changed.
    if oldDependencyInfo is not None:
      configuration.primeFileDigestCache(oldDependencyInfo)

    # We either need to make all paths that form the cache digest relative
    # to the workspace root or all of them absolute.
    targetDigestPath = configuration.abspath(target)
    if self.objectCacheWorkspaceRoot is not None:
      workspaceRoot = configuration.abspath(self.objectCacheWorkspaceRoot)
      workspaceRoot = os.path.normcase(workspaceRoot)
      targetDigestPathNorm = os.path.normcase(targetDigestPath)
      if cake.path.commonPath(targetDigestPathNorm, workspaceRoot) == workspaceRoot:
        targetDigestPath = targetDigestPath[len(workspaceRoot)+1:]

    # Find the directory that will contain all cached dependency
    # entries for this particular target object file. The cache is
    # sharded by the first two hex digits of the digest.
    targetDigest = cake.hash.sha1(targetDigestPath.encode("utf8")).digest()
    targetDigestStr = cake.hash.hexlify(targetDigest)
    targetCacheDir = cake.path.join(
      self.objectCachePath,
      targetDigestStr[0],
      targetDigestStr[1],
      targetDigestStr
      )
    targetCacheDir = configuration.abspath(targetCacheDir)

    # Find all entries in the directory
    entries = set()

    # If doing a force build, pretend the cache is empty
    if not self.engine.forceBuild:
      try:
        entries.update(os.listdir(targetCacheDir))
      except EnvironmentError:
        # Target cache dir doesn't exist, treat as if no entries
        pass

    hexChars = "0123456789abcdefABCDEF"

    # Try to find the dependency files
    for entry in entries:
      # Skip any entry that's not a SHA-1 hash
      if len(entry) != 40:
        continue
      skip = False
      for c in entry:
        if c not in hexChars:
          skip = True
          break
      if skip:
        continue

      cacheDepPath = cake.path.join(targetCacheDir, entry)

      try:
        cacheDepContents = cake.filesys.readFile(cacheDepPath)
      except EnvironmentError:
        continue

      # Check for the correct signature to make sure the file isn't corrupt
      cacheDepMagicLen = len(cacheDepMagic)
      cacheDepSignature = cacheDepContents[-cacheDepMagicLen:]
      cacheDepContents = cacheDepContents[:-cacheDepMagicLen]

      if cacheDepSignature != cacheDepMagic:
        # Invalid signature
        continue

      # NOTE(review): pickled data read from the shared object cache;
      # safe only if the cache directory is trusted.
      try:
        candidateDependencies = pickle.loads(cacheDepContents)
      except Exception:
        # Invalid dependency file for this entry
        continue

      if not isinstance(candidateDependencies, list):
        # Data format change
        continue

      try:
        newDependencyInfo = configuration.createDependencyInfo(
          targets=[target],
          args=args,
          dependencies=candidateDependencies,
          )
      except EnvironmentError:
        # One of the dependencies didn't exist
        continue

      # Check if the state of our files matches that of a cached object file.
      cachedObjectDigest = configuration.calculateDigest(newDependencyInfo)
      cachedObjectDigestStr = cake.hash.hexlify(cachedObjectDigest)
      cachedObjectPath = cake.path.join(
        self.objectCachePath,
        cachedObjectDigestStr[0],
        cachedObjectDigestStr[1],
        cachedObjectDigestStr
        )
      cachedObjectPath = configuration.abspath(cachedObjectPath)
      if cake.filesys.isFile(cachedObjectPath):
        message = self.objectMessage(target, source, pch=getPath(pch), shared=shared, cached=True)
        self.engine.logger.outputInfo(message)
        try:
          cake.zipping.decompressFile(cachedObjectPath, configuration.abspath(target))
        except EnvironmentError:
          continue # Invalid cache file
        configuration.storeDependencyInfo(newDependencyInfo)
        # Successfully restored object file and saved new dependency info file.
        return

  # Else, if we get here we didn't find the object in the cache so we need
  # to actually execute the build.
  def command():
    message = self.objectMessage(target, source, pch=getPath(pch), shared=shared, cached=False)
    self.engine.logger.outputInfo(message)
    return compile()

  def storeDependencyInfoAndCache():
    # Since we are sharing this object in the object cache we need to
    # make any paths in this workspace relative to the current workspace.
    abspath = configuration.abspath
    normpath = os.path.normpath
    dependencies = []
    if self.objectCacheWorkspaceRoot is None:
      dependencies = [
        normpath(abspath(p))
        for p in compileTask.result
        ]
    else:
      workspaceRoot = os.path.normcase(
        configuration.abspath(self.objectCacheWorkspaceRoot)
        ) + os.path.sep
      workspaceRootLen = len(workspaceRoot)
      for path in compileTask.result:
        path = normpath(abspath(path))
        pathNorm = os.path.normcase(path)
        if pathNorm.startswith(workspaceRoot):
          path = path[workspaceRootLen:]
        dependencies.append(path)

    newDependencyInfo = configuration.createDependencyInfo(
      targets=[target],
      args=args,
      dependencies=dependencies,
      calculateDigests=useCacheForThisObject,
      )
    configuration.storeDependencyInfo(newDependencyInfo)

    # Finally update the cache if necessary
    if useCacheForThisObject:
      try:
        objectDigest = configuration.calculateDigest(newDependencyInfo)
        objectDigestStr = cake.hash.hexlify(objectDigest)

        # The dependency-file entry is named after a digest of the
        # dependency list itself.
        dependencyDigest = cake.hash.sha1()
        for dep in dependencies:
          dependencyDigest.update(dep.encode("utf8"))
        dependencyDigest = dependencyDigest.digest()
        dependencyDigestStr = cake.hash.hexlify(dependencyDigest)

        cacheDepPath = cake.path.join(
          targetCacheDir,
          dependencyDigestStr
          )
        cacheObjectPath = cake.path.join(
          self.objectCachePath,
          objectDigestStr[0],
          objectDigestStr[1],
          objectDigestStr,
          )
        cacheObjectPath = configuration.abspath(cacheObjectPath)

        # Copy the object file first, then the dependency file
        # so that other processes won't find the dependency until
        # the object file is ready.
        cake.zipping.compressFile(configuration.abspath(target), cacheObjectPath)

        if not cake.filesys.isFile(cacheDepPath):
          dependencyString = pickle.dumps(dependencies, pickle.HIGHEST_PROTOCOL)
          cake.filesys.writeFile(cacheDepPath, dependencyString + cacheDepMagic)

      except EnvironmentError:
        # Don't worry if we can't put the object in the cache
        # The build shouldn't fail.
        pass

  compileTask = self.engine.createTask(command)
  compileTask.start(immediate=True)

  storeDependencyTask = self.engine.createTask(storeDependencyInfoAndCache)
  storeDependencyTask.startAfter(compileTask, immediate=True)
+
def getPchCommands(self, target, source, header, object):
  """Get the command-lines for compiling a precompiled header.

  @return: A (compile, args, canCache) tuple. 'compile' is a
  zero-argument function that returns the list of dependency paths on
  success. 'args' identifies the command parameters (typically the
  compiler command line); if it changes the target is rebuilt.
  'canCache' indicates whether the built file may safely be stored in
  the object cache.
  """
  # The base class cannot compile anything; subclasses must override.
  message = "Don't know how to compile %s\n" % source
  self.engine.raiseError(message)
+
def getObjectCommands(self, target, source, pch, shared):
  """Get the command-lines for compiling a source file to an object.

  @return: A (compile, args, canCache) tuple. 'compile' is a
  zero-argument function that returns the list of dependency paths on
  success. 'args' identifies the command parameters (typically the
  compiler command line); if it changes the target is rebuilt.
  'canCache' indicates whether the built object may safely be stored in
  the object cache.
  """
  # The base class cannot compile anything; subclasses must override.
  message = "Don't know how to compile %s\n" % source
  self.engine.raiseError(message)
+
def buildLibrary(self, target, sources):
  """Perform the actual build of a library.

  @param target: Path of the target library file.
  @type target: string

  @param sources: List of source object files.
  @type sources: list of string
  """

  archive, scan = self.getLibraryCommand(target, sources)

  # The archive command's repr acts as the dependency 'args' signature.
  args = repr(archive)

  # Check if the target needs building
  _, reasonToBuild = self.configuration.checkDependencyInfo(target, args)
  if not reasonToBuild:
    return # Target is up to date
  self.engine.logger.outputDebug(
    "reason",
    "Rebuilding '" + target + "' because " + reasonToBuild + ".\n",
    )

  def command():
    message = self.libraryMessage(target, sources, cached=False)
    self.engine.logger.outputInfo(message)

    archive()

    # Scan after archiving so dependencies reflect what was built.
    targets, dependencies = scan()

    newDependencyInfo = self.configuration.createDependencyInfo(
      targets=targets,
      args=args,
      dependencies=dependencies,
      )

    self.configuration.storeDependencyInfo(newDependencyInfo)

  archiveTask = self.engine.createTask(command)
  archiveTask.start(immediate=True)
+
def getLibraryCommand(self, target, sources):
  """Get the command for constructing a library.

  @return: A tuple (build, scan): 'build' archives the library; 'scan'
  returns a (targets, dependencies) tuple when called.
  """
  # The base class cannot archive anything; subclasses must override.
  message = "Don't know how to archive %s\n" % target
  self.engine.raiseError(message)
+
def buildModule(self, target, sources, importLibrary, installName):
  """Perform the actual build of a module (shared library).

  @param target: Path of the target module file.
  @param sources: Paths of the object files and libraries to link.
  @param importLibrary: Optional path of an import library to produce
  alongside the module (toolchain dependent).
  @param installName: Install name passed through to the toolchain —
  NOTE(review): exact semantics depend on the concrete compiler; confirm
  per toolchain.
  """
  link, scan = self.getModuleCommands(target, sources, importLibrary, installName)

  # The commands' reprs act as the dependency 'args' signature.
  args = [repr(link), repr(scan)]

  # Check if the target needs building
  _, reasonToBuild = self.configuration.checkDependencyInfo(target, args)
  if not reasonToBuild:
    return # Target is up to date
  self.engine.logger.outputDebug(
    "reason",
    "Rebuilding '" + target + "' because " + reasonToBuild + ".\n",
    )

  def command():
    message = self.moduleMessage(target, sources, cached=False)
    self.engine.logger.outputInfo(message)

    link()

    # Scan after linking so dependencies reflect what was built.
    targets, dependencies = scan()

    newDependencyInfo = self.configuration.createDependencyInfo(
      targets=targets,
      args=args,
      dependencies=dependencies,
      )

    self.configuration.storeDependencyInfo(newDependencyInfo)

  moduleTask = self.engine.createTask(command)
  moduleTask.start(immediate=True)
+
def getModuleCommands(self, target, sources, importLibrary, installName):
  """Get the commands for linking a module.

  @return: A tuple (link, scan): 'link' performs the link; 'scan'
  returns a (targets, dependencies) tuple when called.
  """
  # The base class cannot link anything; subclasses must override.
  message = "Don't know how to link %s\n" % target
  self.engine.raiseError(message)
+
def buildProgram(self, target, sources):
  """Perform the actual build of a program.

  @param target: Path of the target program file.
  @type target: string

  @param sources: Paths of the source object files and
  libraries to link.
  @type sources: list of string
  """

  link, scan = self.getProgramCommands(target, sources)

  # The commands' reprs act as the dependency 'args' signature.
  args = [repr(link), repr(scan)]

  # Check if the target needs building
  _, reasonToBuild = self.configuration.checkDependencyInfo(target, args)
  if not reasonToBuild:
    return # Target is up to date
  self.engine.logger.outputDebug(
    "reason",
    "Rebuilding '" + target + "' because " + reasonToBuild + ".\n",
    )

  def command():
    message = self.programMessage(target, sources, cached=False)
    self.engine.logger.outputInfo(message)

    link()

    # Scan after linking so dependencies reflect what was built.
    targets, dependencies = scan()

    newDependencyInfo = self.configuration.createDependencyInfo(
      targets=targets,
      args=args,
      dependencies=dependencies,
      )

    self.configuration.storeDependencyInfo(newDependencyInfo)

  programTask = self.engine.createTask(command)
  programTask.start(immediate=True)
+
def getProgramCommands(self, target, sources):
  """Get the commands for linking a program.

  @param target: path to the target file
  @type target: string

  @param sources: object/library file paths to link into the program.
  @type sources: list of string

  @return: A tuple (link, scan): 'link' performs the link; 'scan'
  returns a (targets, dependencies) tuple when called.
  """
  # The base class cannot link anything; subclasses must override.
  message = "Don't know how to link %s\n" % target
  self.engine.raiseError(message)
+
def buildResource(self, target, source):
  """Perform the actual build of a resource.

  @param target: Path of the target resource file.
  @type target: string

  @param source: Path of the source file.
  @type source: string
  """

  compile, scan = self.getResourceCommand(target, source)

  # The compile command's repr acts as the dependency 'args' signature.
  args = repr(compile)

  # Check if the target needs building
  _, reasonToBuild = self.configuration.checkDependencyInfo(target, args)
  if not reasonToBuild:
    return # Target is up to date
  self.engine.logger.outputDebug(
    "reason",
    "Rebuilding '" + target + "' because " + reasonToBuild + ".\n",
    )

  def command():
    message = self.resourceMessage(target, source, cached=False)
    self.engine.logger.outputInfo(message)

    compile()

    # Scan after compiling so dependencies reflect what was built.
    targets, dependencies = scan()

    newDependencyInfo = self.configuration.createDependencyInfo(
      targets=targets,
      args=args,
      dependencies=dependencies,
      )

    self.configuration.storeDependencyInfo(newDependencyInfo)

  resourceTask = self.engine.createTask(command)
  resourceTask.start(immediate=True)
+
def getResourceCommand(self, target, sources):
  """Get the command for constructing a resource.

  NOTE(review): buildResource() passes a single source path here even
  though the parameter is named 'sources' — consider renaming (kept as
  is to preserve the interface for keyword callers).

  @return: A tuple (build, scan): 'build' compiles the resource; 'scan'
  returns a (targets, dependencies) tuple when called.
  """
  # The base class cannot build resources; subclasses must override.
  message = "Don't know how to compile %s\n" % target
  self.engine.raiseError(message)
Index: cake/cake/library/compilers/dummy.py
===================================================================
--- cake/cake/library/compilers/dummy.py (revision 0)
+++ cake/cake/library/compilers/dummy.py (working copy)
@@ -0,0 +1,122 @@
+"""A Dummy Compiler.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import cake.filesys
+import cake.path
+from cake.library import memoise, getPaths
+from cake.library.compilers import Compiler, makeCommand
+
class DummyCompiler(Compiler):
  """A do-nothing compiler used to exercise the build framework.

  Every 'build' simply writes an empty target file, and dependency
  scanning just reports the input sources, so builds are fast and
  platform independent.
  """

  # File naming conventions mimic a Windows/MSVC-style toolchain.
  objectSuffix = '.obj'
  libraryPrefixSuffixes = [('', '.lib')]
  modulePrefixSuffixes = [('', '.dll')]
  programSuffix = '.exe'
  pchSuffix = '.pch'
  _name = 'dummy'

  def __init__(self, configuration):
    Compiler.__init__(self, configuration)

  @memoise
  def _getCompileArgs(self):
    # Build the (fake) compiler command line from the current settings.
    args = ['cc', '/c']
    if self.debugSymbols:
      args.append('/debug')
    if self.optimisation != self.NO_OPTIMISATION:
      args.append('/O')
    if self.enableRtti:
      args.append('/rtti')
    if self.enableExceptions:
      args.append('/ex')
    if self.language:
      args.append('/lang:%s' % self.language)
    args.extend('/I%s' % p for p in self.getIncludePaths())
    args.extend('/D%s' % d for d in self.getDefines())
    args.extend('/FI%s' % p for p in getPaths(self.getForcedIncludes()))
    return args

  def getPchCommands(self, target, source, header, object):
    # See Compiler.getPchCommands for the (compile, args, canCache) contract.
    compilerArgs = list(self._getCompileArgs())
    compilerArgs += ['/H' + header, source, '/o' + target]

    def compile():
      self.engine.logger.outputDebug("run", "%s\n" % " ".join(compilerArgs))
      absTarget = self.configuration.abspath(target)
      # Write an empty file to stand in for the real compiler output.
      cake.filesys.writeFile(absTarget, "".encode("latin1"))
      dependencies = [source]
      return dependencies

    canBeCached = True
    return compile, compilerArgs, canBeCached

  def getObjectCommands(self, target, source, pch, shared):
    # See Compiler.getObjectCommands for the (compile, args, canCache) contract.
    compilerArgs = list(self._getCompileArgs())
    compilerArgs += [source, '/o' + target]

    def compile():
      self.engine.logger.outputDebug("run", "%s\n" % " ".join(compilerArgs))
      absTarget = self.configuration.abspath(target)
      cake.filesys.writeFile(absTarget, "".encode("latin1"))

      # The precompiled header (if any) is also a dependency.
      dependencies = [source]
      if pch is not None:
        dependencies.append(pch.path)
      return dependencies

    canBeCached = True
    return compile, compilerArgs, canBeCached

  def getLibraryCommand(self, target, sources):
    args = ['ar'] + sources + ['/o' + target]

    @makeCommand(args)
    def archive():
      self.engine.logger.outputDebug("run", "%s\n" % " ".join(args))
      absTarget = self.configuration.abspath(target)
      cake.filesys.writeFile(absTarget, "".encode("latin1"))

    @makeCommand("dummy-scanner")
    def scan():
      # The library depends on exactly the objects that were archived.
      return [target], sources

    return archive, scan

  def getProgramCommands(self, target, sources):
    return self._getLinkCommands(target, sources, dll=False)

  def getModuleCommands(self, target, sources, importLibrary, installName):
    # installName is accepted for interface compatibility; it is unused
    # by the dummy linker below.
    return self._getLinkCommands(target, sources, importLibrary, installName, dll=True)

  def _getLinkCommands(self, target, sources, importLibrary=None, installName=None, dll=False):
    # Shared implementation for both program and module links.
    objects, libraries = self._resolveObjects()

    libFlags = ['-l' + lib for lib in libraries]
    args = ['ld'] + sources + objects + libFlags + ['/o' + target]

    if importLibrary:
      importLibrary = self.configuration.abspath(importLibrary)

    @makeCommand(args)
    def link():
      self.engine.logger.outputDebug("run", "%s\n" % " ".join(args))
      absTarget = self.configuration.abspath(target)
      cake.filesys.writeFile(absTarget, "".encode("latin1"))
      # Modules also produce an (empty) import library beside the dll.
      if dll and importLibrary:
        cake.filesys.writeFile(importLibrary, "".encode("latin1"))

    @makeCommand("dummy-scanner")
    def scan():
      targets = [target]
      if dll and importLibrary:
        targets.append(importLibrary)
      dependencies = list(sources)
      dependencies += objects
      dependencies += self._scanForLibraries(libraries)
      return targets, dependencies

    return link, scan
Index: cake/cake/library/compilers/gcc.py
===================================================================
--- cake/cake/library/compilers/gcc.py (revision 0)
+++ cake/cake/library/compilers/gcc.py (working copy)
@@ -0,0 +1,654 @@
+"""The Gcc Compiler.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+from cake.library import memoise, getPaths
+from cake.library.compilers import Compiler, makeCommand, CompilerNotFoundError
+import cake.filesys
+import cake.path
+import cake.system
+import os
+import os.path
+import re
+import subprocess
+
+def _getMinGWInstallDir():
+ """Returns the MinGW install directory.
+
+ Typically: 'C:\MinGW'.
+
+ @return: The path to the MinGW install directory.
+ @rtype: string
+
+ @raise WindowsError: If MinGW is not installed.
+ """
+ import _winreg
+
+ from cake.registry import queryString
+
+ possibleSubKeys = [
+ r"SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\MinGW",
+ r"SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\{AC2C1BDB-1E91-4F94-B99C-E716FE2E9C75}_is1",
+ ]
+
+ # Try all known registry locations.
+ for subKey in possibleSubKeys:
+ try:
+ return queryString(_winreg.HKEY_LOCAL_MACHINE, subKey, "InstallLocation")
+ except WindowsError:
+ # If this is the last possibility, re-raise the exception.
+ if subKey is possibleSubKeys[-1]:  # 'is' works here: subKey iterates the very objects in the list
+ raise
+
+def _getGccVersion(gccExe):
+ """Returns the Gcc version number given an executable.
+ """
+ args = [gccExe, '-dumpversion']
+ try:
+ p = subprocess.Popen(
+ args=args,
+ stdout=subprocess.PIPE,
+ )
+ except EnvironmentError, e:  # Python 2 except syntax; raised when the exe can't be launched
+ raise EnvironmentError(
+ "cake: failed to launch %s: %s\n" % (args[0], str(e))
+ )
+ stdoutText = p.stdout.read()
+ p.stdout.close()
+ exitCode = p.wait()
+
+ if exitCode != 0:
+ raise EnvironmentError(
+ "%s: failed with exit code %i\n" % (args[0], exitCode)
+ )
+
+ return [
+ int(n) for n in stdoutText.strip().split(".")  # e.g. "4.5.2" -> [4, 5, 2]
+ ]
+
+def findMinGWCompiler(configuration):
+ """Returns a MinGW compiler if found.
+
+ @raise CompilerNotFoundError: When a valid MinGW compiler could not be found.
+ """
+ try:
+ installDir = _getMinGWInstallDir()
+ binDir = cake.path.join(installDir, "bin")
+ arExe = cake.path.join(binDir, "ar.exe")
+ gccExe = cake.path.join(binDir, "gcc.exe")
+ rcExe = cake.path.join(binDir, "windres.exe")
+
+ def checkFile(path):
+ if not cake.filesys.isFile(path):
+ raise WindowsError(path + " is not a file.")  # caught by the enclosing except below
+
+ checkFile(arExe)
+ checkFile(gccExe)
+ checkFile(rcExe)
+
+ try:
+ version = _getGccVersion(gccExe)
+ except EnvironmentError:
+ raise CompilerNotFoundError("Could not find MinGW version.")  # not a WindowsError, so propagates past the outer except
+
+ return WindowsMinGWCompiler(
+ configuration=configuration,
+ arExe=arExe,
+ gccExe=gccExe,
+ rcExe=rcExe,
+ binPaths=[binDir],
+ version=version,
+ )
+ except WindowsError:
+ raise CompilerNotFoundError("Could not find MinGW install directory.")  # NOTE(review): message also masks missing-file failures above
+
+def findGccCompiler(configuration, platform=None):
+ """Returns a GCC compiler if found.
+
+ @param platform: The platform/operating system to compile for. If
+ platform is None then the current platform is used.
+
+ @raise CompilerNotFoundError: When a valid gcc compiler could not be found.
+ """
+ if platform is None:
+ platform = cake.system.platform()
+ platform = platform.lower()
+
+ isDarwin = platform.startswith("darwin")
+
+ paths = os.environ.get('PATH', '').split(os.path.pathsep)
+
+ try:
+ binPaths = []
+
+ def checkFile(path):
+ if not cake.filesys.isFile(path):
+ raise EnvironmentError(path + " is not a file.")
+
+ if isDarwin:
+ libtoolExe = cake.system.findExecutable("libtool", paths)  # Darwin archives with libtool, not ar
+ checkFile(libtoolExe)
+ binPaths.append(cake.path.dirName(libtoolExe))
+ else:
+ arExe = cake.system.findExecutable("ar", paths)  # arExe only bound on non-Darwin; Darwin branch below doesn't use it
+ checkFile(arExe)
+ binPaths.append(cake.path.dirName(arExe))
+
+ gccExe = cake.system.findExecutable("gcc", paths)
+ checkFile(gccExe)
+ binPaths.append(cake.path.dirName(gccExe))
+
+ binPaths = list(set(binPaths)) # Only want unique paths
+
+ try:
+ version = _getGccVersion(gccExe)
+ except EnvironmentError:
+ raise CompilerNotFoundError("Could not find GCC version.")
+
+ if platform.startswith("windows") or platform.startswith("cygwin"):
+ return WindowsGccCompiler(
+ configuration=configuration,
+ arExe=arExe,
+ gccExe=gccExe,
+ binPaths=binPaths,
+ version=version,
+ )
+ elif isDarwin:
+ return MacGccCompiler(
+ configuration=configuration,
+ gccExe=gccExe,
+ libtoolExe=libtoolExe,
+ binPaths=binPaths,
+ version=version,
+ )
+ elif platform.startswith("ps3"):
+ return Ps3GccCompiler(
+ configuration=configuration,
+ arExe=arExe,
+ gccExe=gccExe,
+ binPaths=binPaths,
+ version=version,
+ )
+ else:
+ return GccCompiler(
+ configuration=configuration,
+ arExe=arExe,
+ gccExe=gccExe,
+ binPaths=binPaths,
+ version=version,
+ )
+ except EnvironmentError:
+ raise CompilerNotFoundError("Could not find GCC compiler, AR archiver or libtool.")
+
+class GccCompiler(Compiler):  # Base GCC toolchain driver: compile, archive (ar) and link (gcc) command builders.
+
+ _name = 'gcc'
+
+ def __init__(
+ self,
+ configuration,
+ arExe=None,
+ gccExe=None,
+ libtoolExe=None,
+ binPaths=None,
+ version=None,
+ ):
+ Compiler.__init__(self, configuration=configuration, binPaths=binPaths)
+ self._arExe = arExe
+ self._gccExe = gccExe
+ self._libtoolExe = libtoolExe
+ self.__version = version  # e.g. [4, 5, 2]
+ self.__messageExpression = re.compile(r'^(.+?):(\d+)(:\d+)?:', re.MULTILINE)  # matches gcc's 'file:line[:col]:' diagnostics
+
+ @property
+ def version(self):
+ return self.__version
+
+ def _formatMessage(self, inputText):
+ """Format errors to be clickable in MS Visual Studio.
+ """
+ if self.messageStyle != self.MSVS_CLICKABLE:
+ return inputText
+
+ outputLines = []
+ pos = 0
+ while True:
+ m = self.__messageExpression.search(inputText, pos)
+ if m:
+ path, line, _column = m.groups()
+ startPos = m.start()
+ endPos = m.end()
+ if startPos != pos:
+ outputLines.append(inputText[pos:startPos])
+ path = self.configuration.abspath(os.path.normpath(path))
+ outputLines.append('%s(%s) :' % (path, line))  # rewrite 'file:line:' as MSVS-style 'file(line) :'
+ pos = endPos
+ else:
+ outputLines.append(inputText[pos:])
+ break
+ return ''.join(outputLines)
+
+ def _outputStdout(self, text):
+ Compiler._outputStdout(self, self._formatMessage(text))
+
+ def _outputStderr(self, text):
+ Compiler._outputStderr(self, self._formatMessage(text))
+
+ @memoise
+ def _getCompileArgs(self, suffix, shared=False, pch=False):
+ args = [self._gccExe, '-c', '-MD']  # -MD: emit a .d dependency file alongside the object
+
+ language = self._getLanguage(suffix, pch)
+ if pch: # Pch requires '-header' so must use derived language.
+ if language is not None:
+ args.extend(['-x', language])
+ else:
+ if self.language is not None:
+ args.extend(['-x', self.language])
+
+ if self.warningsAsErrors:
+ args.append('-Werror')
+
+ if self.warningLevel == 0:
+ args.append('-w')
+ elif self.warningLevel >= 4:
+ args.append('-Wall')
+
+ if self.debugSymbols:
+ args.append('-g')
+
+ if language in ['c++', 'c++-header', 'c++-cpp-output']:
+ args.extend(self.cppFlags)
+ elif language in ['c', 'c-header', 'cpp-output']:
+ args.extend(self.cFlags)
+ elif language in ['objective-c', 'objective-c-header', 'objc-cpp-output']:
+ args.extend(self.mFlags)
+ elif language in ['objective-c++', 'objective-c++-header', 'objective-c++-cpp-output']:
+ args.extend(self.mmFlags)
+
+ if self.enableRtti is not None:
+ if self.enableRtti:
+ args.append('-frtti')
+ else:
+ args.append('-fno-rtti')
+
+ if self.enableExceptions is not None:
+ if self.enableExceptions:
+ args.append('-fexceptions')
+ else:
+ args.append('-fno-exceptions')
+
+ if self.useFunctionLevelLinking:
+ args.extend([
+ '-ffunction-sections',
+ '-fdata-sections',
+ ])
+
+ if self.optimisation == self.NO_OPTIMISATION:
+ args.append('-O0')
+ elif self.optimisation == self.PARTIAL_OPTIMISATION:
+ args.append('-O2')
+ elif self.optimisation == self.FULL_OPTIMISATION:
+ args.append('-O4')  # NOTE(review): GCC clamps levels above 3 to -O3; '-O3' would be the conventional spelling
+
+ if self.useSse:
+ args.append('-msse')
+
+ if shared and self.__version[0] >= 4:  # -fvisibility only exists from GCC 4 onward
+ args.append('-fvisibility=hidden')
+
+ for p in self.getIncludePaths():
+ args.extend(['-I', p])
+
+ args.extend('-D' + d for d in self.getDefines())
+
+ for p in getPaths(self.getForcedIncludes()):
+ args.extend(['-include', p])
+
+ return args
+
+ def _getLanguage(self, suffix, pch=False):
+ language = self.language
+
+ if language is None:
+ # Attempt to derive the language based on the suffix.
+ if suffix in self.cSuffixes:
+ language = 'c'
+ elif suffix in self.cppSuffixes:
+ language = 'c++'
+ elif suffix in self.mSuffixes:
+ language = 'objective-c'
+ elif suffix in self.mmSuffixes:
+ language = 'objective-c++'
+ elif suffix in self.sSuffixes:
+ language = 'assembler'
+
+ # Pch generation requires '-header' at the end.
+ if pch and language in ['c', 'c++', 'objective-c', 'objective-c++']:
+ language += '-header'
+
+ return language
+
+ def getPchCommands(self, target, source, header, object):
+ depPath = self._generateDependencyFile(target)
+ args = list(self._getCompileArgs(cake.path.extension(source), shared=False, pch=True))
+ args.extend([source, '-o', target])
+
+ def compile():
+ dependencies = self._runProcess(args + ['-MF', depPath], target)
+ dependencies.extend(self._scanDependencyFile(depPath, target))
+ return dependencies
+
+ canBeCached = True
+ return compile, args, canBeCached
+
+ def getObjectCommands(self, target, source, pch, shared):
+ depPath = self._generateDependencyFile(target)
+ args = list(self._getCompileArgs(cake.path.extension(source), shared))
+ args.extend([source, '-o', target])
+
+ if pch is not None:
+ args.extend([
+ '-Winvalid-pch',
+ '-include', cake.path.stripExtension(pch.path),  # gcc appends .gch itself when looking up the pch
+ ])
+
+ def compile():
+ dependencies = self._runProcess(args + ['-MF', depPath], target)
+ dependencies.extend(self._scanDependencyFile(depPath, target))
+
+ if pch is not None:
+ dependencies.append(pch.path)
+
+ return dependencies
+
+ canBeCached = True
+ return compile, args, canBeCached
+
+ @memoise
+ def _getCommonLibraryArgs(self):
+ # q - Quick append file to the end of the archive
+ # c - Don't warn if we had to create a new file
+ # s - Build an index
+ args = [self._arExe, '-qcs']
+ args.extend(self.libraryFlags)
+ return args
+
+ def getLibraryCommand(self, target, sources):
+ args = list(self._getCommonLibraryArgs())
+ args.append(target)
+ args.extend(sources)
+
+ @makeCommand(args)
+ def archive():
+ cake.filesys.remove(target)  # 'ar -q' appends, so remove any stale archive first
+ self._runProcess(args, target)
+
+ @makeCommand("lib-scan")
+ def scan():
+ # TODO: Add dependencies on DLLs used by ar.exe
+ return [target], [args[0]] + sources
+
+ return archive, scan
+
+ @memoise
+ def _getCommonLinkArgs(self, dll):
+ args = [self._gccExe]
+ if dll:
+ args.extend(self.moduleFlags)
+ else:
+ args.extend(self.programFlags)
+
+ if dll:
+ args.append('-shared')
+
+ args.extend('-L' + p for p in self.getLibraryPaths())
+ return args
+
+ def getProgramCommands(self, target, sources):
+ return self._getLinkCommands(target, sources, dll=False)
+
+ def getModuleCommands(self, target, sources, importLibrary, installName):
+ return self._getLinkCommands(target, sources, importLibrary, installName, dll=True)
+
+ def _getLinkCommands(self, target, sources, importLibrary=None, installName=None, dll=False):
+
+ objects, libraries = self._resolveObjects()
+
+ if importLibrary:
+ importLibrary = self.configuration.abspath(importLibrary)
+
+ args = list(self._getCommonLinkArgs(dll))
+ args.extend(sources)
+ args.extend(objects)
+ for lib in libraries:
+ if os.path.sep in lib or os.path.altsep and os.path.altsep in lib:  # parsed as: sep in lib or (altsep and altsep in lib) - path-like libs are passed verbatim
+ args.append(lib)
+ else:
+ args.append('-l' + lib)
+ args.extend(['-o', target])
+
+ if self.outputMapFile:
+ args.append('-Wl,-Map=' + cake.path.stripExtension(target) + '.map')
+
+ @makeCommand(args)
+ def link():
+ self._runProcess(args, target)
+
+ if dll and importLibrary:
+ # Since the target .dylib is also the import library, copy it to the
+ # .a 'importLibrary' filename the user expects
+ cake.filesys.makeDirs(cake.path.dirName(importLibrary))
+ cake.filesys.copyFile(self.configuration.abspath(target), importLibrary)
+
+ @makeCommand("link-scan")
+ def scan():
+ # TODO: Add dependencies on DLLs used by gcc.exe
+ # Also add dependencies on system libraries, perhaps
+ # by parsing the output of '-Wl,--trace'
+ targets = [target]
+ if dll and importLibrary:
+ targets.append(importLibrary)
+ dependencies = [args[0]]
+ dependencies += sources
+ dependencies += objects
+ dependencies += self._scanForLibraries(libraries)
+ return targets, dependencies
+
+ return link, scan
+
+class WindowsGccCompiler(GccCompiler):  # GCC on Windows/Cygwin: Windows file suffixes plus windres resource compilation.
+
+ objectSuffix = '.obj'
+ libraryPrefixSuffixes = [('', '.lib'), ('lib', '.a')]
+ modulePrefixSuffixes = [('', '.dll'), ('lib', '.so')]
+ programSuffix = '.exe'
+ resourceSuffix = '.obj'
+
+ def __init__(
+ self,
+ configuration,
+ arExe=None,
+ gccExe=None,
+ rcExe=None,
+ binPaths=None,
+ version=None,
+ ):
+ GccCompiler.__init__(
+ self,
+ configuration=configuration,
+ arExe=arExe,
+ gccExe=gccExe,
+ binPaths=binPaths,
+ version=version,
+ )
+ self.__rcExe = rcExe  # windres.exe, used by getResourceCommand below
+
+ @memoise
+ def _getCommonLinkArgs(self, dll):
+ args = GccCompiler._getCommonLinkArgs(self, dll)
+
+ if self.useFunctionLevelLinking:
+ args.append('-Wl,--gc-sections')  # drop unreferenced sections produced by -ffunction-sections
+
+ return args
+
+ @memoise
+ def _getCommonResourceArgs(self):
+ args = [self.__rcExe]
+ args.extend(self.resourceFlags)
+ args.extend("-D" + define for define in self.getDefines())
+ args.extend("-I" + path for path in self.getIncludePaths())
+ return args
+
+ def getResourceCommand(self, target, source):
+
+ # TODO: Dependency scanning of .h files (can we use gcc and '-MD'?)
+ args = list(self._getCommonResourceArgs())
+ args.append('-o' + target)
+ args.append('-i' + source)
+
+ @makeCommand(args)
+ def compile():
+ cake.filesys.remove(self.configuration.abspath(target))
+ self._runProcess(args, target)
+
+ @makeCommand("rc-scan")
+ def scan():
+ # TODO: Add dependencies on DLLs used by rc.exe
+ return [target], [args[0], source]
+
+ return compile, scan
+
+class WindowsMinGWCompiler(WindowsGccCompiler):  # MinGW flavour: adds subsystem selection to the link line.
+
+ _name = 'mingw'
+
+ @memoise
+ def _getCommonLinkArgs(self, dll):
+ args = WindowsGccCompiler._getCommonLinkArgs(self, dll)
+
+ # TODO: If this breaks try supporting the older '-mwindows' flag for older
+ # compiler versions. The flag below works for MinGW/GCC 4.5.2.
+ if self.subSystem is not None:
+ args.append('-Wl,-subsystem,' + self.subSystem.lower())
+
+ return args
+
+class MacGccCompiler(GccCompiler):  # GCC on Darwin: libtool archiving, .dylib modules, install_name/rpath handling.
+
+ modulePrefixSuffixes = [('lib', '.dylib')]
+
+ @memoise
+ def _getCommonLibraryArgs(self):
+ args = [self._libtoolExe]  # Darwin archives via libtool rather than ar
+ args.extend(self.libraryFlags)
+ return args
+
+ def getLibraryCommand(self, target, sources):
+ args = list(self._getCommonLibraryArgs())
+ args.append('-static')
+ args.extend(['-o', target])
+ args.extend(sources)
+
+ @makeCommand(args)
+ def archive():
+ cake.filesys.remove(target)
+ self._runProcess(args, target)
+
+ @makeCommand("lib-scan")
+ def scan():
+ # TODO: Add dependencies on DLLs used by ar.exe
+ return [target], [args[0]] + sources
+
+ return archive, scan
+
+ @memoise
+ def _getCommonLinkArgs(self, dll):
+ args = GccCompiler._getCommonLinkArgs(self, dll)
+
+ if dll:
+ args.append('-dynamiclib')
+ args.extend(["-current_version", "1.0"])
+ args.extend(["-compatibility_version", "1.0"])
+
+ return args
+
+ def _getLinkCommands(self, target, sources, importLibrary=None, installName=None, dll=False):
+
+ objects, libraries = self._resolveObjects()
+
+ if importLibrary:
+ importLibrary = self.configuration.abspath(importLibrary)
+
+ args = list(self._getCommonLinkArgs(dll))
+
+ # Should only need this if we're linking with any shared
+ # libs, but I don't know how to detect that
+ args.extend(["-Wl,-rpath,@loader_path/."])
+
+ args.extend(sources)
+ args.extend(objects)
+ for lib in libraries:
+ if os.path.sep in lib or os.path.altsep and os.path.altsep in lib:  # parsed as: sep in lib or (altsep and altsep in lib)
+ args.append(lib)
+ else:
+ args.append('-l' + lib)
+ args.extend(['-o', target])
+
+ if dll and installName:
+ args.extend(["-install_name", installName])
+
+ if self.outputMapFile:
+ mapFile = cake.path.stripExtension(target) + '.map'
+ args.append('-map=' + mapFile)
+
+ @makeCommand(args)
+ def link():
+ self._runProcess(args, target)
+
+ if dll and importLibrary:
+ # Since the target .dylib is also the import library, copy it to the
+ # .a 'importLibrary' filename the user expects
+ cake.filesys.makeDirs(cake.path.dirName(importLibrary))
+ cake.filesys.copyFile(self.configuration.abspath(target), importLibrary)
+
+ @makeCommand("link-scan")
+ def scan():
+ # TODO: Add dependencies on DLLs used by gcc.exe
+ # Also add dependencies on system libraries, perhaps
+ # by parsing the output of '-Wl,--trace'
+ targets = [target]
+ if dll and importLibrary:
+ targets.append(importLibrary)
+ if self.outputMapFile:
+ targets.append(mapFile)  # mapFile closure var is only bound when outputMapFile is set, same guard as above
+
+ return targets, [args[0]] + sources  # NOTE(review): unlike the base class, omits objects and scanned libraries from dependencies - confirm intended
+
+ return link, scan
+
+class Ps3GccCompiler(GccCompiler):  # PS3 toolchain flavour: SPRX/SELF output formats and strip-unused link flags.
+
+ modulePrefixSuffixes = [('', '.sprx')]
+ programSuffix = '.self'
+
+ @memoise
+ def _getCommonLinkArgs(self, dll):
+ args = GccCompiler._getCommonLinkArgs(self, dll)
+
+ if dll:
+ args.append('-Wl,--oformat=fsprx')
+ else:
+ args.append('-Wl,--oformat=fself')
+
+ if self.useFunctionLevelLinking:
+ args.extend([
+ '-Wl,-strip-unused',
+ '-Wl,-strip-unused-data',
+ ])
+
+ return args
Index: cake/cake/library/compilers/msvc.py
===================================================================
--- cake/cake/library/compilers/msvc.py (revision 0)
+++ cake/cake/library/compilers/msvc.py (working copy)
@@ -0,0 +1,1092 @@
+"""The Microsoft Visual C++ Compiler.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import os
+import os.path
+import re
+import threading
+
+import cake.filesys
+import cake.path
+import cake.system
+from cake.library.compilers import Compiler, makeCommand, CompilerNotFoundError
+from cake.library import memoise, getPaths, getTasks
+from cake.msvs import getMsvcProductDir, getMsvsInstallDir, getPlatformSdkDir
+
+def _toArchitectureDir(architecture):
+ """Re-map 'x64' to 'amd64' to match MSVC directory names.
+ """
+ return {'x64':'amd64'}.get(architecture, architecture)  # anything else passes through unchanged
+
+def _createMsvcCompiler(
+ configuration,
+ version,
+ edition,
+ architecture,
+ hostArchitecture,
+ ):
+ """Attempt to create an MSVC compiler.
+
+ @raise WindowsError: If the compiler could not be created.
+ @return: The newly created compiler.
+ @rtype: L{MsvcCompiler}
+ """
+ registryPath = edition + '\\' + version
+ msvsInstallDir = getMsvsInstallDir(registryPath)
+ msvcProductDir = getMsvcProductDir(registryPath)
+
+ # Use the compilers platform SDK if installed
+ platformSdkDir = cake.path.join(msvcProductDir, "PlatformSDK")
+ if not cake.filesys.isDir(platformSdkDir):
+ platformSdkDir = getPlatformSdkDir()
+
+ msvcRootBinDir = cake.path.join(msvcProductDir, "bin")
+
+ if architecture == 'x86':
+ # Root bin directory is always used for the x86 compiler
+ msvcBinDir = msvcRootBinDir
+ else:
+ msvcArchitecture = _toArchitectureDir(architecture)
+ msvcHostArchitecture = _toArchitectureDir(hostArchitecture)
+
+ if msvcArchitecture != msvcHostArchitecture:
+ # Determine the bin directory for cross-compilers
+ msvcBinDir = cake.path.join(
+ msvcRootBinDir,
+ "%s_%s" % (
+ msvcHostArchitecture,
+ msvcArchitecture,
+ ),
+ )
+ else:
+ # Determine the bin directory for 64-bit compilers
+ msvcBinDir = cake.path.join(
+ msvcRootBinDir,
+ "%s" % msvcArchitecture,
+ )
+
+ # Find the host bin dir for exe's such as 'cvtres.exe'
+ if hostArchitecture == 'x86':
+ msvcHostBinDir = msvcRootBinDir
+ else:
+ msvcHostArchitecture = _toArchitectureDir(hostArchitecture)
+ msvcHostBinDir = cake.path.join(msvcRootBinDir, msvcHostArchitecture)
+
+ msvcIncludeDir = cake.path.join(msvcProductDir, "include")
+ platformSdkIncludeDir = cake.path.join(platformSdkDir, "Include")
+
+ if architecture == 'x86':
+ msvcLibDir = cake.path.join(msvcProductDir, "lib")
+ platformSdkLibDir = cake.path.join(platformSdkDir, "Lib")
+ elif architecture in ['x64', 'amd64']:
+ msvcLibDir = cake.path.join(msvcProductDir, "lib", 'amd64')
+ platformSdkLibDir = cake.path.join(platformSdkDir, "Lib", "amd64")
+ # External Platform SDKs may use 'x64' instead of 'amd64'
+ if not cake.filesys.isDir(platformSdkLibDir):
+ platformSdkLibDir = cake.path.join(platformSdkDir, "Lib", "x64")
+ elif architecture == 'ia64':
+ msvcLibDir = cake.path.join(msvcProductDir, "lib", 'ia64')
+ platformSdkLibDir = cake.path.join(platformSdkDir, "Lib", "IA64")
+ # NOTE: architecture is pre-validated by findMsvcCompiler, so one branch above always matches
+
+ clExe = cake.path.join(msvcBinDir, "cl.exe")
+ libExe = cake.path.join(msvcBinDir, "lib.exe")
+ linkExe = cake.path.join(msvcBinDir, "link.exe")
+ rcExe = cake.path.join(msvcBinDir, "rc.exe")
+ mtExe = cake.path.join(msvcBinDir, "mt.exe")
+ bscExe = cake.path.join(msvcRootBinDir, "bscmake.exe")
+
+ if not cake.filesys.isFile(rcExe):
+ rcExe = cake.path.join(msvcProductDir, "Bin", "rc.exe")
+ if not cake.filesys.isFile(rcExe):
+ rcExe = cake.path.join(platformSdkDir, "Bin", "rc.exe")
+
+ if not cake.filesys.isFile(mtExe):
+ mtExe = cake.path.join(msvcProductDir, "Bin", "mt.exe")
+ if not cake.filesys.isFile(mtExe):
+ mtExe = cake.path.join(platformSdkDir, "Bin", "mt.exe")
+
+ def checkFile(path):
+ if not cake.filesys.isFile(path):
+ raise WindowsError(path + " is not a file.")
+
+ def checkDirectory(path):
+ if not cake.filesys.isDir(path):
+ raise WindowsError(path + " is not a directory.")
+
+ checkFile(clExe)
+ checkFile(libExe)
+ checkFile(linkExe)
+ checkFile(rcExe)
+ checkFile(mtExe)
+ if not cake.filesys.isFile(bscExe):
+ bscExe = None # Not fatal. This just means we can't build browse info files.
+
+ checkDirectory(msvcIncludeDir)
+ checkDirectory(platformSdkIncludeDir)
+ checkDirectory(msvcLibDir)
+ checkDirectory(platformSdkLibDir)
+
+ binPaths = [msvcHostBinDir, msvsInstallDir]
+ includePaths = [msvcIncludeDir, platformSdkIncludeDir]
+ libraryPaths = [msvcLibDir, platformSdkLibDir]
+
+ compiler = MsvcCompiler(
+ configuration=configuration,
+ clExe=clExe,
+ libExe=libExe,
+ linkExe=linkExe,
+ rcExe=rcExe,
+ mtExe=mtExe,
+ bscExe=bscExe,
+ binPaths=binPaths,
+ includePaths=includePaths,
+ libraryPaths=libraryPaths,
+ architecture=architecture,
+ )
+
+ return compiler
+
+def findMsvcCompiler(
+ configuration,
+ version=None,
+ architecture=None,
+ ):
+ """Returns an MSVC compiler given a version and architecture.
+
+ Raises an EnvironmentError if a compiler or matching platform SDK
+ cannot be found.
+
+ @param version: The specific version to find. If version is None the
+ latest version is found instead.
+ @param architecture: The machine architecture to compile for. If it's
+ None an architecture that is a closest match to the host architecture
+ is used.
+
+ @return: A newly created MSVC compiler.
+ @rtype: L{MsvcCompiler}
+
+ @raise ValueError: When an invalid version or architecture is passed in.
+ @raise CompilerNotFoundError: When a valid compiler or Windows SDK
+ could not be found.
+ """
+
+ validArchitectures = ['x86', 'x64', 'amd64', 'ia64']
+
+ # Valid versions - prefer later versions over earlier ones
+ versions = [
+ '10.0',
+ '9.0',
+ '8.0',
+ '7.1',
+ '7.0',
+ ]
+
+ # Valid editions - prefer Enterprise edition over Express
+ editions = [
+ 'VisualStudio',
+ 'VCExpress',
+ ]
+
+ # Determine host architecture
+ hostArchitecture = cake.system.architecture().lower()
+ if hostArchitecture not in validArchitectures:
+ raise ValueError("Unknown host architecture '%s'." % hostArchitecture)
+
+ # Default architecture is hostArchitecture
+ if architecture is None:
+ architectures = [hostArchitecture]
+ if hostArchitecture in ('x64', 'amd64'):
+ architectures.append('x86')  # fall back to an x86 compiler on 64-bit hosts
+ else:
+ architecture = architecture.lower()
+ if architecture not in validArchitectures:
+ raise ValueError("Unknown architecture '%s'." % architecture)
+ architectures = [architecture]
+
+ if version is not None:
+ # Validate version
+ if version not in versions:
+ raise ValueError("Unknown version '%s'." % version)
+ # Only check for this version
+ versions = [version]
+
+ for a in architectures:
+ for v in versions:
+ for e in editions:
+ try:
+ return _createMsvcCompiler(configuration, v, e, a, hostArchitecture)
+ except WindowsError:
+ # Try the next version/edition
+ pass
+ else:
+ # for/else: only reached when every combination failed (a success returns above)
+ raise CompilerNotFoundError(
+ "Could not find Microsoft Visual Studio C++ compiler."
+ )
+
+def _mungePathToSymbol(path):
+ return "_PCH_" + hex(abs(hash(path)))[2:]  # derive a C identifier from the path; NOTE(review): hash() is interpreter-dependent, so symbols aren't stable across Python versions
+
+class MsvcCompiler(Compiler):  # MSVC driver: class-level option attributes below, command builders in methods.
+
+ outputFullPath = None
+ """Tell the compiler to output full paths.
+
+ When set to True the compiler will output full (absolute) paths to
+ source files during compilation. This applies to the paths output for
+ warnings/errors and the __FILE__ macro.
+
+ Related compiler options::
+ /FC
+ @type: bool
+ """
+ useBigObjects = None
+ """Increase the number of sections an object file can contain.
+
+ When set to True the compiler may produce bigger object files
+ but each object file may contain more addressable sections (up
+ to 2^32 in Msvc). If set to False only 2^16 addressable sections
+ are available.
+
+ Related compiler options::
+ /bigobj
+ @type: bool
+ """
+ memoryLimit = None
+ """Set the memory limit for the precompiled header.
+
+ The value is scaling factor such that 100 means a memory limit of 50MB,
+ 200 means a memory limit of 100MB, etc.
+ If set to None the default memory limit of 100 (50MB) is used.
+
+ Related compiler options::
+ /Zm
+ @type: int or None
+ """
+ runtimeLibraries = None
+ """Set the runtime libraries to use.
+
+ Possible values are 'debug-dll', 'release-dll', 'debug-static' and
+ 'release-static'.
+
+ Related compiler options::
+ /MD, /MDd, /MT, /MTd
+ @type: string or None
+ """
+ moduleVersion = None
+ """Set the program/module version.
+
+ The version string should be of the form 'major[.minor]'. Where major and
+ minor are decimal integers in the range 0 to 65,535.
+ If set to None the default version 0.0 is used.
+
+ Related compiler options::
+ /VERSION
+ @type: string or None
+ """
+ useStringPooling = None
+ """Use string pooling.
+
+ When set to True the compiler may eliminate duplicate strings by sharing
+ strings that are identical.
+
+ Related compiler options::
+ /GF
+ @type: bool
+ """
+ useMinimalRebuild = None
+ """Use minimal rebuild.
+
+ When set to True the compiler may choose not to recompile your source file
+ if it determines that the information stored in it's dependency information
+ file (.idb) has not changed.
+
+ Related compiler options::
+ /Gm
+ @type: bool
+ """
+ useEditAndContinue = None
+ """Use Edit and Continue.
+
+ When set to True the compiler will produce debug information that supports
+ the Edit and Continue feature. This option is generally not compatible with
+ any form of program/code optimisation. Enabling this option will also
+ enable function-level linking. This option is also not compatible with
+ Common Language Runtime (CLR) compilation.
+
+ Related compiler options::
+ /ZI
+ @type: bool
+ """
+ outputBrowseInfo = None
+ """Output a .sbr file for each object file and generate a final .bsc file.
+
+ NOT FULLY IMPLEMENTED! At the moment Cake will not rebuild the .sbr file
+ unless the associated .obj file is also out of date. Cake also won't yet
+ generate the final .bsc file using bscmake.exe. Perhaps the best way would
+ be to add a bsc(target, sources=objects) build function and require users
+ to generate the bsc file explicitly. ObjectTarget would gain a .sbr member
+ that points to the .sbr file corresponding to the .obj file.
+
+ If enabled the compiler will output a .sbr file that matches the
+ name of each object file. During program or library builds it will use
+ these .sbr files to generate a browse info .bsc file.
+
+ Related compiler options::
+ MSVC: /FR:<target>.sbr
+ @type: bool
+ """
+ errorReport = None
+ """Set the error reporting behaviour.
+
+ This value allows you to set how your program should send internal
+ compiler error (ICE) information to Microsoft.
+ Possible values are 'none', 'prompt', 'queue' and 'send'.
+ When set to None the default error reporting behaviour 'queue' is used.
+
+ Related compiler options::
+ /errorReport
+ @type: string or None
+ """
+ clrMode = None
+ """Set the Common Language Runtime (CLR) mode.
+
+ Set to 'pure' to allow native data types but only managed functions.
+ Set to 'safe' to only allow managed types and functions.
+
+ Related compiler options::
+ /clr, /CLRIMAGETYPE
+ @type: string or None
+ """
+
+ _lineRegex = re.compile('#line [0-9]+ "(?P<path>.+)"', re.MULTILINE)  # matches preprocessor #line directives
+
+ _pdbQueue = {}
+ _pdbQueueLock = threading.Lock()  # serialises access to the shared per-PDB queue across build threads
+
+ objectSuffix = '.obj'
+ libraryPrefixSuffixes = [('', '.lib')]
+ modulePrefixSuffixes = [('', '.dll')]
+ programSuffix = '.exe'
+ pchSuffix = '.pch'
+ pchObjectSuffix = '.obj'
+ manifestSuffix = '.embed.manifest'
+ resourceSuffix = '.res'
+ _name = 'msvc'
+
+ def __init__(
+ self,
+ configuration,
+ clExe=None,
+ libExe=None,
+ linkExe=None,
+ mtExe=None,
+ rcExe=None,
+ bscExe=None,
+ binPaths=None,
+ includePaths=None,
+ libraryPaths=None,
+ architecture=None,
+ ):
+ Compiler.__init__(
+ self,
+ configuration=configuration,
+ binPaths=binPaths,
+ includePaths=includePaths,
+ libraryPaths=libraryPaths,
+ )
+ self.__clExe = clExe
+ self.__libExe = libExe
+ self.__linkExe = linkExe
+ self.__mtExe = mtExe
+ self.__rcExe = rcExe
+ self.__bscExe = bscExe  # may be None: bscmake.exe is optional (see _createMsvcCompiler)
+ self.__architecture = architecture
+ self.__messageExpression = re.compile(r'^(\s*)(.+)\(\d+\) :', re.MULTILINE)  # matches MSVC 'file(line) :' diagnostics
+ self.forcedUsings = []  # .NET assemblies to pass via /FU
+
+ @property
+ def architecture(self):  # Read-only target architecture string, e.g. 'x86' or 'x64'.
+ return self.__architecture
+
+ def addForcedUsing(self, assembly):
+ """Add a .NET assembly to be forcibly referenced on the command-line.
+
+ @param assembly: A path or FileTarget or ScriptResult that results
+ in a path or FileTarget.
+ """
+ self.forcedUsings.append(self.configuration.basePath(assembly))
+ self._clearCache()  # invalidate memoised command-line args so /FU flags are rebuilt
+
+ def _formatMessage(self, inputText):
+ """Format errors to be clickable in MS Visual Studio.
+ """
+ if self.messageStyle != self.MSVS_CLICKABLE:
+ return inputText
+
+ outputLines = []
+ pos = 0
+ while True:
+ m = self.__messageExpression.search(inputText, pos)
+ if m:
+ spaces, path, = m.groups()
+ startPos = m.start()
+ endPos = startPos + len(spaces) + len(path)  # stop before the '(line) :' suffix, which is kept verbatim
+ if startPos != pos:
+ outputLines.append(inputText[pos:startPos])
+ path = self.configuration.abspath(os.path.normpath(path))  # absolutise so MSVS can resolve the file
+ outputLines.append(spaces + path)
+ pos = endPos
+ else:
+ outputLines.append(inputText[pos:])
+ break
+ return ''.join(outputLines)
+
+ def _outputStdout(self, text):  # Reformat diagnostics before forwarding to the base logger.
+ Compiler._outputStdout(self, self._formatMessage(text))
+
+ def _outputStderr(self, text):  # Reformat diagnostics before forwarding to the base logger.
+ Compiler._outputStderr(self, self._formatMessage(text))
+
+ @memoise
+ def _getObjectPrerequisiteTasks(self):  # Adds forced-using assembly tasks as prerequisites for c++/cli builds.
+ tasks = super(MsvcCompiler, self)._getObjectPrerequisiteTasks()
+
+ if self.language == 'c++/cli':
+ # Take a copy so we're not modifying the potentially cached
+ # base version.
+ tasks = list(tasks)
+ tasks.extend(getTasks(self.forcedUsings))
+
+ return tasks
+
+ @memoise
+ def _getCompileCommonArgs(self, suffix):  # Builds the cl.exe argument list shared by object and pch compiles.
+ args = [
+ self.__clExe,
+ "/nologo",
+ "/showIncludes",
+ "/c",
+ ]
+
+ if self.errorReport:
+ args.append('/errorReport:' + self.errorReport)
+
+ if self.outputFullPath:
+ args.append("/FC")
+
+ if self.useBigObjects:
+ args.append("/bigobj")
+
+ if self.memoryLimit is not None:
+ args.append("/Zm%i" % self.memoryLimit)
+
+ if self.runtimeLibraries == 'release-dll':
+ args.append("/MD")
+ elif self.runtimeLibraries == 'debug-dll':
+ args.append("/MDd")
+ elif self.runtimeLibraries == 'release-static':
+ args.append("/MT")
+ elif self.runtimeLibraries == 'debug-static':
+ args.append("/MTd")
+
+ if self.useFunctionLevelLinking:
+ args.append('/Gy') # Enable function-level linking
+
+ if self.useStringPooling:
+ args.append('/GF') # Eliminate duplicate strings
+
+ language = self._getLanguage(suffix)
+ if language == 'c++':
+ args.extend(self.cppFlags)
+ elif language == 'c++/cli':
+ args.extend(self.cppFlags)
+ elif language == 'c':
+ args.extend(self.cFlags)
+
+ if self.enableRtti is not None:
+ if self.enableRtti:
+ args.append('/GR') # Enable RTTI
+ else:
+ args.append('/GR-') # Disable RTTI
+
+ if self.enableExceptions is not None:
+ if self.enableExceptions == "SEH":
+ args.append('/EHa') # Enable SEH exceptions
+ elif self.enableExceptions:
+ args.append('/EHsc') # Enable exceptions
+ else:
+ args.append('/EHsc-') # Disable exceptions
+
+ if self.language == 'c++/cli':
+ if self.clrMode == 'safe':
+ args.append('/clr:safe') # Compile to verifiable CLR code
+ elif self.clrMode == 'pure':
+ args.append('/clr:pure') # Compile to pure CLR code
+ else:
+ args.append('/clr') # Compile to mixed CLR/native code
+
+ for assembly in getPaths(self.forcedUsings):
+ args.append('/FU' + assembly)
+
+ if self.optimisation == self.FULL_OPTIMISATION:
+ args.append('/GL') # Global (whole-program) optimisation
+ elif self.optimisation == self.PARTIAL_OPTIMISATION:
+ args.append('/Ox') # Full optimisation
+ elif self.optimisation == self.NO_OPTIMISATION:
+ args.append('/Od') # No optimisation
+
+ if self.warningLevel is not None:
+ args.append('/W%s' % self.warningLevel)
+
+ if self.warningsAsErrors:
+ args.append('/WX')
+
+ if self.useEditAndContinue:
+ args.append("/ZI") # Output debug info to PDB (edit-and-continue)
+ elif self._needPdbFile:
+ args.append("/Zi") # Output debug info to PDB (no edit-and-continue)
+ elif self.debugSymbols:
+ args.append("/Z7") # Output debug info embedded in .obj
+
+ if self.useMinimalRebuild:
+ args.append("/Gm") # Enable minimal rebuild
+
+ args.extend("/D" + define for define in self.getDefines())
+ args.extend("/I" + path for path in self.getIncludePaths())
+ args.extend("/FI" + path for path in self.getForcedIncludes())
+
+ return args
+
+ def _getLanguage(self, suffix):
+ language = self.language
+ if language is None:
+ if suffix in self.cSuffixes:
+ language = 'c'
+ elif suffix in self.cppSuffixes:
+ language = 'c++'
+ return language
+
+ @property
+ @memoise
+ def _needPdbFile(self):
+ if self.pdbFile is not None and self.debugSymbols:
+ return True
+ elif self.useMinimalRebuild or self.useEditAndContinue:
+ return True
+ else:
+ return False
+
+ def getPchCommands(self, target, source, header, object):
+ args = list(self._getCompileCommonArgs(cake.path.extension(source)))
+ args.append('/Fo' + object)
+
+ if self.outputBrowseInfo:
+ args.append('/FR' + cake.path.stripExtension(target) + ".sbr")
+
+ if self.language == 'c':
+ args.append('/Tc' + source)
+ elif self.language in ['c++', 'c++/cli']:
+ args.append('/Tp' + source)
+ else:
+ args.append(source)
+
+ args.extend([
+ '/Yl' + _mungePathToSymbol(target),
+ '/Fp' + target,
+ '/Yc' + header,
+ ])
+
+ return self._getObjectCommands(target, source, args, None)
+
+ def getObjectCommands(self, target, source, pch, shared):
+ args = list(self._getCompileCommonArgs(cake.path.extension(source)))
+ args.append('/Fo' + target)
+
+ if self.outputBrowseInfo:
+ args.append('/FR' + cake.path.stripExtension(target) + ".sbr")
+
+ if self.language == 'c':
+ args.append('/Tc' + source)
+ elif self.language in ['c++', 'c++/cli']:
+ args.append('/Tp' + source)
+ else:
+ args.append(source)
+
+ if pch is not None:
+ args.extend([
+ '/Yl' + _mungePathToSymbol(pch.path),
+ '/Fp' + pch.path,
+ '/Yu' + pch.header,
+ ])
+ deps = [pch.path]
+ else:
+ deps = []
+
+ return self._getObjectCommands(target, source, args, deps)
+
+  def _getObjectCommands(self, target, source, args, deps):
+    """Build the compile command (and cacheability flag) for one object.
+
+    @param target: Path of the object file to produce.
+    @param source: Path of the source file being compiled.
+    @param args: Full cl.exe argument list built by the caller.
+    @param deps: Extra known dependencies (eg. a .pch path), or None.
+    @return: Tuple (command, args, canBeCached).
+    """
+
+    if self._needPdbFile:
+      if self.pdbFile is not None:
+        pdbFile = self.pdbFile
+      else:
+        pdbFile = target + '.pdb'
+      args.append('/Fd' + pdbFile)
+    else:
+      pdbFile = None
+
+    def compile():
+      # Start with the compiler executable and the source file itself.
+      dependencies = [args[0], source]
+      if deps is not None:
+        dependencies.extend(deps)
+      if self.language == 'c++/cli':
+        dependencies.extend(getPaths(self.forcedUsings))
+      # Normalised-path set used only to de-duplicate scanned includes.
+      dependenciesSet = set()
+
+      def processStdout(text):
+        # /showIncludes prints each header as a 'Note: including file:'
+        # line; harvest those as dependencies and filter them (and the
+        # echoed source file name) out of the visible output.
+        includePrefix = ('Note: including file:')
+        includePrefixLen = len(includePrefix)
+
+        sourceName = cake.path.baseName(source)
+        outputLines = []
+        for line in text.splitlines():
+          if line == sourceName:
+            continue
+          if line.startswith(includePrefix):
+            path = line[includePrefixLen:].lstrip()
+            normPath = os.path.normcase(os.path.normpath(path))
+            if normPath not in dependenciesSet:
+              dependenciesSet.add(normPath)
+              dependencies.append(path)
+          else:
+            outputLines.append(line)
+
+        if outputLines:
+          self._outputStdout("\n".join(outputLines) + "\n")
+
+      self._runProcess(
+        args=args,
+        target=target,
+        processStdout=processStdout,
+        )
+
+      return dependencies
+
+    def compileWhenPdbIsFree():
+      # Only one cl.exe may write to a given .pdb at a time, so chain
+      # compiles that share a .pdb behind each other via _pdbQueue.
+      absPdbFile = self.configuration.abspath(pdbFile)
+      self._pdbQueueLock.acquire()
+      try:
+        predecessor = self._pdbQueue.get(absPdbFile, None)
+        compileTask = self.engine.createTask(compile)
+        if predecessor is not None:
+          predecessor.addCallback(
+            lambda: compileTask.start(immediate=True)
+            )
+        else:
+          compileTask.start(immediate=True)
+        self._pdbQueue[absPdbFile] = compileTask
+      finally:
+        self._pdbQueueLock.release()
+
+      return compileTask
+
+    # Can only cache the object if its debug info is not going into
+    # a .pdb since multiple objects could all put their debug info
+    # into the same .pdb.
+    canBeCached = pdbFile is None and not self.outputBrowseInfo
+
+    if pdbFile is None:
+      return compile, args, canBeCached
+    else:
+      return compileWhenPdbIsFree, args, canBeCached
+
+ @memoise
+ def _getCommonLibraryArgs(self):
+ args = [self.__libExe, '/NOLOGO']
+
+ # XXX: MSDN says /errorReport:queue is supported by lib.exe
+ # but it seems to go unrecognised in MSVC8.
+ #if self.errorReport:
+ # args.append('/ERRORREPORT:' + self.errorReport.upper())
+
+ if self.optimisation == self.FULL_OPTIMISATION:
+ args.append('/LTCG')
+
+ if self.warningsAsErrors:
+ args.append('/WX')
+
+ args.extend(self.libraryFlags)
+
+ return args
+
+ def getLibraryCommand(self, target, sources):
+
+ args = list(self._getCommonLibraryArgs())
+
+ args.append('/OUT:' + target)
+
+ args.extend(sources)
+
+ @makeCommand(args)
+ def archive():
+ self._runProcess(args, target)
+
+ @makeCommand("lib-scan")
+ def scan():
+ # TODO: Add dependencies on DLLs used by lib.exe
+ return [target], [self.__libExe] + sources
+
+ return archive, scan
+
+  @memoise
+  def _getLinkCommonArgs(self, dll):
+    """Build the link.exe argument list shared by programs and modules.
+
+    @param dll: True when linking a DLL, False for an executable.
+    @return: A (memoised) list of command-line arguments.
+    """
+
+    args = [self.__linkExe, '/NOLOGO']
+
+    # XXX: MSVC8 linker complains about /errorReport being unrecognised.
+    #if self.errorReport:
+    #  args.append('/ERRORREPORT:%s' % self.errorReport.upper())
+
+    # Trying to combine /incremental with /clrimagetype gives a linker
+    # warning LNK4075: ignoring '/INCREMENTAL'
+    if self.useIncrementalLinking is not None and self.clrMode is None:
+      if self.useIncrementalLinking:
+        args.append('/INCREMENTAL')
+      else:
+        args.append('/INCREMENTAL:NO')
+
+    if dll:
+      args.append('/DLL')
+      args.extend(self.moduleFlags)
+    else:
+      args.extend(self.programFlags)
+
+    if self.useFunctionLevelLinking is not None:
+      if self.useFunctionLevelLinking:
+        args.append('/OPT:REF') # Eliminate unused functions (COMDATs)
+        args.append('/OPT:ICF') # Identical COMDAT folding
+      else:
+        args.append('/OPT:NOREF') # Keep unreferenced functions
+
+    if self.moduleVersion is not None:
+      args.append('/VERSION:' + self.moduleVersion)
+
+    if isinstance(self.stackSize, tuple):
+      # Specify stack (reserve, commit) sizes
+      args.append('/STACK:%i,%i' % self.stackSize)
+    elif self.stackSize is not None:
+      # Specify stack reserve size
+      args.append('/STACK:%i' % self.stackSize)
+
+    if isinstance(self.heapSize, tuple):
+      # Specify heap (reserve, commit) sizes
+      args.append('/HEAP:%i,%i' % self.heapSize)
+    elif self.heapSize is not None:
+      # Specify heap reserve size
+      args.append('/HEAP:%i' % self.heapSize)
+
+    if self.optimisation == self.FULL_OPTIMISATION:
+      # Link-time code generation (global optimisation)
+      args.append('/LTCG:NOSTATUS')
+
+    if self.clrMode is not None:
+      if self.clrMode == "pure":
+        args.append('/CLRIMAGETYPE:PURE')
+      elif self.clrMode == "safe":
+        args.append('/CLRIMAGETYPE:SAFE')
+      else:
+        args.append('/CLRIMAGETYPE:IJW')
+
+    if self.debugSymbols:
+      args.append('/DEBUG')
+
+      if self.clrMode is not None:
+        args.append('/ASSEMBLYDEBUG')
+
+      if self.pdbFile is not None:
+        args.append('/PDB:' + self.pdbFile)
+
+      if self.strippedPdbFile is not None:
+        args.append('/PDBSTRIPPED:' + self.strippedPdbFile)
+
+    if self.warningsAsErrors:
+      args.append('/WX')
+
+    if self.__architecture == 'x86':
+      args.append('/MACHINE:X86')
+    elif self.__architecture == 'x64':
+      args.append('/MACHINE:X64')
+    elif self.__architecture == 'ia64':
+      args.append('/MACHINE:IA64')
+
+    args.extend('/LIBPATH:' + path for path in self.getLibraryPaths())
+
+    return args
+
+  def getProgramCommands(self, target, sources):
+    """Return (link, scan) commands for linking an executable."""
+    return self._getLinkCommands(target, sources, dll=False)
+
+  def getModuleCommands(self, target, sources, importLibrary, installName):
+    """Return (link, scan) commands for linking a DLL/module."""
+    return self._getLinkCommands(target, sources, importLibrary, installName, dll=True)
+
+  def _getLinkCommands(self, target, sources, importLibrary=None, installName=None, dll=False):
+    """Build the link command (and dependency scan) for a program or DLL.
+
+    Supports optional manifest embedding via one of two strategies: an
+    incremental-link-friendly one (manifest compiled to a .res and linked
+    in, relinking only when the manifest changes) and an in-place mt.exe
+    update of the final binary.
+
+    @param target: Path of the executable/DLL to produce.
+    @param sources: Object/library files to link.
+    @param importLibrary: Optional import library path (DLLs only).
+    @param installName: Unused by this compiler.
+    @param dll: True when linking a DLL.
+    @return: Tuple (command, scan).
+    """
+
+    objects, libraries = self._resolveObjects()
+
+    if importLibrary:
+      importLibrary = self.configuration.abspath(importLibrary)
+
+    absTarget = self.configuration.abspath(target)
+    absTargetDir = cake.path.dirName(absTarget)
+
+    args = list(self._getLinkCommonArgs(dll))
+
+    if self.subSystem is not None:
+      args.append('/SUBSYSTEM:' + self.subSystem)
+
+    if self.debugSymbols and self.pdbFile is None:
+      args.append('/PDB:%s.pdb' % target)
+
+    if dll and importLibrary:
+      args.append('/IMPLIB:' + importLibrary)
+
+    if self.optimisation == self.FULL_OPTIMISATION and \
+       self.useIncrementalLinking:
+      self.engine.raiseError(
+        "Cannot set useIncrementalLinking with optimisation=FULL_OPTIMISATION\n"
+        )
+
+    if self.embedManifest:
+      if not self.__mtExe:
+        self.engine.raiseError(
+          "You must set path to mt.exe with embedManifest=True\n"
+          )
+
+      # Resource id 2 is used for DLL manifests, 1 for executables.
+      if dll:
+        manifestResourceId = 2
+      else:
+        manifestResourceId = 1
+      embeddedManifest = target + '.embed.manifest'
+      absEmbeddedManifest = self.configuration.abspath(embeddedManifest)
+      if self.useIncrementalLinking:
+        if not self.__rcExe:
+          self.engine.raiseError(
+            "You must set path to rc.exe with embedManifest=True and useIncrementalLinking=True\n"
+            )
+
+        intermediateManifest = target + '.intermediate.manifest'
+        absIntermediateManifest = absTarget + '.intermediate.manifest'
+
+        embeddedRc = embeddedManifest + '.rc'
+        absEmbeddedRc = absEmbeddedManifest + '.rc'
+        embeddedRes = embeddedManifest + '.res'
+        args.append('/MANIFESTFILE:' + intermediateManifest)
+        args.append(embeddedRes)
+      else:
+        args.append('/MANIFESTFILE:' + embeddedManifest)
+
+    if self.outputMapFile:
+      mapFile = cake.path.stripExtension(target) + '.map'
+      args.append('/MAP:' + mapFile)
+
+    args.append('/OUT:' + target)
+    args.extend(sources)
+    args.extend(objects)
+
+    # Msvc requires a .lib extension otherwise it will assume an .obj
+    libraryPrefix, librarySuffix = self.libraryPrefix, self.librarySuffix
+    for l in libraries:
+      if not cake.path.hasExtension(l):
+        l = cake.path.forcePrefixSuffix(l, libraryPrefix, librarySuffix)
+      args.append(l)
+
+    @makeCommand(args)
+    def link():
+      if dll and importLibrary:
+        cake.filesys.makeDirs(cake.path.dirName(importLibrary))
+      self._runProcess(args, target)
+
+    @makeCommand(args)
+    def linkWithManifestIncremental():
+      """Embed the manifest via an .rc/.res file so incremental linking
+      keeps working; a second link is performed only if the manifest
+      actually changed.
+      """
+
+      def compileRcToRes():
+        # Compile the generated .rc (which references the manifest) to a
+        # .res that is linked into the binary.
+        rcArgs = [
+          self.__rcExe,
+          "/fo" + embeddedRes,
+          embeddedRc,
+          ]
+
+        def processStdout(text):
+          outputLines = text.splitlines()
+
+          # Skip any leading logo output by some of the later versions of rc.exe
+          if len(outputLines) >= 2 and \
+             outputLines[0].startswith('Microsoft (R) Windows (R) Resource Compiler Version ') and \
+             outputLines[1].startswith('Copyright (C) Microsoft Corporation. All rights reserved.'):
+            outputLines = outputLines[2:]
+
+          if outputLines:
+            self._outputStdout("\n".join(outputLines) + "\n")
+
+        self._runProcess(
+          args=rcArgs,
+          target=embeddedRes,
+          processStdout=processStdout,
+          allowResponseFile=False,
+          )
+
+      def updateEmbeddedManifestFile():
+        """Updates the embedded manifest file based on the manifest file
+        output by the link stage.
+
+        @return: True if the manifest file changed, False if the manifest
+        file stayed the same.
+        """
+
+        mtArgs = [
+          self.__mtExe,
+          "/nologo",
+          "/notify_update",
+          "/manifest", intermediateManifest,
+          "/out:" + embeddedManifest,
+          ]
+
+        result = []
+
+        def processExitCode(exitCode):
+          # The magic number here is the exit code output by the mt.exe
+          # tool when the manifest file hasn't actually changed. We can
+          # avoid a second link if the manifest file hasn't changed.
+          if exitCode != 0 and exitCode != 1090650113:
+            self.engine.raiseError("%s: failed with exit code %i\n" % (mtArgs[0], exitCode))
+
+          result.append(exitCode != 0)
+
+        self._runProcess(
+          args=mtArgs,
+          target=embeddedManifest,
+          processExitCode=processExitCode,
+          )
+
+        return result[0]
+
+      # Create an empty embeddable manifest if one doesn't already exist
+      if not cake.filesys.isFile(absEmbeddedManifest):
+        self.engine.logger.outputInfo(
+          "Creating dummy manifest: %s\n" % embeddedManifest
+          )
+        cake.filesys.makeDirs(absTargetDir)
+        open(absEmbeddedManifest, 'wb').close()
+
+      # Generate .embed.manifest.rc
+      self.engine.logger.outputInfo("Creating %s\n" % embeddedRc)
+      f = open(absEmbeddedRc, 'w')
+      try:
+        # Use numbers so we don't have to include any headers
+        # 24 - RT_MANIFEST
+        f.write('%i 24 "%s"\n' % (
+          manifestResourceId,
+          embeddedManifest.replace("\\", "\\\\")
+          ))
+      finally:
+        f.close()
+
+      compileRcToRes()
+      link()
+
+      if cake.filesys.isFile(absIntermediateManifest) and updateEmbeddedManifestFile():
+        # Manifest file changed so we need to re-link to embed it
+        compileRcToRes()
+        link()
+
+    @makeCommand(args)
+    def linkWithManifestNonIncremental():
+      """This strategy for embedding the manifest embeds the manifest in-place
+      in the executable since it doesn't need to worry about invalidating the
+      ability to perform incremental links.
+      """
+      # Perform the link as usual
+      link()
+
+      # If we are linking with static runtimes there may be no manifest
+      # output, in which case we can skip embedding it.
+
+      if not cake.filesys.isFile(absEmbeddedManifest):
+        self.engine.logger.outputInfo(
+          "Skipping embedding manifest: no manifest to embed\n"
+          )
+        return
+
+      mtArgs = [
+        self.__mtExe,
+        "/nologo",
+        "/manifest", embeddedManifest,
+        "/outputresource:%s;%i" % (target, manifestResourceId),
+        ]
+
+      self._runProcess(mtArgs, embeddedManifest)
+
+    @makeCommand("link-scan")
+    def scan():
+      # Enumerate all files the link produces plus everything it reads.
+      targets = [target]
+      if dll and importLibrary:
+        exportFile = cake.path.stripExtension(importLibrary) + '.exp'
+        targets.append(importLibrary)
+        targets.append(exportFile)
+      if self.outputMapFile:
+        targets.append(mapFile)
+      if self.debugSymbols:
+        if self.pdbFile is not None:
+          targets.append(self.pdbFile)
+        if self.strippedPdbFile is not None:
+          targets.append(self.strippedPdbFile)
+      if not self.embedManifest:
+        # If we are linking with static runtimes there may be no manifest
+        # output, in which case we don't need to flag it as a target.
+        manifestFile = target + '.manifest'
+        absManifestFile = self.configuration.abspath(manifestFile)
+        if cake.filesys.isFile(absManifestFile):
+          targets.append(manifestFile)
+
+      dependencies = [self.__linkExe]
+      dependencies += sources
+      dependencies += objects
+      dependencies += self._scanForLibraries(libraries)
+      return targets, dependencies
+
+    if self.embedManifest:
+      if self.useIncrementalLinking:
+        return linkWithManifestIncremental, scan
+      else:
+        return linkWithManifestNonIncremental, scan
+    else:
+      return link, scan
+
+ @memoise
+ def _getCommonResourceArgs(self):
+ args = [self.__rcExe] # Cannot use '/nologo' due to WindowsSDK 6.0A rc.exe not supporting it.
+ args.extend(self.resourceFlags)
+ args.extend("/d" + define for define in self.getDefines())
+ args.extend("/i" + path for path in self.getIncludePaths())
+ return args
+
+ def getResourceCommand(self, target, source):
+
+ args = list(self._getCommonResourceArgs())
+ args.append('/fo' + target)
+ args.append(source)
+
+ @makeCommand(args)
+ def compile():
+ self._runProcess(args, target, allowResponseFile=False)
+
+ @makeCommand("rc-scan")
+ def scan():
+ # TODO: Add dependencies on DLLs used by rc.exe
+ return [target], [self.__rcExe, source]
+
+ return compile, scan
Index: cake/cake/library/compilers/mwcw.py
===================================================================
--- cake/cake/library/compilers/mwcw.py (revision 0)
+++ cake/cake/library/compilers/mwcw.py (working copy)
@@ -0,0 +1,340 @@
+"""The Metrowerks CodeWarrior Compiler.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import os
+import os.path
+
+import cake.filesys
+import cake.path
+from cake.library import memoise, getPaths
+from cake.library.compilers import Compiler, makeCommand
+from cake.gnu import parseDependencyFile
+
+class MwcwCompiler(Compiler):
+  """Compiler implementation for Metrowerks CodeWarrior (mwcc/mwld)."""
+
+  libraryPrefixSuffixes = [('', '.a')] # Static libraries: '<name>.a'
+  programSuffix = '.elf'
+  pchSuffix = '.mch'
+  _name = 'mwcw'
+
+  def __init__(
+    self,
+    configuration,
+    ccExe=None,
+    ldExe=None,
+    binPaths=None,
+    ):
+    """Construct the compiler.
+
+    @param ccExe: Path to the compiler executable.
+    @param ldExe: Path to the linker executable.
+    @param binPaths: Extra paths to search for tool binaries.
+    """
+    Compiler.__init__(self, configuration=configuration, binPaths=binPaths)
+    self.__ccExe = ccExe
+    self.__ldExe = ldExe
+
+  def _formatMessage(self, inputText):
+    """Format errors to be clickable in MS Visual Studio.
+
+    Parses the '-msgstyle parseable' output: pipe-delimited header lines,
+    '='-prefixed source-context lines and '>'-prefixed message lines
+    (format inferred from the parsing below — TODO confirm against actual
+    mwcc output).
+    """
+    if self.messageStyle != self.MSVS_CLICKABLE:
+      return inputText
+
+    def readLine(text):
+      # Split off the first CRLF-terminated line.
+      res, _, text = text.partition("\r\n")
+      return res, text
+
+    line, inputText = readLine(inputText)
+    outputText = ""
+    indent = " "
+
+    # Each diagnostic starts with an 'executable|component|type' line.
+    while line.count("|") == 2:
+      executable, component, type = line.split("|")
+      line, inputText = readLine(inputText)
+
+      # Optional location line with path/line/column; otherwise fall back
+      # to the header fields.
+      if line.count("|") == 5:
+        path, lineNum, colNum, _, _, _ = line[1:-1].split("|")
+        line, inputText = readLine(inputText)
+      else:
+        path = executable
+        lineNum = component
+        colNum = None
+
+      contextLines = []
+      while line.startswith("="):
+        contextLines.append(line[1:])
+        line, inputText = readLine(inputText)
+
+      messageLines = []
+      while line.startswith(">"):
+        messageLines.append(line[1:])
+        line, inputText = readLine(inputText)
+
+      # 'path(line): type: message' is the format MSVS recognises.
+      outputText += "%s(%s): %s: %s\n" % (
+        path,
+        lineNum,
+        type.lower(),
+        messageLines[0],
+        )
+
+      # Context from the offending source file
+      if contextLines:
+        # Write out first line with ^ underneath pointing to the offending column
+        outputText += indent + contextLines[0] + "\n"
+        if colNum is not None:
+          outputText += indent + " " * (int(colNum) - 1) + "^\n"
+
+        # Write out any remaining lines (if any)
+        for line in contextLines[1:]:
+          outputText += indent + line + "\n"
+
+      if len(messageLines) > 1:
+        # Write out the message again if it was multi-line
+        for messageLine in messageLines:
+          outputText += indent + messageLine + "\n"
+
+    # Write the remaining lines
+    if line:
+      outputText += line + "\n"
+    outputText += inputText.replace("\r", "")
+
+    return outputText
+
+  def _outputStdout(self, text):
+    # Reformat before delegating to the base implementation.
+    Compiler._outputStdout(self, self._formatMessage(text))
+
+  def _outputStderr(self, text):
+    # Reformat before delegating to the base implementation.
+    Compiler._outputStderr(self, self._formatMessage(text))
+
+  @memoise
+  def _getCommonArgs(self):
+    """Arguments shared by compile, library and link command lines."""
+    args = [
+      '-msgstyle', 'parseable', # Use parseable message output
+      '-nowraplines', # Don't wrap long lines
+      ]
+
+    if self.warningsAsErrors:
+      args.extend(['-w', 'error'])
+
+    if self.debugSymbols:
+      args.extend(['-sym', 'dwarf-2'])
+
+    return args
+
+  @memoise
+  def _getCompileArgs(self, suffix):
+    """Build the compile argument list for sources with this suffix."""
+    args = [
+      self.__ccExe,
+      '-c', # Compile only
+      '-MD', # Generate dependency file
+      '-gccdep', # Output dependency file next to target
+      '-gccinc', # Use GCC #include semantics
+      '-pragma', 'cats off', # Turn off Codewarrior Analysis Tool
+      '-enum', 'int', # Enumerations always use 'int' for storage
+      ]
+    args.extend(self._getCommonArgs())
+
+    if self.language is not None:
+      args.extend(['-lang', self.language])
+
+    language = self._getLanguage(suffix)
+    if language in ['c++', 'cplus', 'ec++']:
+      args.extend(self.cppFlags)
+    elif language in ['c', 'c99']:
+      args.extend(self.cFlags)
+    elif language == 'objc':
+      args.extend(self.mFlags)
+
+    if self.enableRtti is not None:
+      if self.enableRtti:
+        args.extend(['-RTTI', 'on'])
+      else:
+        args.extend(['-RTTI', 'off'])
+
+    if self.enableExceptions is not None:
+      if self.enableExceptions:
+        args.extend(['-cpp_exceptions', 'on'])
+      else:
+        args.extend(['-cpp_exceptions', 'off'])
+
+    if self.optimisation == self.NO_OPTIMISATION:
+      args.extend([
+        '-inline', 'off',
+        '-opt', 'off',
+        '-ipa', 'off',
+        ])
+    elif (
+      self.optimisation == self.PARTIAL_OPTIMISATION or
+      self.optimisation == self.FULL_OPTIMISATION
+      ):
+      args.extend([
+        '-inline', 'all', # Let the compiler auto inline small functions
+        '-str', 'reuse,pool', # Reuse string constants, place them together
+        '-ipa', 'file', # File level optimisation
+        ])
+
+      if self.optimisation == self.PARTIAL_OPTIMISATION:
+        args.extend(['-opt', 'level=2']) # Optimisation level 2
+      elif self.optimisation == self.FULL_OPTIMISATION:
+        args.extend([
+          '-opt', 'level=4', # Optimisation level 4
+          '-opt', 'peep', # Eliminate unnecessary moves/loads/stores
+          '-opt', 'schedule', # Reorder instructions to eliminate stalls
+          ])
+        # Note: ipa program requires you to:
+        # - link with cc.exe
+        # - pass '-ipa program' to the link line
+        # - pass .irobj's to the link line instead of .o's
+        # Even after this the compiler may run out of memory trying
+        # to optimise a large program.
+        #args.extend(['-ipa', 'program']) # Whole program optimisation
+
+    for p in self.getIncludePaths():
+      args.extend(['-i', p])
+
+    args.extend('-D' + d for d in self.getDefines())
+
+    for p in getPaths(self.getForcedIncludes()):
+      args.extend(['-include', p])
+
+    return args
+
+  def _getLanguage(self, suffix):
+    """Return the source language, inferring it from the suffix when unset."""
+    language = self.language
+    if language is None:
+      if suffix in self.cSuffixes:
+        language = 'c99'
+      elif suffix in self.cppSuffixes:
+        language = 'c++'
+      elif suffix in self.mSuffixes:
+        language = 'objc'
+    return language
+
+  def getPchCommands(self, target, source, header, object):
+    """Return the command to build a precompiled header."""
+    depPath = self._generateDependencyFile(target)
+    args = list(self._getCompileArgs(cake.path.extension(source)))
+    args.extend([source, '-precompile', target])
+
+    def compile():
+      # Dependencies come from the compiler-generated dependency file.
+      dependencies = self._runProcess(args + ['-MF', depPath], target)
+      dependencies.extend(self._scanDependencyFile(depPath, target))
+      return dependencies
+
+    canBeCached = True
+    return compile, args, canBeCached
+
+  def getObjectCommands(self, target, source, pch, shared):
+    """Return the command to compile a single object file."""
+    depPath = self._generateDependencyFile(target)
+    args = list(self._getCompileArgs(cake.path.extension(source)))
+    args.extend([source, '-o', target])
+
+    if pch is not None:
+      args.extend(['-include', pch.path])
+
+    def compile():
+      dependencies = self._runProcess(args + ['-MF', depPath], target)
+      dependencies.extend(self._scanDependencyFile(depPath, target))
+
+      if pch is not None:
+        dependencies.append(pch.path)
+
+      return dependencies
+
+    canBeCached = True
+    return compile, args, canBeCached
+
+  @memoise
+  def _getCommonLibraryArgs(self):
+    """Arguments shared by all library (archive) command lines."""
+    args = [self.__ldExe, '-library']
+    args.extend(self._getCommonArgs())
+    args.extend(self.libraryFlags)
+    return args
+
+  def getLibraryCommand(self, target, sources):
+    """Return (archive, scan) commands for building a static library."""
+    args = list(self._getCommonLibraryArgs())
+    args.extend(['-o', target])
+    args.extend(sources)
+
+    @makeCommand(args)
+    def archive():
+      # Remove any stale output first so the archive is rebuilt cleanly.
+      cake.filesys.remove(self.configuration.abspath(target))
+      self._runProcess(args, target)
+
+    @makeCommand("lib-scan")
+    def scan():
+      # TODO: Add dependencies on DLLs used by the archiver
+      targets = [target]
+      dependencies = [args[0]]
+      dependencies += sources
+      return targets, dependencies
+
+    return archive, scan
+
+  @memoise
+  def _getCommonLinkArgs(self, dll):
+    """Arguments shared by program and module link command lines.
+
+    NOTE(review): '-application' is passed for both programs and modules
+    (dll=True only changes which flag list is appended) — confirm this is
+    intended for module targets.
+    """
+    args = [self.__ldExe, '-application']
+    args.extend(self._getCommonArgs())
+
+    if dll:
+      args.extend(self.moduleFlags)
+    else:
+      args.extend(self.programFlags)
+
+    if self.linkerScript is not None:
+      args.extend(['-lcf', self.linkerScript])
+
+    args.extend('-L' + p for p in self.getLibraryPaths())
+    return args
+
+  def getProgramCommands(self, target, sources):
+    """Return (link, scan) commands for linking an executable."""
+    return self._getLinkCommands(target, sources, dll=False)
+
+  def getModuleCommands(self, target, sources, importLibrary, installName):
+    """Return (link, scan) commands for linking a module."""
+    return self._getLinkCommands(target, sources, importLibrary, installName, dll=True)
+
+  def _getLinkCommands(self, target, sources, importLibrary=None, installName=None, dll=False):
+    """Build the link command and dependency scan for a program/module."""
+
+    objects, libraries = self._resolveObjects()
+
+    args = list(self._getCommonLinkArgs(dll))
+    args.extend(sources)
+    args.extend(objects)
+    args.extend('-l' + l for l in libraries)
+    args.extend(['-o', target])
+
+    if self.outputMapFile:
+      mapFile = cake.path.stripExtension(target) + '.map'
+      args.extend(['-map', mapFile])
+
+    @makeCommand(args)
+    def link():
+      self._runProcess(args, target)
+
+    @makeCommand("link-scan")
+    def scan():
+      targets = [target]
+      if self.outputMapFile:
+        targets.append(mapFile)
+
+      # TODO: Add dependencies on DLLs used by the linker
+      # Also add dependencies on system libraries, perhaps
+      # by parsing the output of '-Wl,--trace'
+      dependencies = [args[0]]
+      dependencies += sources
+      dependencies += objects
+      dependencies += self._scanForLibraries(libraries)
+
+      return targets, dependencies
+
+    return link, scan
+
+class WiiMwcwCompiler(MwcwCompiler):
+
+ @memoise
+ def _getCommonArgs(self):
+ args = MwcwCompiler._getCommonArgs(self)
+ args.extend([
+ '-processor', 'gekko', # Target the Gekko processor
+ '-fp', 'fmadd', # Use fmadd instructions where possible
+ '-sdatathreshold', '0', # Max size for objects in small data section
+ '-sdata2threshold', '0', # Ditto for const small data section
+ ])
+ return args
Index: cake/cake/library/env.py
===================================================================
--- cake/cake/library/env.py (revision 0)
+++ cake/cake/library/env.py (working copy)
@@ -0,0 +1,164 @@
+"""Environment Tool.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import cake.path
+
+from cake.library import Tool
+
+def _coerceToList(value):
+ if isinstance(value, list):
+ return value
+ return [value]
+
+class EnvironmentTool(Tool):
+ """
+ Tool that provides a dictionary of key/value pairs
+ used for path substitution.
+ """
+
+ def __init__(self, configuration):
+ """Default constructor.
+ """
+ Tool.__init__(self, configuration)
+ self.vars = {}
+
+ def __getitem__(self, key):
+ """Return an environment variables value given its key.
+
+ @param key: The key of the environment variable to get.
+ @return: The value of the environment variable.
+ """
+ return self.vars[key]
+
+ def __setitem__(self, key, value):
+ """Set a new value for an environment variable.
+
+ @param key: The key of the environment variable to set.
+ @param value: The value to set the environment variable to.
+ """
+ self.vars[key] = value
+
+ def __delitem__(self, key):
+ """Delete an environment variable given its key.
+
+ @param key: The key of the environment variable to delete.
+ """
+ del self.vars[key]
+
+ def __contains__(self, key):
+ """Test if an environment variable is defined.
+
+ @param key: The key of the environment variable to test.
+ """
+ return key in self.vars
+
+ def get(self, key, default=None):
+ """Return an environment variable or default value if not found.
+
+ @param key: The key of the environment variable to get.
+ @param default: The value to return if the key is not found.
+ """
+ return self.vars.get(key, default)
+
+ def setDefault(self, key, default=None):
+ """Set a value only if it doesn't already exist.
+ """
+ return self.vars.setdefault(key, default)
+
+ def update(self, *values, **kwargs):
+ """Update the environment with key/value pairs from 'values' or 'kwargs'.
+
+ If a matching key already exists its value will be replaced by the value
+ passed in.
+
+ Note that if the key/value pairs come from 'kwargs' they must conform to
+ Python keyword argument naming conventions (eg. no spaces).
+
+ Example::
+ env.update({
+ "CODE_PATH":"c:/code",
+ "ART_PATH":"c:/art",
+ })
+ env.update(
+ CODE_PATH="C:/code",
+ ART_PATH="C:/art",
+ )
+ @param values: An iterable sequence of key/value pairs to update from.
+ """
+ self.vars.update(*values, **kwargs)
+
+ def expand(self, value):
+ """Expand variables in the specified string.
+
+ Variables that are expanded are of the form ${VAR}
+ or $VAR.
+
+ Example::
+ env["CODE_PATH"] = "c:/code"
+ env.expand("${CODE_PATH}/a") -> "C:/code/a"
+
+ @param value: The string to expand.
+ @type value: string
+ @return: The expanded string.
+ @rtype: string
+ """
+ return cake.path.expandVars(value, self.vars)
+
+ def append(self, **kwargs):
+ """Append keyword arguments to the environment. If the key does not exist
+ the value passed in is used. If the key does exist the value is appended using
+ the '+' operator.
+
+ Example::
+ env.append(
+ CFLAGS=["/O1"],
+ MESSAGE="Added /O1 flag. ",
+ )
+ """
+ for k, v in kwargs.iteritems():
+ try:
+ old = self.vars[k]
+ if type(old) != type(v):
+ old = _coerceToList(old)
+ v = _coerceToList(v)
+ self.vars[k] = old + v
+ except KeyError:
+ self.vars[k] = v
+
+ def prepend(self, **kwargs):
+ """Prepend keyword arguments to the environment. If the key does not exist
+ the value passed in is used. If the key does exist the value is prepended using
+ the '+' operator.
+
+ Example::
+ env.prepend(
+ CFLAGS=["/O1"],
+ MESSAGE="Added /O1 flag. ",
+ )
+ """
+ for k, v in kwargs.iteritems():
+ try:
+ old = self.vars[k]
+ if type(old) != type(v):
+ old = _coerceToList(old)
+ v = _coerceToList(v)
+ self.vars[k] = v + old
+ except KeyError:
+ self.vars[k] = v
+
+ def replace(self, **kwargs):
+ """Replace key/values in the environment with keyword arguments.
+
+ This function is identical to the set() function.
+
+ Example::
+ env.set(
+ CODE_PATH="C:/code",
+ ART_PATH="C:/art",
+ )
+ """
+ self.vars.update(kwargs)
Index: cake/cake/library/filesys.py
===================================================================
--- cake/cake/library/filesys.py (revision 0)
+++ cake/cake/library/filesys.py (working copy)
@@ -0,0 +1,271 @@
+"""File System Tool.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import glob
+import cake.path
+import cake.filesys
+from cake.library import Tool, DirectoryTarget, FileTarget, getPath, getTask, \
+ flatten, waitForAsyncResult
+
class FileSystemTool(Tool):
    """Tool that provides file system related utilities.
    """

    def findFiles(self, path, recursive=True, includeMatch=None):
        """Find files and directories given a directory path.

        @param path: The path of the directory to search under.
        @type path: string
        @param recursive: Whether or not to search recursively.
        @type recursive: bool
        @param includeMatch: A callable used to decide whether to include
        certain files in the result. This could be a python callable that
        returns True to include the file or False to exclude it, or a regular
        expression function such as re.compile().match or re.match.
        @type includeMatch: any callable

        @return: A sequence of paths of files and directories. The paths
        returned are relative to the 'path' argument (they are not prefixed
        with 'path').
        """
        configuration = self.configuration
        basePath = configuration.basePath(path)
        absPath = configuration.abspath(basePath)

        return cake.filesys.walkTree(
            path=absPath,
            recursive=recursive,
            includeMatch=includeMatch,
            )

    def glob(self, pathname):
        """Find files matching a particular pattern.

        @param pathname: A glob-style file-name pattern. eg. '*.txt'

        @return: A list of paths to files that match the pattern.
        """
        configuration = self.configuration
        basePath = configuration.basePath(pathname)
        absPath = configuration.abspath(basePath)
        # Strip the absolute prefix so results are returned in the same
        # (possibly relative) form the pattern was supplied in.
        offset = len(absPath) - len(pathname)

        return [p[offset:] for p in glob.iglob(absPath)]

    def copyFile(self, source, target, onlyNewer=True):
        """Copy a file from one location to another.

        @param source: The path of the source file or a FileTarget
        representing a file that will be created.
        @type source: string or L{FileTarget}
        @param target: The path of the target file to copy to
        @type target: string
        @param onlyNewer: Only copy source file if it's newer than the target.
        @type onlyNewer: bool

        @return: A FileTarget representing the file that will be copied.
        @rtype: L{FileTarget}
        """
        if not isinstance(target, basestring):
            raise TypeError("target must be a string")

        basePath = self.configuration.basePath

        source = basePath(source)
        target = basePath(target)

        return self._copyFile(source, target, onlyNewer)

    def _copyFile(self, source, target, onlyNewer=True):
        """Create the task that performs a single file copy.

        @param source: Source path, FileTarget or AsyncResult of either.
        @param target: Base-path-relative path of the target file.
        @param onlyNewer: Skip the copy when the target is up to date.
        @return: A FileTarget for the (future) copied file.
        @rtype: L{FileTarget}
        """

        def doCopy():
            # Runs on a worker task once the source file is available.
            sourcePath = getPath(source)
            abspath = self.configuration.abspath
            engine = self.engine

            targetAbsPath = abspath(target)
            sourceAbsPath = abspath(sourcePath)

            # Work out whether the copy can be skipped.
            if engine.forceBuild:
                reasonToBuild = "rebuild has been forced"
            elif not onlyNewer:
                reasonToBuild = "onlyNewer is False"
            elif not cake.filesys.isFile(targetAbsPath):
                reasonToBuild = "it doesn't exist"
            elif engine.getTimestamp(sourceAbsPath) > engine.getTimestamp(targetAbsPath):
                reasonToBuild = "'%s' has been changed" % sourcePath
            else:
                # up-to-date
                return

            engine.logger.outputDebug(
                "reason",
                "Rebuilding '%s' because %s.\n" % (target, reasonToBuild),
                )
            engine.logger.outputInfo("Copying %s to %s\n" % (sourcePath, target))

            try:
                cake.filesys.makeDirs(cake.path.dirName(targetAbsPath))
                cake.filesys.copyFile(sourceAbsPath, targetAbsPath)
            except EnvironmentError as e:
                engine.raiseError("%s: %s\n" % (target, str(e)))

            engine.notifyFileChanged(targetAbsPath)

        @waitForAsyncResult
        def run(source):
            if self.enabled:
                sourceTask = getTask(source)
                copyTask = self.engine.createTask(doCopy)
                copyTask.startAfter(sourceTask)
            else:
                copyTask = None

            return FileTarget(path=target, task=copyTask)

        return run(source)

    def copyFiles(self, sources, targetDir, onlyNewer=True):
        """Copy a collection of files to a target directory.

        @param sources: A list of files to copy.
        @type sources: list of string's, FileTargets or AsyncResult yielding
        a string, FileTarget or list of same.
        @param targetDir: The target directory to copy to.
        @type targetDir: string
        @param onlyNewer: Only copy files that are newer than those in
        the target directory. Defaults to True, matching copyFile().
        @type onlyNewer: bool

        @return: A list of FileTarget's representing the files that will be
        copied.
        @rtype: list of L{FileTarget}
        """
        if not isinstance(targetDir, basestring):
            raise TypeError("targetDir must be a string")

        basePath = self.configuration.basePath

        sources = basePath(sources)
        targetDir = basePath(targetDir)

        @waitForAsyncResult
        def run(sources):
            results = []
            for s in sources:
                sourcePath = getPath(s)
                target = cake.path.join(targetDir, cake.path.baseName(sourcePath))
                # Forward onlyNewer so the per-file behaviour matches copyFile().
                results.append(self._copyFile(source=s, target=target, onlyNewer=onlyNewer))
            return results

        return run(flatten(sources))

    def copyDirectory(
        self,
        sourceDir,
        targetDir,
        recursive=True,
        onlyNewer=True,
        removeStale=False,
        includeMatch=None,
        ):
        """Copy the contents of a source directory to a target directory.

        If the target directory does not exist it will be created.

        @param sourceDir: The name of the source directory to copy from.
        @type sourceDir: string
        @param targetDir: The name of the target directory to copy to.
        @type targetDir: string
        @param recursive: Whether or not to copy recursively.
        @type recursive: bool
        @param onlyNewer: Only copy files that are newer than those in
        the target directory.
        @type onlyNewer: bool
        @param removeStale: Remove files and directories in the target
        directory that no longer exist in the source directory.
        @type removeStale: bool
        @param includeMatch: A callable used to decide whether to include
        certain files when copying. This could be a python callable that
        returns True to copy the file or False to exclude it, or a regular
        expression function such as re.compile().match or re.match.
        @type includeMatch: any callable

        @return: A list of FileTarget's representing the files that will be
        copied.
        @rtype: list of L{FileTarget}
        """
        if not isinstance(targetDir, basestring):
            raise TypeError("targetDir must be a string")

        basePath = self.configuration.basePath
        abspath = self.configuration.abspath
        engine = self.engine

        sourceDir = basePath(sourceDir)
        targetDir = basePath(targetDir)

        def doMakeDir(path):
            # Create a single target directory unless it already exists.
            targetAbsPath = abspath(path)
            if cake.path.isDir(targetAbsPath):
                return # Don't create if it already exists.

            engine.logger.outputInfo("Creating Directory %s\n" % path)
            try:
                cake.filesys.makeDirs(targetAbsPath)
            except EnvironmentError as e:
                # Report the path that actually failed rather than the root
                # target directory.
                engine.raiseError("%s: %s\n" % (path, str(e)))

        def doDelete(paths):
            # Remove files/directories under targetDir that are stale.
            for path in paths:
                targetPath = cake.path.join(targetDir, path)
                absTargetPath = abspath(targetPath)
                engine.logger.outputInfo("Deleting %s\n" % targetPath)
                if cake.path.isDir(absTargetPath):
                    cake.filesys.removeTree(absTargetPath)
                elif cake.path.isFile(absTargetPath):
                    cake.filesys.remove(absTargetPath)
                else:
                    pass # Skip files that may have been deleted already due to iteration order.

        @waitForAsyncResult
        def run(sourceDir):
            sources = set(cake.filesys.walkTree(
                path=abspath(sourceDir),
                recursive=recursive,
                includeMatch=includeMatch,
                ))

            if removeStale:
                targets = set(cake.filesys.walkTree(path=abspath(targetDir), recursive=recursive))
                oldFiles = targets.difference(sources)
                removeTask = self.engine.createTask(lambda f=oldFiles: doDelete(f))
                removeTask.start()

            results = []
            for source in sources:
                sourcePath = cake.path.join(sourceDir, source)
                targetPath = cake.path.join(targetDir, source)
                if cake.path.isDir(abspath(sourcePath)):
                    if self.enabled:
                        dirTask = self.engine.createTask(lambda t=targetPath: doMakeDir(t))
                        dirTask.start()
                    else:
                        dirTask = None
                    # Report the directory by its target path, consistent with
                    # the FileTargets returned for copied files.
                    results.append(DirectoryTarget(path=targetPath, task=dirTask))
                else:
                    results.append(self._copyFile(source=sourcePath, target=targetPath, onlyNewer=onlyNewer))
            return results

        return run(sourceDir)
Index: cake/cake/library/logging.py
===================================================================
--- cake/cake/library/logging.py (revision 0)
+++ cake/cake/library/logging.py (working copy)
@@ -0,0 +1,65 @@
+"""Logging Tool.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+from cake.library import Tool
+
class LoggingTool(Tool):
    """Tool that exposes the engine's logger to build scripts.
    """

    def __init__(self, configuration):
        """Initialise this tool.

        @param configuration: The configuration this tool belongs to.
        """
        Tool.__init__(self, configuration)
        self.logger = configuration.engine.logger

    def debugEnabled(self, keyword):
        """Returns True if currently debugging the given component.

        @param keyword: The component to check.
        @type keyword: string

        @return: True if the logger is debugging the given component,
        otherwise False.
        @rtype: bool
        """
        return self.logger.debugEnabled(keyword)

    def outputError(self, message):
        """Output an error message.

        @param message: The message to output.
        @type message: string
        """
        return self.logger.outputError(message)

    def outputWarning(self, message):
        """Output a warning message.

        @param message: The message to output.
        @type message: string
        """
        return self.logger.outputWarning(message)

    def outputInfo(self, message):
        """Output an informative message.

        @param message: The message to output.
        @type message: string
        """
        return self.logger.outputInfo(message)

    def outputDebug(self, keyword, message):
        """Output a debug message.

        The message will output only if the keyword matches a component
        we are currently debugging.

        @param keyword: The debug keyword associated with this message.
        @type keyword: string
        @param message: The message to output.
        @type message: string
        """
        # Bug fix: forward the keyword too. The underlying logger's
        # outputDebug takes (keyword, message) — see the call sites
        # elsewhere in this patch, eg. outputDebug("reason", ...).
        return self.logger.outputDebug(keyword, message)
Index: cake/cake/library/project.py
===================================================================
--- cake/cake/library/project.py (revision 0)
+++ cake/cake/library/project.py (working copy)
@@ -0,0 +1,1987 @@
+"""Project Tool.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import sys
+import threading
+import os.path
+import codecs
+import itertools
+try:
+ import cStringIO as StringIO
+except ImportError:
+ import StringIO
+
+import cake.path
+import cake.filesys
+import cake.hash
+
+from cake.library import (
+ Tool, FileTarget, AsyncResult, waitForAsyncResult, flatten, getPath, getPaths
+ )
+from cake.script import Script
+
class _Project(object):
    """Represents one generated project file and its set of configurations."""

    def __init__(self, path, filtersPath, name, version):
        self.path = path
        self.filtersPath = filtersPath
        self.dir = cake.path.dirName(path)
        self.name = name
        self.version = version
        self.sccProvider = None
        self.sccProjectName = None
        self.sccAuxPath = None
        self.sccLocalPath = None
        # Well-known GUID identifying a Visual C++ project type.
        self.internalGuid = "{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}"
        self.externalGuid = generateGuid(path)
        self.configurations = {}
        self.lock = threading.Lock()

    def addConfiguration(self, configuration):
        """Register a (name, platform) configuration; thread-safe.

        Raises ValueError if the same configuration was already added.
        """
        with self.lock:
            key = (configuration.name, configuration.platform)
            if key in self.configurations:
                raise ValueError("Project '%s' already has configuration '%s-%s'." % (
                    self.path,
                    configuration.name,
                    configuration.platform,
                    ))
            self.configurations[key] = configuration
+
class _ProjectConfiguration(object):
    """Plain record holding the per-configuration settings of a project."""

    def __init__(
        self,
        name,
        platform,
        items,
        buildArgs,
        output,
        intermediateDir,
        buildLog,
        defines,
        includePaths,
        assemblyPaths,
        forcedIncludes,
        forcedUsings,
        compileAsManaged,
        ):
        # Identity of this configuration.
        self.name = name
        self.platform = platform
        # What to build and how to build it.
        self.items = items
        self.buildArgs = buildArgs
        self.output = output
        self.intermediateDir = intermediateDir
        self.buildLog = buildLog
        # Settings surfaced to the IDE for intellisense.
        self.defines = defines
        self.includePaths = includePaths
        self.assemblyPaths = assemblyPaths
        self.forcedIncludes = forcedIncludes
        self.forcedUsings = forcedUsings
        self.compileAsManaged = compileAsManaged
+
class _Solution(object):
    """Represents one generated solution file and its set of configurations."""

    def __init__(self, path, version):
        self.path = path
        self.dir = cake.path.dirName(path)
        self.name = cake.path.baseNameWithoutExtension(path)
        self.version = version
        self.configurations = {}
        self.lock = threading.Lock()

    def addConfiguration(self, configuration):
        """Register a (name, platform) configuration; thread-safe.

        Raises ValueError if the same configuration was already added.
        """
        with self.lock:
            key = (configuration.name, configuration.platform)
            if key in self.configurations:
                raise ValueError("Solution '%s' already has configuration '%s-%s'." % (
                    self.path,
                    configuration.name,
                    configuration.platform,
                    ))
            self.configurations[key] = configuration
+
class _SolutionConfiguration(object):
    """A (config, platform) pair within a solution, plus the project
    configurations that belong to it."""

    def __init__(self, name, platform):
        self.name = name
        self.platform = platform
        self.projectConfigurations = []

    def addProjectConfiguration(self, configuration):
        """Attach a project configuration to this solution configuration."""
        self.projectConfigurations.append(configuration)
+
class _SolutionProjectConfiguration(object):
    """Plain record describing one project's entry in a solution
    configuration: which config/platform to select, the project path,
    and whether it builds as part of the solution build."""

    def __init__(self, name, platform, path, build):
        self.name = name
        self.platform = platform
        self.path = path
        self.build = build
+
class _ProjectRegistry(object):
    """Thread-safe registry mapping normalised project path -> _Project."""

    def __init__(self):
        self.projects = {}
        self.lock = threading.Lock()

    def getProject(self, path, filtersPath, name, version):
        """Return the project registered at 'path', creating it on first use."""
        key = os.path.normpath(os.path.normcase(path))
        with self.lock:
            project = self.projects.get(key)
            if project is None:
                project = _Project(path, filtersPath, name, version)
                self.projects[key] = project
            return project

    def getProjectByPath(self, path):
        """Return the project registered at 'path', or None if not registered."""
        key = os.path.normpath(os.path.normcase(path))
        return self.projects.get(key)
+
class _SolutionRegistry(object):
    """Thread-safe registry mapping normalised solution path -> _Solution."""

    def __init__(self):
        self.solutions = {}
        self.lock = threading.Lock()

    def getSolution(self, path, version):
        """Return the solution registered at 'path', creating it on first use."""
        key = os.path.normpath(os.path.normcase(path))
        with self.lock:
            solution = self.solutions.get(key)
            if solution is None:
                solution = _Solution(path, version)
                self.solutions[key] = solution
            return solution
+
class ProjectToolTarget(FileTarget):
    """A target returned by the ProjectTool.

    @ivar tool: The tool instance that generated this file target.
    @type tool: L{Tool}
    """
    def __init__(self, path, task, tool):
        """Construct a project-tool target.

        @param path: Path of the generated file.
        @param task: Task that generates the file, or None.
        @param tool: The ProjectTool that generated this target.
        """
        FileTarget.__init__(self, path, task)
        self.tool = tool
+
class ProjectTarget(ProjectToolTarget):
    """A project target.

    @ivar project: The project file target.
    @type project: L{FileTarget}
    @ivar filters: A file target for the associated '.filters' file, or
    None when the product does not use one.
    @type filters: L{FileTarget}
    """
    def __init__(self, path, task, tool, filters):
        """Construct a project target.

        @param path: Path of the generated project file.
        @param task: Task that generates the project, or None.
        @param tool: The ProjectTool that generated this target.
        @param filters: Path of the companion '.filters' file, or None.
        """
        ProjectToolTarget.__init__(self, path, task, tool)
        self.project = FileTarget(path, task)
        self.filters = None if filters is None else FileTarget(filters, task)
+
class SolutionTarget(ProjectToolTarget):
    """A solution target.

    @ivar solution: The solution file target.
    @type solution: L{FileTarget}
    """
    def __init__(self, path, task, tool):
        """Construct a solution target.

        @param path: Path of the generated solution file.
        @param task: Task that generates the solution, or None.
        @param tool: The ProjectTool that generated this target.
        """
        ProjectToolTarget.__init__(self, path, task, tool)
        self.solution = FileTarget(path, task)
+
class ProjectTool(Tool):
    """Tool that provides project/solution generation capabilities.
    """

    projectConfigName = None
    """The project config name.

    This should be set to a string that uniquely identifies the project
    configuration, eg. 'Windows (x86) Debug (msvc)' or
    'PS3 (spu) Release (gcc)'.
    """
    projectPlatformName = None
    """The project platform name.

    For Visual Studio this should be set to one of 'Win32', 'Xbox'
    or 'Xbox 360' depending on the platform you are compiling for.
    """
    solutionConfigName = None
    """The solution config name.

    This should be set to a string that identifies the solution
    configuration, eg. 'Debug' or 'Release'.
    """
    solutionPlatformName = None
    """The solution platform name.

    This should be set to a string that identifies the solution
    platform, eg. 'Windows Msvc (x86)' or 'PS3 Gcc (spu)'.
    """

    VS2002 = 0
    """Visual Studio .NET 2002
    """
    VS2003 = 1
    """Visual Studio .NET 2003
    """
    VS2005 = 2
    """Visual Studio 2005
    """
    VS2008 = 3
    """Visual Studio 2008
    """
    VS2010 = 4
    """Visual Studio 2010
    """

    product = VS2010
    """The product to generate solutions and projects for.

    Can be one of L{VS2002}, L{VS2003}, L{VS2005}, L{VS2008} or L{VS2010}.
    @type: enum
    """

    class SolutionProjectItem(object):
        """A class used to further define solution project items.

        This class can be used to wrap solution project items to
        further define their attributes such as::
          project.solution(
            projects = [
              project.SolutionProjectItem(
                "MyProject",
                build=False, # This project won't build when the solution is built.
                ),
              ],
            target="MySolution",
            )
        """

        build = True
        """Whether the project should be built as part of a solution build.
        @type: bool
        """

        def __init__(self, project, **kwargs):
            # Keyword arguments override the class-level defaults (eg. 'build').
            self.project = project
            for k, v in kwargs.iteritems():
                setattr(self, k, v)

    # Registries are class-level so every ProjectTool clone/variant adds its
    # configurations to the same project/solution objects.
    _projects = _ProjectRegistry()
    _solutions = _SolutionRegistry()

    _msvsBuildLogSuffix = '.html'
    _msvsBuildLogSuffix2010 = '.log'
    _msvsProjectSuffix = '.vcproj'
    _msvsProjectSuffix2010 = '.vcxproj'
    _msvsFiltersSuffix2010 = '.filters'
    _msvsSolutionSuffix = '.sln'

    # Maps product -> project file format version string.
    _toProjectVersion = {
        VS2002 : "7.00",
        VS2003 : "7.10",
        VS2005 : "8.00",
        VS2008 : "9.00",
        VS2010 : "4.0", # MSBuild script
        }

    # Maps product -> solution file format version string.
    _toSolutionVersion = {
        VS2002 : '7.00',
        VS2003 : '8.00',
        VS2005 : '9.00',
        VS2008 : '10.00',
        VS2010 : '11.00',
        }

    def __init__(self, configuration):
        """Initialise this tool.

        @param configuration: The configuration this tool belongs to.
        """
        Tool.__init__(self, configuration)

    def _getProjectConfigName(self):
        """Return the project config name; defaults to the current variant's
        keyword values when projectConfigName is not set.
        """
        configName = self.projectConfigName
        if configName is None:
            keywords = Script.getCurrent().variant.keywords
            configName = " ".join(keywords.values())
        return configName

    def _getProjectPlatformName(self):
        """Return the project platform name; defaults to 'Win32'.
        """
        platformName = self.projectPlatformName
        if platformName is None:
            platformName = "Win32"
        return platformName

    def _getSolutionConfigName(self):
        """Return the solution config name; defaults to the current variant's
        keyword values when solutionConfigName is not set.
        """
        configName = self.solutionConfigName
        if configName is None:
            keywords = Script.getCurrent().variant.keywords
            configName = " ".join(keywords.values())
        return configName

    def _getSolutionPlatformName(self):
        """Return the solution platform name; defaults to 'Win32'.
        """
        platformName = self.solutionPlatformName
        if platformName is None:
            platformName = "Win32"
        return platformName

    def project(
        self,
        target,
        items=None,
        output=None,
        name=None,
        intermediateDir=None,
        buildLog=None,
        compiler=None,
        **kwargs
        ):
        """Generate a project file.

        @param target: The path for the generated project file. If this path
        doesn't have the correct suffix it will be appended automatically.
        @type target: string
        @param items: A list of strings or dict of string->(dict or list
        of string) specifying the paths to project folders and their files.
        Example::
          items={
            "Include":["vector.h", "integer.h"],
            "Source":{
              "PC":["vector_PC.cpp"],
              "Wii":["vector_Wii.cpp"],
              "":["integer.cpp"],
              },
            },

        Result::
          + Include
            - vector.h
            - integer.h
          + Source
            + PC
              - vector_PC.cpp
            + Wii
              - vector_Wii.cpp
            - integer.cpp

        @type items: list/dict of string
        @param output: The output file that this project generates.
        This file will also be the executable used for debugging purposes
        (if applicable).
        @type output: L{CompilerTarget}
        @param name: The name of the generated project. If this is None the
        base filename of the target is used instead.
        @type name: string
        @param intermediateDir: The path to intermediate files. If this is
        None the directory of the first output is used instead.
        @type intermediateDir: string
        @param buildLog: The path to the build log file that MSVS will
        generate for each build. If this is None the name of the first output
        is used instead.
        @type buildLog: string
        @param compiler: A compiler tool containing the compile settings
        used for the aid of intellisense. If not supplied the compiler is
        obtained implicitly via 'output.compiler'.
        @type compiler: L{cake.library.compilers.Compiler} or C{None}

        @return: A L{FileTarget} that specifies the full path to the
        generated project file (with extension if applicable).
        @rtype: L{FileTarget}
        """
        # Apply keyword overrides to a clone so this instance is unaffected.
        tool = self.clone()
        for k, v in kwargs.iteritems():
            setattr(tool, k, v)

        basePath = self.configuration.basePath

        target = basePath(target)
        items = basePath(items)
        output = basePath(output)
        intermediateDir = basePath(intermediateDir)
        buildLog = basePath(buildLog)

        return tool._project(
            target,
            items,
            output,
            name,
            intermediateDir,
            buildLog,
            compiler,
            )

    def _project(
        self,
        target,
        items=None,
        output=None,
        name=None,
        intermediateDir=None,
        buildLog=None,
        compiler=None,
        ):
        """Register a project, and its configuration for the current variant,
        to be written out later by build().
        """
        if items is None:
            items = []

        # Project name defaults to the target's base filename without extension.
        if name is None:
            name = cake.path.baseNameWithoutExtension(target)

        # VS2010 (MSBuild) projects have a different suffix and a companion
        # '.filters' file; earlier products have neither.
        if self.product == self.VS2010:
            target = cake.path.forceExtension(target, self._msvsProjectSuffix2010)
            filters = cake.path.forceExtension(target, self._msvsFiltersSuffix2010)
        else:
            target = cake.path.forceExtension(target, self._msvsProjectSuffix)
            filters = None

        @waitForAsyncResult
        def run(output, items, intermediateDir=intermediateDir, buildLog=buildLog, compiler=compiler):
            # Compiler defaults to the one that produced the output file.
            if compiler is None and output is not None:
                try:
                    compiler = output.compiler
                except AttributeError:
                    pass

            if output is not None:
                outputPath = output.path
            else:
                outputPath = target

            # Gather intellisense settings from the compiler when available.
            if compiler is not None:
                defines = compiler.getDefines()
                includePaths = compiler.getIncludePaths()
                forcedIncludes = compiler.getForcedIncludes()
                forcedUsings = getPaths(getattr(compiler, "forcedUsings", []))
            else:
                defines = []
                includePaths = []
                forcedIncludes = []
                forcedUsings = []

            # TODO: Fill these out when the compiler has them.
            compileAsManaged = ""
            assemblyPaths = []

            # Intermediate dir defaults to the output dir
            if intermediateDir is None:
                intermediateDir = cake.path.dirName(outputPath)

            # Build log defaults to the output path
            if self.product == self.VS2010:
                if buildLog is None:
                    buildLog = cake.path.stripExtension(outputPath)
                buildLog = cake.path.forceExtension(buildLog, self._msvsBuildLogSuffix2010)
            else:
                if buildLog is None:
                    buildLog = cake.path.stripExtension(outputPath) + ".buildlog"
                buildLog = cake.path.forceExtension(buildLog, self._msvsBuildLogSuffix)

            script = Script.getCurrent()
            configuration = script.configuration
            configName = self._getProjectConfigName()
            platformName = self._getProjectPlatformName()

            # Construct the build args
            targetDir = configuration.abspath(cake.path.dirName(target))
            pythonExe = cake.path.absPath(sys.executable, self.engine.oscwd)
            cakeScript = cake.path.absPath(sys.argv[0], self.engine.oscwd)
            scriptPath = configuration.abspath(script.path)
            keywords = script.variant.keywords

            # It's possible these files were passed relative to some arbitrary
            # directory so make sure they exist.
            if not cake.path.isFile(pythonExe):
                raise EnvironmentError("Could not find Python executable at: '%s'" % pythonExe)
            if not cake.path.isFile(cakeScript):
                raise EnvironmentError("Could not find Cake script at: '%s'" % cakeScript)

            buildArgs = [
                cake.path.relativePath(pythonExe, targetDir),
                "-u",
                cake.path.relativePath(cakeScript, targetDir),
                cake.path.relativePath(scriptPath, targetDir),
                ]
            buildArgs.extend("=".join([k, v]) for k, v in keywords.iteritems())

            try:
                version = self._toProjectVersion[self.product]
            except KeyError:
                raise ValueError("Unknown product: '%d'" % self.product)

            project = self._projects.getProject(target, filters, name, version)
            project.addConfiguration(_ProjectConfiguration(
                configName,
                platformName,
                items,
                buildArgs,
                outputPath,
                intermediateDir,
                buildLog,
                defines,
                includePaths,
                assemblyPaths,
                forcedIncludes,
                forcedUsings,
                compileAsManaged,
                ))

        if self.enabled:
            run(output, items)

        return ProjectTarget(path=target, task=None, tool=self, filters=filters)

    def solution(self, target, projects, **kwargs):
        """Generate a solution file.

        @param target: The path for the generated solution file. If this path
        doesn't have the correct suffix it will be appended automatically.
        @type target: string
        @param projects: A list of projects to include in the solution. If
        any of the projects listed don't have the correct suffix it will be
        appended automatically.
        @type projects: list of string

        @return: A L{SolutionTarget} that specifies the full path to the
        generated solution file.
        @rtype: L{SolutionTarget}
        """
        # Apply keyword overrides to a clone so this instance is unaffected.
        tool = self.clone()
        for k, v in kwargs.iteritems():
            setattr(tool, k, v)

        basePath = self.configuration.basePath

        target = basePath(target)
        projects = basePath(projects)

        return tool._solution(target, projects)

    def _solution(self, target, projects):
        """Register a solution, and its configuration for the current variant,
        to be written out later by build().
        """
        # Obtain these now because they may rely on the value of Script.getCurrent()
        configName = self._getSolutionConfigName()
        platformName = self._getSolutionPlatformName()
        projectConfigName = self._getProjectConfigName()
        projectPlatformName = self._getProjectPlatformName()

        @waitForAsyncResult
        def run(target, projects):
            target = cake.path.forceExtension(target, self._msvsSolutionSuffix)

            if not self.enabled:
                return FileTarget(path=target, task=None)

            try:
                version = self._toSolutionVersion[self.product]
            except KeyError:
                raise ValueError("Unknown product: '%d'" % self.product)

            solution = self._solutions.getSolution(target, version)
            configuration = _SolutionConfiguration(
                configName,
                platformName,
                )
            solution.addConfiguration(configuration)

            if self.product == self.VS2010:
                projectExtension = self._msvsProjectSuffix2010
            else:
                projectExtension = self._msvsProjectSuffix

            for p in projects:
                # Resolve any chain of AsyncResults to the underlying value.
                while isinstance(p, AsyncResult):
                    p = p.result

                # Plain projects get default SolutionProjectItem attributes.
                if not isinstance(p, self.SolutionProjectItem):
                    p = self.SolutionProjectItem(p)

                projectPath = getPath(p.project)
                projectPath = cake.path.forceExtension(projectPath, projectExtension)

                configuration.addProjectConfiguration(_SolutionProjectConfiguration(
                    projectConfigName,
                    projectPlatformName,
                    projectPath,
                    p.build,
                    ))

            return SolutionTarget(path=target, task=None, tool=self)

        return run(target, flatten(projects))

    def build(self):
        """Build project and solution files.

        This function will actually write the project and solution files,
        provided the files on disk are different to the files being written.
        If the engine.forceBuild flag is set to True the files will be written
        regardless of any differences.
        """
        if not self.enabled:
            return

        # Generate solutions first as they will attempt to reload in Visual
        # studio and automatically reload all changed projects too. This
        # saves having to click reload on every project change (most of
        # the time).
        for solution in self._solutions.solutions.values():
            generator = MsvsSolutionGenerator(self.configuration, solution, self._projects)
            generator.build()

        for project in self._projects.projects.values():
            if project.version == '4.0':
                # Version '4.0' marks an MSBuild (VS2010) project: write the
                # .vcxproj plus its companion .filters file.
                generator = MsBuildProjectGenerator(self.configuration, project)
                generator.build()
                generator = MsBuildFiltersGenerator(self.configuration, project)
                generator.build()
            else:
                generator = MsvsProjectGenerator(self.configuration, project)
                generator.build()
+
def escapeAttr(value):
    """Utility function for escaping xml attribute values.

    Replaces the XML-special characters '&', "'" and '"' with their
    character entity references. '&' must be replaced first so that the
    entities introduced for the quote characters are not re-escaped.

    @param value: The string to XML attribute escape.

    @return: The escaped XML attribute string.
    """
    value = value.replace("&", "&amp;")
    value = value.replace("'", "&apos;")
    value = value.replace('"', "&quot;")
    return value
+
def generateGuid(filePath):
    """Return a deterministic dummy GUID for a sln/vcproj file.

    The GUID is derived from the MD5 digest of the normalised file path,
    so it is unique per file yet does not change between invocations.
    """
    normPath = os.path.normpath(os.path.normcase(filePath))
    sig = cake.hash.md5(normPath).hexdigest().upper()
    # Lay the first 32 hex digits out in the standard 8-4-4-4-12 GUID
    # grouping (the rest of the digest is discarded).
    groups = (sig[:8], sig[8:12], sig[12:16], sig[16:20], sig[20:32])
    return "{%s}" % "-".join(groups)
+
def convertToProjectItems(configuration, srcfiles, projectDir):
    """Convert the dictionary-based datastructure for defining project items
    and filters into ProjectItem objects.

    @param srcfiles: A dictionary mapping filter names to either a list of
    files or to a similar dictionary. An empty sub-item name in the dictionary
    indicates that the sub-item list should be added to the parent's sub-items.
    eg. Passing this structure::
      {'Sources' :
        {'Private' : ['fooimpl.cpp', 'barimpl.cpp'],
         '' : ['foo.cpp'],
        },
       'Headers' : ['foo.h'],
       '' : ['source.cake'],
      }

    will return this hierarchy of items::
      + ProjectFilterItem('Sources')
      | + ProjectFilterItem('Private')
      | | + ProjectFileItem('fooimpl.cpp')
      | | + ProjectFileItem('barimpl.cpp')
      | + ProjectFileItem('foo.cpp')
      + ProjectFilterItem('Headers')
      | + ProjectFileItem('foo.h')
      + ProjectFileItem('source.cake')

    @param projectDir: The path of the directory containing the project file.
    All paths to file items are output relative to this directory.

    @return: A list of top-level ProjectItem objects to place in the project.
    """
    abspath = configuration.abspath
    items = []
    if isinstance(srcfiles, dict):
        for filterName, contents in srcfiles.items():
            children = convertToProjectItems(configuration, contents, projectDir)
            if not filterName:
                # An empty name hoists the children up to this level.
                items.extend(children)
            else:
                filterItem = ProjectFilterItem(filterName)
                filterItem.addSubItems(children)
                items.append(filterItem)
    elif isinstance(srcfiles, (list, set, tuple)):
        for srcfile in flatten(srcfiles):
            filePath = getPath(srcfile)
            relPath = cake.path.relativePath(abspath(filePath), abspath(projectDir))
            items.append(ProjectFileItem(relPath))
    else:
        raise ValueError(
            "Expected dictionary or list for 'srcfiles' value."
            )
    return items
+
class ProjectItem(object):
    """Base class for anything that can appear in a project's item tree."""
    __slots__ = ['name']

    # Discriminator used when writing items out, instead of isinstance checks.
    kind = 'unknown'

    def __init__(self, name):
        self.name = name
+
class ProjectFilterItem(ProjectItem):
    """A folder-like filter node in the project.

    Filters behave like folders and may contain other sub-items.
    """

    __slots__ = ['subItems']

    kind = 'filter'

    def __init__(self, name):
        ProjectItem.__init__(self, name)
        self.subItems = []

    def addSubItems(self, subItems):
        """Append a sequence of child items to this filter."""
        self.subItems.extend(subItems)
+
class ProjectFileItem(ProjectItem):
    """A node representing a single file in the project.

    File items are typically leaf nodes in the solution explorer
    hierarchy.
    """

    __slots__ = ['filePath']

    kind = 'file'

    def __init__(self, filePath):
        """Construct a file-item in the project.

        @param filePath: Path of the file relative to the project file.
        """
        # The display name is just the file's base name.
        ProjectItem.__init__(self, os.path.basename(filePath))
        self.filePath = filePath
+
def _writeIt(generator, target):
    """Write the contents of a project or solution file only if the target
    file is out of date.

    @param generator: An object providing 'configuration', 'type',
    'encoding' and a '_writeContents(writer)' method.
    @param target: Base-path-relative path of the file to write.
    """
    configuration = generator.configuration
    engine = configuration.engine
    # Renamed from 'type' to avoid shadowing the builtin.
    targetType = generator.type

    # Render the new contents into memory first so they can be compared
    # against what is already on disk.
    stream = StringIO.StringIO()
    writer = codecs.getwriter(generator.encoding)(stream)
    try:
        generator._writeContents(writer)
    except:
        writer.close()
        raise
    newFileContents = stream.getvalue()
    writer.close()

    # Check the existing dependency info file
    buildArgs = []
    _, reasonToBuild = configuration.checkDependencyInfo(target, buildArgs)

    absTarget = configuration.abspath(target)
    if reasonToBuild is None:
        # Compare new file contents against existing file
        try:
            existingFileContents = cake.filesys.readFile(absTarget)
            if newFileContents != existingFileContents:
                reasonToBuild = "it has been changed"
        except EnvironmentError:
            reasonToBuild = "it doesn't exist"

    if reasonToBuild is not None:
        engine.logger.outputDebug(
            "reason",
            "Rebuilding '" + target + "' because " + reasonToBuild + ".\n",
            )
        engine.logger.outputInfo("Generating %s %s\n" % (targetType, target))
        cake.filesys.writeFile(absTarget, newFileContents)

        # Now that the file has been written successfully, save the new dependency file
        newDependencyInfo = configuration.createDependencyInfo(
            targets=[target],
            args=buildArgs,
            dependencies=[],
            )
        configuration.storeDependencyInfo(newDependencyInfo)
    else:
        engine.logger.outputDebug(
            "project",
            "Skipping Identical %s %s\n" % (targetType, target),
            )
+
+# --- MSVS .vcproj (VS2005/2008-era) XML templates ---------------------
+# All %(...)s substitution values are pre-escaped with escapeAttr() by
+# the caller, except %(scc_attrs)s which is a pre-rendered attribute
+# block (possibly empty).
+_msvsProjectHeader = """\
+<?xml version="1.0" encoding="%(encoding)s"?>
+<VisualStudioProject
+\tProjectType="Visual C++"
+\tVersion="%(version)s"
+\tName="%(name)s"
+\tProjectGUID="%(guid)s"
+%(scc_attrs)s
+\tKeyword="MakeFileProj"
+\t>
+"""
+
+_msvsProjectTailer = """\
+</VisualStudioProject>
+"""
+
+# Opens one <Configuration> element.  ConfigurationType="0" marks this
+# as a makefile (NMake-style) project configuration.
+_msvsProjectConfigurationHeader = """\
+\t\t<Configuration
+\t\t\tName="%(name)s"
+\t\t\tOutputDirectory="%(outdir)s"
+\t\t\tIntermediateDirectory="%(intdir)s"
+\t\t\tConfigurationType="0"
+\t\t\tUseOfMFC="0"
+\t\t\tATLMinimizesCRunTimeLibraryUsage="FALSE"
+\t\t\tBuildLogFile="%(buildlog)s"
+\t\t\t>
+"""
+
+# NMake tool settings: build/rebuild/clean command lines plus the
+# IntelliSense hints (defines, include paths, etc.).
+_msvsProjectConfigurationMakeTool = """\
+\t\t\t<Tool
+\t\t\t\tName="VCNMakeTool"
+\t\t\t\tBuildCommandLine="%(buildcmd)s"
+\t\t\t\tReBuildCommandLine="%(rebuildcmd)s"
+\t\t\t\tCleanCommandLine="%(cleancmd)s"
+\t\t\t\tOutput="%(runfile)s"
+\t\t\t\tPreprocessorDefinitions="%(defines)s"
+\t\t\t\tIncludeSearchPath="%(includes)s"
+\t\t\t\tForcedIncludes="%(forcedinc)s"
+\t\t\t\tAssemblySearchPath="%(asspath)s"
+\t\t\t\tForcedUsingAssemblies="%(forceduse)s"
+\t\t\t\tCompileAsManaged="%(compmanag)s"
+\t\t\t/>
+"""
+
+# Xbox 360 deployment tool block, appended only for Xbox 360 configs.
+# NOTE(review): "\$(ProjectName)" in a non-raw string emits a literal
+# backslash before the macro; "devkit:$(ProjectName)" may have been
+# intended -- confirm against a working .vcproj.
+_msvsProjectConfigurationXboxDeploymentTool = """\
+\t\t\t<Tool
+\t\t\t\tName="VCX360DeploymentTool"
+\t\t\t\tRemoteRoot="devkit:\$(ProjectName)"
+\t\t\t\tDeploymentType="0"
+\t\t\t/>
+"""
+
+_msvsProjectConfigurationTailer = """\
+\t\t</Configuration>
+"""
+
+class MsvsProjectGenerator(object):
+  """Generates a single MSVS .vcproj (makefile-style) project file from
+  its Project configuration information.
+  """
+
+  # Default member values
+  encoding = 'utf-8'  # encoding declared in the XML header
+  type = "Project"    # label used in _writeIt() log messages
+
+  def __init__(self, configuration, project):
+    """Construct a new project generator instance.
+
+    @param configuration: Configuration used to resolve absolute paths.
+    @param project: A Project object containing all info required for
+    the project.
+    """
+    self.configuration = configuration
+    self.project = project
+    self.projectName = project.name
+    self.projectDir = project.dir
+    self.projectFilePath = project.path
+    self.version = project.version
+    self.configs = project.configurations.values()
+    self.sccProvider = project.sccProvider
+
+    # Fall back to sensible defaults for unset SCC settings.
+    if project.sccProjectName is None:
+      self.sccProjectName = self.projectName
+    else:
+      self.sccProjectName = str(project.sccProjectName)
+
+    if project.sccAuxPath is None:
+      self.sccAuxPath = ""
+    else:
+      self.sccAuxPath = str(project.sccAuxPath)
+
+    if project.sccLocalPath is None:
+      self.sccLocalPath = "."
+    else:
+      self.sccLocalPath = str(project.sccLocalPath)
+
+    # Get a unique, sorted list of platforms.
+    self.platforms = list(frozenset(c.platform for c in self.configs))
+    self.platforms.sort()
+
+  def build(self):
+    """Create and write the .vcproj file.
+
+    Throws an exception if building the project file fails.
+    """
+    _writeIt(self, self.projectFilePath)
+
+  def getRelativePath(self, path):
+    """Return path relative to the project file.
+    """
+    abspath = self.configuration.abspath
+    return cake.path.relativePath(abspath(path), abspath(self.projectDir))
+
+  def _writeContents(self, writer):
+    """Write the project to the currently open file.
+    """
+    self._writeProjectHeader(writer)
+    self._writePlatforms(writer)
+    self._writeConfigurations(writer)
+    self._writeFiles(writer)
+    self._writeProjectTailer(writer)
+
+  def _writeProjectHeader(self, writer):
+    """Write the project header section to the currently open file.
+
+    This should be written at the start of the file.
+    """
+    guid = self.project.externalGuid
+
+    if self.sccProvider:
+      scc_attrs = ('\tSccProjectName="%(name)s"\n'
+                   '\tSccProvider="%(provider)s"\n'
+                   '\tSccAuxPath="%(auxpath)s"\n'
+                   '\tSccLocalPath="%(localpath)s"' %
+                   {'name' : escapeAttr(self.sccProjectName),
+                    'provider' : escapeAttr(self.sccProvider),
+                    'auxpath' : escapeAttr(self.sccAuxPath),
+                    'localpath' : escapeAttr(self.sccLocalPath),
+                    })
+    else:
+      scc_attrs = ""
+
+    writer.write(_msvsProjectHeader % {
+      'encoding' : escapeAttr(self.encoding),
+      'version' : escapeAttr(self.version),
+      'name' : escapeAttr(self.projectName),
+      'guid' : escapeAttr(guid),
+      'scc_attrs' : scc_attrs,
+      })
+
+  def _writeProjectTailer(self, writer):
+    """Write the project tailer to the file.
+
+    This should be the last content written to the file as it closes off
+    datastructures written by the header.
+    """
+    writer.write(_msvsProjectTailer)
+
+  def _writePlatforms(self, writer):
+    """Write the section that declares all of the platforms supported by
+    this project.
+    """
+    writer.write("\t<Platforms>\n")
+    for platform in self.platforms:
+      writer.write('\t\t<Platform\n')
+      writer.write('\t\t\tName="%s"\n' % escapeAttr(platform))
+      writer.write('\t\t/>\n')
+    writer.write("\t</Platforms>\n")
+
+  def _writeConfigurations(self, writer):
+    """Write the section that declares all of the configurations
+    supported by this project.
+    """
+    writer.write("\t<Configurations>\n")
+    for config in self.configs:
+      self._writeConfiguration(writer, config)
+    writer.write("\t</Configurations>\n")
+
+  def _writeConfiguration(self, writer, config):
+    """Write a section that declares an individual build configuration.
+    """
+    outdir = self.getRelativePath(os.path.dirname(config.output))
+    intdir = self.getRelativePath(config.intermediateDir)
+    runfile = self.getRelativePath(config.output)
+    buildlog = self.getRelativePath(config.buildLog)
+
+    includePaths = [self.getRelativePath(p) for p in config.includePaths]
+    assemblyPaths = [self.getRelativePath(p) for p in config.assemblyPaths]
+    forcedIncludes = [self.getRelativePath(p) for p in config.forcedIncludes]
+
+    includePaths = ';'.join(includePaths)
+    assemblyPaths = ';'.join(assemblyPaths)
+    forcedIncludes = ';'.join(forcedIncludes)
+    forcedUsings = ';'.join(config.forcedUsings)
+    compileAsManaged = config.compileAsManaged
+
+    defines = ';'.join(config.defines)
+    name = "%s|%s" % (config.name, config.platform)
+
+    def escapeArg(arg):
+      # Escape embedded quotes, then quote args containing spaces.
+      if '"' in arg:
+        arg = arg.replace('"', '\\"')
+      if " " in arg:
+        arg = '"' + arg + '"'
+      return arg
+
+    def escapeArgs(args):
+      return [escapeArg(arg) for arg in args]
+
+    args = escapeArgs(list(config.buildArgs))
+
+    buildCmd = " ".join(args)
+    cleanCmd = "@"  # no-op clean command
+    rebuildCmd = buildCmd + " -f"
+
+    writer.write(_msvsProjectConfigurationHeader % {
+      'name' : escapeAttr(name),
+      'outdir' : escapeAttr(outdir),
+      'intdir' : escapeAttr(intdir),
+      'buildlog' : escapeAttr(buildlog),
+      })
+
+    writer.write(_msvsProjectConfigurationMakeTool % {
+      'buildcmd' : escapeAttr(buildCmd),
+      'rebuildcmd' : escapeAttr(rebuildCmd),
+      'cleancmd' : escapeAttr(cleanCmd),
+      'runfile' : escapeAttr(runfile),
+      'defines' : escapeAttr(defines),
+      'includes' : escapeAttr(includePaths),
+      'forcedinc' : escapeAttr(forcedIncludes),
+      'asspath' : escapeAttr(assemblyPaths),
+      'forceduse' : escapeAttr(forcedUsings),
+      'compmanag' : escapeAttr(compileAsManaged),
+      })
+
+    # BUG FIX: the original tested config.name.endswith("|Xbox 360"),
+    # but config.name never contains the platform (they are combined
+    # into the local 'name' above), so the Xbox deployment tool block
+    # could never be emitted.  Test the combined name instead.
+    if name.endswith("|Xbox 360"):
+      writer.write(_msvsProjectConfigurationXboxDeploymentTool)
+
+    writer.write(_msvsProjectConfigurationTailer)
+
+  def _writeFiles(self, writer):
+    """Write the <Files> section listing each configuration's items.
+    """
+    configItems = {}
+    for config in self.configs:
+      configItems[config] = convertToProjectItems(
+        self.configuration,
+        config.items,
+        self.projectDir,
+        )
+
+    writer.write("\t<Files>\n")
+    self._writeSubItems(writer, configItems, indent='\t\t')
+    writer.write("\t</Files>\n")
+
+  def _writeSubItems(self, writer, configItems, indent):
+    """Recursively write out all of the subitems.
+
+    @param configItems: A dictionary mapping from the ConfigurationNode
+    to a list of the items to write for that configuration.
+    """
+    mergedFileItemConfigs = {}
+    mergedFilterSubItems = {}
+
+    # Merge the subitems from each config.
+    for config, items in configItems.iteritems():
+      for item in items:
+        if item.kind == 'filter':
+          filters = mergedFilterSubItems.setdefault(item.name, {})
+          filters[config] = item.subItems
+        elif item.kind == 'file':
+          configs = mergedFileItemConfigs.setdefault(item.filePath, [])
+          configs.append(config)
+
+    # Write out all of the <Filter> subitems first.
+    filterNames = mergedFilterSubItems.keys()
+    filterNames.sort()
+    for name in filterNames:
+      writer.write('%s<Filter\n' % indent)
+      writer.write('%s\tName="%s"\n' % (indent, escapeAttr(name)))
+      # BUG FIX: the original omitted the trailing newline here,
+      # corrupting the generated XML layout.
+      writer.write('%s\t>\n' % indent)
+
+      # Recurse on each filter's subitems.
+      # BUG FIX: the original call dropped the 'writer' argument
+      # (self._writeSubItems(filterSubItems, indent + '\t')), which
+      # raised a TypeError for any project containing a filter.
+      filterSubItems = mergedFilterSubItems[name]
+      self._writeSubItems(writer, filterSubItems, indent + '\t')
+
+      writer.write('%s</Filter>\n' % indent)
+
+    # Write out all of the <File> subitems.
+    filePaths = mergedFileItemConfigs.keys()
+    filePaths.sort()
+    for path in filePaths:
+      configs = mergedFileItemConfigs[path]
+      writer.write('%s<File\n' % indent)
+      writer.write('%s\tRelativePath="%s"\n' % (indent, escapeAttr(path)))
+      writer.write('%s\t>\n' % indent)
+
+      for config in self.configs:
+        writer.write('%s\t<FileConfiguration\n' % indent)
+        # NOTE(review): vcproj FileConfiguration names are normally
+        # "Config|Platform"; this writes config.name only -- confirm
+        # against a VS-generated .vcproj before changing.
+        writer.write('%s\t\tName="%s"\n' % (
+          indent,
+          escapeAttr(config.name),
+          ))
+
+        # Exclude from build if file not present in this config.
+        if config not in configs:
+          writer.write('%s\t\tExcludedFromBuild="true"\n' % indent)
+
+        writer.write('%s\t\t>\n' % indent)
+        writer.write('%s\t\t<Tool\n' % indent)
+        writer.write('%s\t\t\tName="VCNMakeTool"\n' % indent)
+        writer.write('%s\t\t/>\n' % indent)
+        writer.write('%s\t</FileConfiguration>\n' % indent)
+
+      writer.write('%s</File>\n' % indent)
+
+# --- MSBuild .vcxproj (VS2010+) XML templates -------------------------
+# The trailing backslashes inside the triple-quoted strings are Python
+# line continuations, joining the <Project ...> attributes onto one
+# output line.  Substitution values are pre-escaped with escapeAttr().
+_msbuildProjectHeader = """\
+<?xml version="1.0" encoding="%(encoding)s"?>
+<Project\
+ DefaultTargets="Build"\
+ ToolsVersion="%(version)s"\
+ xmlns="http://schemas.microsoft.com/developer/msbuild/2003"\
+>
+"""
+_msbuildProjectTailer = """\
+  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+  <ImportGroup Label="ExtensionTargets">
+  </ImportGroup>
+</Project>
+"""
+
+# One entry in the <ItemGroup Label="ProjectConfigurations"> list.
+_msbuildProjectConfiguration = """\
+    <ProjectConfiguration Include="%(name)s|%(platform)s">
+      <Configuration>%(name)s</Configuration>
+      <Platform>%(platform)s</Platform>
+    </ProjectConfiguration>
+"""
+
+_msbuildGlobals = """\
+  <PropertyGroup Label="Globals">
+    <ProjectGuid>%(guid)s</ProjectGuid>
+    <Keyword>MakeFileProj</Keyword>
+  </PropertyGroup>
+"""
+
+_msbuildConfigurationTypesHeader = """\
+  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+"""
+
+_msbuildConfigurationTypesTailer = """\
+  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+  <ImportGroup Label="ExtensionSettings">
+  </ImportGroup>
+"""
+
+# Per-configuration property group; ConfigurationType "Makefile" is the
+# MSBuild equivalent of the vcproj ConfigurationType="0" above.
+_msbuildConfigurationType = """\
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='%(name)s|%(platform)s'" Label="Configuration">
+    <ConfigurationType>Makefile</ConfigurationType>
+    <UseDebugLibraries>false</UseDebugLibraries>
+    <OutDir>%(outdir)s</OutDir>
+    <IntDir>%(intdir)s</IntDir>
+  </PropertyGroup>
+"""
+
+_msbuildConfigurationPropertySheet = """\
+  <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='%(name)s|%(platform)s'">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+"""
+
+# NMake command lines and IntelliSense hints for one configuration.
+_msbuildConfiguration = """\
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='%(name)s|%(platform)s'">
+    <NMakeBuildCommandLine>%(buildcmd)s</NMakeBuildCommandLine>
+    <NMakeOutput>%(output)s</NMakeOutput>
+    <NMakeCleanCommandLine>%(cleancmd)s</NMakeCleanCommandLine>
+    <NMakeReBuildCommandLine>%(rebuildcmd)s</NMakeReBuildCommandLine>
+    <NMakePreprocessorDefinitions>%(defines)s</NMakePreprocessorDefinitions>
+    <NMakeIncludeSearchPath>%(includepaths)s</NMakeIncludeSearchPath>
+    <NMakeForcedIncludes>%(forcedincludes)s</NMakeForcedIncludes>
+    <NMakeAssemblySearchPath>%(assemblypaths)s</NMakeAssemblySearchPath>
+    <NMakeForcedUsingAssemblies>%(forcedusings)s</NMakeForcedUsingAssemblies>
+  </PropertyGroup>
+"""
+
+# Per-configuration build log path.
+_msbuildLog = """\
+  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='%(name)s|%(platform)s'">
+    <BuildLog>
+      <Path>%(buildlog)s</Path>
+    </BuildLog>
+  </ItemDefinitionGroup>
+"""
+
+# Marks a file as excluded from one configuration/platform pair.
+_msbuildExcludedFile = """\
+      <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='%(config)s|%(platform)s'">true</ExcludedFromBuild>
+"""
+
+class MsBuildProjectGenerator(object):
+  """Generates a single MSBuild project (.vcxproj) file from its
+  configuration information.
+  """
+
+  # Default member values
+  encoding = 'utf-8'  # encoding declared in the XML header
+  type = "Project"    # label used in _writeIt() log messages
+
+  def __init__(self, configuration, project):
+    """Construct a new project generator instance.
+
+    @param configuration: Configuration used to resolve absolute paths.
+    @param project: A Project object containing all info required for
+    the project.
+    """
+    self.configuration = configuration
+    self.project = project
+    self.projectName = project.name
+    self.projectDir = project.dir
+    self.projectFilePath = project.path
+    self.version = project.version
+    self.configs = project.configurations.values()
+
+  def build(self):
+    """Create and write the project file.
+
+    Throws an exception if building the project file fails.
+    """
+    _writeIt(self, self.projectFilePath)
+
+  def getRelativePath(self, path):
+    """Return path relative to the project file.
+    """
+    abspath = self.configuration.abspath
+    return cake.path.relativePath(abspath(path), abspath(self.projectDir))
+
+  def _writeContents(self, writer):
+    """Write the project to the currently open file.
+    """
+    self._writeProjectHeader(writer)
+    self._writeProjectConfigurations(writer)
+    self._writeGlobals(writer)
+    self._writeConfigurationTypes(writer)
+    self._writeConfigurations(writer)
+    self._writeBuildLogs(writer)
+    self._writeFiles(writer)
+    self._writeProjectTailer(writer)
+
+  def _writeProjectHeader(self, writer):
+    """Write the project header section to the currently open file.
+
+    This should be written at the start of the file.
+    """
+    writer.write(_msbuildProjectHeader % {
+      "encoding" : escapeAttr(self.encoding),
+      "version" : escapeAttr(self.version),
+      })
+
+  def _writeProjectTailer(self, writer):
+    """Write the project tailer to the file.
+
+    This should be the last content written to the file as it closes off
+    datastructures written by the header.
+    """
+    writer.write(_msbuildProjectTailer)
+
+  def _writeProjectConfigurations(self, writer):
+    """Write the ProjectConfigurations item group listing every
+    configuration/platform pair supported by this project.
+    """
+    writer.write('  <ItemGroup Label="ProjectConfigurations">\n')
+    for config in self.configs:
+      self._writeProjectConfiguration(writer, config)
+    writer.write("  </ItemGroup>\n")
+
+  def _writeProjectConfiguration(self, writer, config):
+    """Write a single <ProjectConfiguration> entry.
+    """
+    writer.write(_msbuildProjectConfiguration % {
+      "name" : escapeAttr(config.name),
+      "platform" : escapeAttr(config.platform),
+      })
+
+  def _writeGlobals(self, writer):
+    """Write the Globals property group (project GUID and keyword).
+    """
+    guid = self.project.externalGuid
+
+    writer.write(_msbuildGlobals % {
+      "guid" : escapeAttr(guid),
+      })
+
+  def _writeConfigurationTypes(self, writer):
+    """Write the per-configuration type property groups and the
+    Microsoft.Cpp property-sheet imports.
+    """
+    writer.write(_msbuildConfigurationTypesHeader)
+    for config in self.configs:
+      self._writeConfigurationType(writer, config)
+    writer.write(_msbuildConfigurationTypesTailer)
+    for config in self.configs:
+      self._writeConfigurationPropertySheet(writer, config)
+    writer.write('  <PropertyGroup Label="UserMacros" />\n')
+
+  def _writeConfigurationType(self, writer, config):
+    """Write the Configuration-labelled property group (type, output
+    and intermediate directories) for one configuration.
+    """
+    outdir = self.getRelativePath(os.path.dirname(config.output))
+    intdir = self.getRelativePath(config.intermediateDir)
+
+    writer.write(_msbuildConfigurationType % {
+      "name" : escapeAttr(config.name),
+      "platform" : escapeAttr(config.platform),
+      "outdir" : escapeAttr(outdir),
+      "intdir" : escapeAttr(intdir),
+      })
+
+  def _writeConfigurationPropertySheet(self, writer, config):
+    """Write the PropertySheets import group for one configuration.
+    """
+    writer.write(_msbuildConfigurationPropertySheet % {
+      "name" : escapeAttr(config.name),
+      "platform" : escapeAttr(config.platform),
+      })
+
+  def _writeConfigurations(self, writer):
+    """Write the NMake property group for every configuration.
+    """
+    for config in self.configs:
+      self._writeConfiguration(writer, config)
+
+  def _writeConfiguration(self, writer, config):
+    """Write the NMake command lines and IntelliSense hints for one
+    configuration.
+    """
+    output = self.getRelativePath(config.output)
+
+    includePaths = [self.getRelativePath(p) for p in config.includePaths]
+    assemblyPaths = [self.getRelativePath(p) for p in config.assemblyPaths]
+
+    # Append the corresponding $(NMake...) macro so inherited values
+    # from imported property sheets are preserved.
+    includePaths = ';'.join(includePaths + ['$(NMakeIncludeSearchPath)'])
+    assemblyPaths = ';'.join(assemblyPaths + ['$(NMakeAssemblySearchPath)'])
+    forcedIncludes = ';'.join(itertools.chain(config.forcedIncludes, ['$(NMakeForcedIncludes)']))
+    forcedUsings = ';'.join(itertools.chain(config.forcedUsings, ['$(NMakeForcedUsingAssemblies)']))
+    defines = ';'.join(itertools.chain(config.defines, ['$(NMakePreprocessorDefinitions)']))
+
+    def escapeArg(arg):
+      # Escape embedded quotes, then quote args containing spaces.
+      if '"' in arg:
+        arg = arg.replace('"', '\\"')
+      if " " in arg:
+        arg = '"' + arg + '"'
+      return arg
+
+    def escapeArgs(args):
+      return [escapeArg(arg) for arg in args]
+
+    args = escapeArgs(list(config.buildArgs))
+
+    buildCmd = " ".join(args)
+    cleanCmd = "@"  # no-op clean command
+    rebuildCmd = buildCmd + " -f"
+
+    writer.write(_msbuildConfiguration % {
+      "name" : escapeAttr(config.name),
+      "platform" : escapeAttr(config.platform),
+      "buildcmd" : escapeAttr(buildCmd),
+      "output" : escapeAttr(output),
+      "cleancmd" : escapeAttr(cleanCmd),
+      "rebuildcmd" : escapeAttr(rebuildCmd),
+      "defines" : escapeAttr(defines),
+      "includepaths" : escapeAttr(includePaths),
+      "forcedincludes" : escapeAttr(forcedIncludes),
+      "assemblypaths" : escapeAttr(assemblyPaths),
+      "forcedusings" : escapeAttr(forcedUsings),
+      })
+
+  def _writeBuildLogs(self, writer):
+    """Write the build-log item definition group for every
+    configuration.
+    """
+    for config in self.configs:
+      self._writeBuildLog(writer, config)
+
+  def _writeBuildLog(self, writer, config):
+    """Write the build-log path for one configuration.
+    """
+    buildLog = self.getRelativePath(config.buildLog)
+
+    writer.write(_msbuildLog % {
+      "name" : escapeAttr(config.name),
+      "platform" : escapeAttr(config.platform),
+      "buildlog" : escapeAttr(buildLog),
+      })
+
+  def _writeFiles(self, writer):
+    """Write the item group listing each configuration's files.
+    """
+    configItems = {}
+    for config in self.configs:
+      configItems[config] = convertToProjectItems(
+        self.configuration,
+        config.items,
+        self.projectDir,
+        )
+
+    writer.write('  <ItemGroup>\n')
+    self._writeSubFiles(writer, configItems)
+    writer.write('  </ItemGroup>\n')
+
+  def _writeSubFiles(self, writer, configItems, parent=None):
+    """Recursively write out all of the subitems.
+
+    @param configItems: A dictionary mapping from the ConfigurationNode
+    to a list of the items to write for that configuration.
+    @param parent: Backslash-joined filter path of the enclosing
+    filters, or None at the top level.
+    """
+    mergedFileItemConfigs = {}
+    mergedFilterSubItems = {}
+
+    # Merge the subitems from each config.
+    for config, items in configItems.iteritems():
+      for item in items:
+        if item.kind == 'filter':
+          filters = mergedFilterSubItems.setdefault(item.name, {})
+          filters[config] = item.subItems
+        elif item.kind == 'file':
+          configs = mergedFileItemConfigs.setdefault(item.filePath, [])
+          configs.append(config)
+
+    # Recurse into filters first (filters themselves are written by the
+    # separate .filters file; only files appear here).
+    filterNames = mergedFilterSubItems.keys()
+    filterNames.sort()
+    for name in filterNames:
+      if parent:
+        path = parent + "\\" + name
+      else:
+        path = name
+
+      # Recurse on each filter's subitems.
+      filterSubItems = mergedFilterSubItems[name]
+      self._writeSubFiles(writer, filterSubItems, path)
+
+    # Write out all of the <None> file entries, marking files excluded
+    # from configurations that do not contain them.
+    filePaths = mergedFileItemConfigs.keys()
+    filePaths.sort()
+    for path in filePaths:
+      configs = mergedFileItemConfigs[path]
+
+      excluded = False
+      for config in self.configs:
+        if config not in configs:
+          excluded = True
+          break
+
+      if excluded:
+        writer.write('    <None Include="%(name)s">\n' % {
+          "name" : escapeAttr(path),
+          })
+        for config in self.configs:
+          if config not in configs:
+            writer.write(_msbuildExcludedFile % {
+              "config" : escapeAttr(config.name),
+              "platform" : escapeAttr(config.platform),
+              })
+        writer.write('    </None>\n')
+      else:
+        writer.write('    <None Include="%(name)s" />\n' % {
+          "name" : escapeAttr(path),
+          })
+
+# --- MSBuild .vcxproj.filters XML templates ---------------------------
+# The trailing backslashes are Python line continuations joining the
+# <Project ...> attributes onto a single output line.
+_msbuildFiltersHeader = """\
+<?xml version="1.0" encoding="%(encoding)s"?>
+<Project\
+ ToolsVersion="%(version)s"\
+ xmlns="http://schemas.microsoft.com/developer/msbuild/2003"\
+>
+"""
+_msbuildFiltersTailer = """\
+</Project>
+"""
+
+# Declares one filter (folder) with a deterministic GUID.
+_msbuildFolder = """\
+    <Filter Include="%(name)s">
+      <UniqueIdentifier>%(guid)s</UniqueIdentifier>
+    </Filter>
+"""
+
+# Associates a file with the filter it appears under.
+_msbuildFile = """\
+    <None Include="%(name)s">
+      <Filter>%(filter)s</Filter>
+    </None>
+"""
+
+# A file at the top level, outside any filter.
+_msbuildFileNoFilter = """\
+    <None Include="%(name)s" />
+"""
+
+class MsBuildFiltersGenerator(object):
+  """Generates a single MSBuild .vcxproj.filters file describing the
+  solution-explorer folder layout for a project.
+  """
+
+  # Default member values
+  encoding = 'utf-8'  # encoding declared in the XML header
+  type = "Filters"    # label used in _writeIt() log messages
+
+  def __init__(self, configuration, project):
+    """Construct a new filters generator instance.
+
+    @param configuration: Configuration used to resolve absolute paths.
+    @param project: A Project object containing all info required for
+    the project.
+    """
+    self.configuration = configuration
+    self.project = project
+    self.projectName = project.name
+    self.projectDir = project.dir
+    self.projectFiltersPath = project.filtersPath
+    self.version = project.version
+    self.configs = project.configurations.values()
+
+  def build(self):
+    """Create and write the .vcxproj.filters file.
+
+    Throws an exception if building the filters file fails.
+    """
+    _writeIt(self, self.projectFiltersPath)
+
+  def _writeContents(self, writer):
+    """Write the filters file to the currently open file.
+    """
+    self._writeFiltersHeader(writer)
+    self._writeFoldersAndFiles(writer)
+    self._writeFiltersTailer(writer)
+
+  def _writeFiltersHeader(self, writer):
+    """Write the filters header section to the currently open file.
+
+    This should be written at the start of the file.
+    """
+    writer.write(_msbuildFiltersHeader % {
+      "encoding" : escapeAttr(self.encoding),
+      "version" : escapeAttr(self.version),
+      })
+
+  def _writeFiltersTailer(self, writer):
+    """Write the filters tailer to the file.
+
+    This should be the last content written to the file as it closes off
+    datastructures written by the header.
+    """
+    writer.write(_msbuildFiltersTailer)
+
+  def _writeFoldersAndFiles(self, writer):
+    """Write one item group of <Filter> declarations followed by one
+    item group mapping each file to its filter.
+    """
+    configItems = {}
+    for config in self.configs:
+      configItems[config] = convertToProjectItems(
+        self.configuration,
+        config.items,
+        self.projectDir,
+        )
+
+    writer.write('  <ItemGroup>\n')
+    self._writeSubFolders(writer, configItems)
+    writer.write('  </ItemGroup>\n')
+
+    writer.write('  <ItemGroup>\n')
+    self._writeSubFiles(writer, configItems)
+    writer.write('  </ItemGroup>\n')
+
+  def _writeSubFolders(self, writer, configItems, parent=None):
+    """Recursively write a <Filter> declaration for every filter.
+
+    @param configItems: A dictionary mapping from the ConfigurationNode
+    to a list of the items to write for that configuration.
+    @param parent: Backslash-joined path of the enclosing filters, or
+    None at the top level.
+    """
+    mergedFileItemConfigs = {}
+    mergedFilterSubItems = {}
+
+    # Merge the subitems from each config.
+    # NOTE(review): mergedFileItemConfigs is populated here but never
+    # read in this method (files are handled by _writeSubFiles) --
+    # appears to be dead code copied from the sibling method.
+    for config, items in configItems.iteritems():
+      for item in items:
+        if item.kind == 'filter':
+          filters = mergedFilterSubItems.setdefault(item.name, {})
+          filters[config] = item.subItems
+        elif item.kind == 'file':
+          configs = mergedFileItemConfigs.setdefault(item.filePath, [])
+          configs.append(config)
+
+    # Write out all of the <Filter> subitems.
+    filterNames = mergedFilterSubItems.keys()
+    filterNames.sort()
+    for name in filterNames:
+      if parent:
+        path = parent + "\\" + name
+      else:
+        path = name
+      # GUID is derived from the filter path so output is deterministic.
+      guid = generateGuid(path)
+
+      writer.write(_msbuildFolder % {
+        "name" : escapeAttr(path),
+        "guid" : escapeAttr(guid),
+        })
+
+      # Recurse on each filter's subitems.
+      filterSubItems = mergedFilterSubItems[name]
+      self._writeSubFolders(writer, filterSubItems, path)
+
+  def _writeSubFiles(self, writer, configItems, parent=None):
+    """Recursively write the file-to-filter mapping entries.
+
+    @param configItems: A dictionary mapping from the ConfigurationNode
+    to a list of the items to write for that configuration.
+    @param parent: Backslash-joined path of the enclosing filters, or
+    None at the top level (files without a filter).
+    """
+    mergedFileItemConfigs = {}
+    mergedFilterSubItems = {}
+
+    # Merge the subitems from each config.
+    for config, items in configItems.iteritems():
+      for item in items:
+        if item.kind == 'filter':
+          filters = mergedFilterSubItems.setdefault(item.name, {})
+          filters[config] = item.subItems
+        elif item.kind == 'file':
+          configs = mergedFileItemConfigs.setdefault(item.filePath, [])
+          configs.append(config)
+
+    # Recurse into filters first.
+    filterNames = mergedFilterSubItems.keys()
+    filterNames.sort()
+    for name in filterNames:
+      if parent:
+        path = parent + "\\" + name
+      else:
+        path = name
+
+      # Recurse on each filter's subitems.
+      filterSubItems = mergedFilterSubItems[name]
+      self._writeSubFiles(writer, filterSubItems, path)
+
+    # Write out all of the file entries for this level.
+    filePaths = mergedFileItemConfigs.keys()
+    filePaths.sort()
+    for path in filePaths:
+      if parent:
+        writer.write(_msbuildFile % {
+          "name" : escapeAttr(path),
+          "filter" : escapeAttr(parent),
+          })
+      else:
+        writer.write(_msbuildFileNoFilter % {
+          "name" : escapeAttr(path),
+          })
+
+class MsvsSolutionGenerator(object):
+  """Writes a Visual Studio .sln file referencing generated projects.
+  """
+
+  # Default member values
+  file = None
+  encoding = 'utf-8'  # encoding used when writing the stream
+  type = "Solution"   # label used in _writeIt() log messages
+
+  def __init__(self, configuration, solution, registry):
+    """Construct a new solution file writer.
+
+    @param solution: The Solution object containing details of solution
+    file to build.
+
+    @param registry: The ProjectRegistry to use to find details of referenced
+    projects.
+    """
+    self.configuration = configuration
+    self.registry = registry
+    self.solution = solution
+    self.name = solution.name
+    self.solutionDir = solution.dir
+    self.solutionFilePath = solution.path
+    self.version = solution.version
+    # Format versions 7.00/8.00 correspond to the VS.NET 2002/2003
+    # solution dialect, which differs in several sections below.
+    self.isDotNet = solution.version in ['7.00', '8.00']
+
+    self.solutionConfigurations = list(solution.configurations.values())
+    self.solutionConfigurations.sort(key=lambda config: (config.name, config.platform))
+
+    self.solutionGUID = generateGuid(self.solutionFilePath)
+
+    # Construct a sorted list of all project files.
+    projectFilePathToProject = {}
+    for solutionConfig in self.solutionConfigurations:
+      for projectConfig in solutionConfig.projectConfigurations:
+        project = self.registry.getProjectByPath(projectConfig.path)
+        if project is not None:
+          # NOTE(review): 'projectConfig' is rebound here, shadowing
+          # the loop variable for the rest of this iteration -- confirm
+          # intended.
+          key = (projectConfig.name, projectConfig.platform)
+          projectConfig = project.configurations.get(key, None)
+          if projectConfig is None:
+            continue
+          path = project.path
+          projectFilePathToProject[path] = project
+        else:
+          print "Warning: skipping project %s (not built by cake)" % projectConfig.path
+    projectFilePaths = projectFilePathToProject.keys()
+    projectFilePaths.sort()
+    self.projects = [projectFilePathToProject[p] for p in projectFilePaths]
+
+    # Collect every (solution variant, project variant) pairing seen.
+    variants = set()
+    for solutionConfig in self.solutionConfigurations:
+      for projectConfig in solutionConfig.projectConfigurations:
+        solutionVariant = self.getSolutionVariant(solutionConfig)
+        projectVariant = self.getProjectVariant(projectConfig)
+
+        variants.add((solutionVariant, projectVariant))
+    self.variants = variants
+
+  def getSolutionVariant(self, solutionConfig):
+    """Return the display name of a solution configuration.
+    """
+    if self.isDotNet:
+      # .NET VS versions do not support user-defined solution platform names,
+      # so use a project config name in an attempt to find a unique config name.
+      if solutionConfig.projectConfigurations:
+        return solutionConfig.projectConfigurations[0].name
+      else:
+        return solutionConfig.name
+    else:
+      return "%s|%s" % (solutionConfig.name, solutionConfig.platform)
+
+  def getProjectVariant(self, projectConfig):
+    """Return the "Config|Platform" name of a project configuration.
+    """
+    return "%s|%s" % (projectConfig.name, projectConfig.platform)
+
+  def getRelativePath(self, path):
+    """Return path relative to the solution file.
+    """
+    abspath = self.configuration.abspath
+    return cake.path.relativePath(abspath(path), abspath(self.solutionDir))
+
+  def build(self):
+    """Actually write the target file.
+    """
+    _writeIt(self, self.solutionFilePath)
+
+  def _writeContents(self, writer):
+    """Write the solution part (this is essentially the whole file's contents)
+    """
+    writer.write(u"\ufeff\r\n") # BOM
+    self._writeHeader(writer)
+    self._writeProjectsSection(writer)
+    self._writeGlobalSection(writer)
+
+  def _writeHeader(self, writer):
+    """Write the solution header.
+
+    Visual Studio uses this to determine which version of the .sln format this is.
+    """
+    writer.write(
+      "Microsoft Visual Studio Solution File, Format Version %(version)s\r\n" % {
+        'version' : self.version,
+        }
+      )
+    writer.write(
+      "# Generated by Cake for Visual Studio\r\n"
+      )
+
+  def _writeProjectsSection(self, writer):
+    """Write the projects section.
+
+    This section declares all of the constituent project files.
+    """
+    # Build a global list of all projects across all solution configurations
+    for project in self.projects:
+      self._writeProject(writer, project)
+
+  def _writeProject(self, writer, project):
+    """Write details of an individual project.
+
+    This associates an internal project guid with the visual studio project files
+    and their external guids.
+    """
+    # Note: The external GUID must match up to that generated in the
+    # .vcproj file. We could either just duplicate the logic here or
+    # implement a parser that pulled the GUID from the .vcproj.
+    # For now just duplicate logic (requires that all .vcproj files
+    # are also generated by SCons).
+    projectName = project.name
+    externalGuid = project.externalGuid
+    internalGuid = project.internalGuid
+
+    projectFilePath = project.path
+    relativePath = self.getRelativePath(projectFilePath)
+
+    writer.write('Project("%s") = "%s", "%s", "%s"\r\n' % (
+      internalGuid, projectName, relativePath, externalGuid,
+      ))
+
+    if self.isDotNet:
+      # Older format requires an (empty) dependencies section per project.
+      writer.write("\tProjectSection(ProjectDependencies) = postProject\r\n")
+      writer.write("\tEndProjectSection\r\n")
+
+    writer.write('EndProject\r\n')
+
+  def _writeGlobalSection(self, writer):
+    """Write all global sections.
+    """
+    writer.write("Global\r\n")
+    self._writeSourceCodeControlSection(writer)
+    self._writeSolutionConfigurationPlatformsSection(writer)
+    self._writeProjectConfigurationPlatformsSection(writer)
+    if self.isDotNet:
+      self._writeExtensibilityGlobalsSection(writer)
+      self._writeExtensibilityAddInsSection(writer)
+    else:
+      self._writeSolutionPropertiesSection(writer)
+    writer.write("EndGlobal\r\n")
+
+  def _writeSourceCodeControlSection(self, writer):
+    """Write the section that defines the source code control for the projects.
+
+    Looks up the MSVS_SCC_PROVIDER of the environment used to build the projects.
+    """
+    projectsWithScc = []
+
+    for project in self.projects:
+      if project.sccProvider is not None:
+        projectsWithScc.append(project)
+
+    # Omit the whole section when no project uses source control.
+    if not projectsWithScc:
+      return
+
+    writer.write("\tGlobalSection(SourceCodeControl) = preSolution\r\n")
+
+    writer.write(
+      "\t\tSccNumberOfProjects = %i\r\n" % len(projectsWithScc)
+      )
+
+    i = 0
+    for project in projectsWithScc:
+      relativePath = self.getRelativePath(project.path)
+
+      sccLocalPath = project.sccLocalPath
+      if sccLocalPath is None:
+        sccLocalPath = os.path.dirname(relativePath)
+
+      sccProvider = project.sccProvider
+      if sccProvider is None:
+        sccProvider = ''
+
+      sccProjectName = project.sccProjectName
+      if sccProjectName is None:
+        sccProjectName = project.name
+
+      def escape(s):
+        # .sln SCC values escape backslashes and encode spaces.
+        s = s.replace('\\', '\\\\')
+        s = s.replace(' ', '\\u0020')
+        return s
+
+      writer.write(
+        "\t\tSccProjectUniqueName%(id)i = %(file_base)s\r\n"
+        "\t\tSccProjectName%(id)i = %(scc_project_name)s\r\n"
+        "\t\tSccLocalPath%(id)i = %(scc_local_path)s\r\n"
+        "\t\tSccProvider%(id)i = %(scc_provider)s\r\n"
+        "\t\tCanCheckoutShared = true\r\n"
+        % {"id" : i,
+           "file_base" : escape(relativePath),
+           "scc_local_path" : escape(sccLocalPath),
+           "scc_project_name" : escape(sccProjectName),
+           "scc_provider" : escape(sccProvider),
+           }
+        )
+      i += 1
+
+    writer.write(
+      "\t\tSolutionUniqueID = %s\r\n" % self.solutionGUID
+      )
+    writer.write("\tEndGlobalSection\r\n")
+
+  def _writeSolutionConfigurationPlatformsSection(self, writer):
+    """Write the solution configuration list (section name differs
+    between the .NET and later solution formats).
+    """
+    if not self.solutionConfigurations:
+      return
+
+    if self.isDotNet:
+      writer.write(
+        "\tGlobalSection(SolutionConfiguration) = preSolution\r\n"
+        )
+    else:
+      writer.write(
+        "\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n"
+        )
+
+    for solutionVariant, _ in self.variants:
+      writer.write("\t\t%s = %s\r\n" % (
+        solutionVariant,
+        solutionVariant,
+        ))
+
+    writer.write("\tEndGlobalSection\r\n")
+
+  def _writeProjectConfigurationPlatformsSection(self, writer):
+    """Map each solution variant to the active (and optionally built)
+    project variant for every referenced project.
+    """
+    if self.isDotNet:
+      writer.write("\tGlobalSection(ProjectConfiguration) = postSolution\r\n")
+    else:
+      writer.write("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n")
+
+    # Note: Not bothering to sort these because VS seems to have a strange sort
+    # order ('0' comes after '9').
+    for solutionConfig in self.solutionConfigurations:
+      for projectConfig in solutionConfig.projectConfigurations:
+        project = self.registry.getProjectByPath(projectConfig.path)
+        if project is None:
+          continue # Skip unknown projects
+
+        guid = project.externalGuid
+        solutionVariant = self.getSolutionVariant(solutionConfig)
+        projectVariant = self.getProjectVariant(projectConfig)
+
+        writer.write(
+          "\t\t%(guid)s.%(slnvariant)s.ActiveCfg = %(projvariant)s\r\n" % {
+            "guid" : guid,
+            "slnvariant" : solutionVariant,
+            "projvariant" : projectVariant,
+            })
+
+        # Only emit Build.0 for configs actually built by this variant.
+        if projectConfig.build:
+          writer.write(
+            "\t\t%(guid)s.%(slnvariant)s.Build.0 = %(projvariant)s\r\n" % {
+              "guid" : guid,
+              "slnvariant" : solutionVariant,
+              "projvariant" : projectVariant,
+              })
+
+    writer.write("\tEndGlobalSection\r\n")
+
+  def _writeExtensibilityGlobalsSection(self, writer):
+    """Write the (empty) ExtensibilityGlobals section (.NET formats).
+    """
+    writer.write("\tGlobalSection(ExtensibilityGlobals) = postSolution\r\n")
+    writer.write("\tEndGlobalSection\r\n")
+
+  def _writeExtensibilityAddInsSection(self, writer):
+    """Write the (empty) ExtensibilityAddIns section (.NET formats).
+    """
+    writer.write("\tGlobalSection(ExtensibilityAddIns) = postSolution\r\n")
+    writer.write("\tEndGlobalSection\r\n")
+
+  def _writeSolutionPropertiesSection(self, writer):
+    """Write the SolutionProperties section (non-.NET formats).
+    """
+    writer.write("\tGlobalSection(SolutionProperties) = preSolution\r\n")
+    writer.write("\t\tHideSolutionNode = FALSE\r\n")
+    writer.write("\tEndGlobalSection\r\n")
Index: cake/cake/library/script.py
===================================================================
--- cake/cake/library/script.py (revision 0)
+++ cake/cake/library/script.py (working copy)
@@ -0,0 +1,262 @@
+"""Script Tool.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import os.path
+
+from cake.library import Tool, FileTarget, getPaths, getTasks
+from cake.script import Script
+
+class ScriptTool(Tool):
+  """Tool that provides utilities for performing Script operations.
+  """
+
+  def __init__(self, *args, **kwargs):
+    Tool.__init__(self, *args, **kwargs)
+    # Maps normalised absolute script path -> Script instance so each
+    # script is only included once per tool instance (see _include()).
+    self._included = {}
+
+  @property
+  def path(self):
+    """The path of the currently executing script.
+    """
+    return Script.getCurrent().path
+
+  @property
+  def dir(self):
+    """The path of the directory of the currently executing script.
+    """
+    return Script.getCurrent().dir
+
+  @property
+  def variant(self):
+    """The Variant the currently executing script is being built with.
+    """
+    return Script.getCurrent().variant
+
+  def setResult(self, **kwargs):
+    """Export a result from this script that other scripts can import.
+
+    Other scripts can use getResult(script, name) to get the result
+    exported by the other script calling setResult(name=result).
+    """
+    Script.getCurrent().setResult(**kwargs)
+
+  def getResult(self, script, name, *args, **kwargs):
+    """Get a placeholder value that will yield the result of another
+    script once that other script has finished executing.
+
+    @param script: Path of the script whose result to fetch; resolved
+    via get(), so it accepts the same path forms.
+    @param name: Name of the result exported by the other script.
+    """
+    return self.get(script).getResult(name, *args, **kwargs)
+
+ def get(self, script, keywords={}, useContext=None, configScript=None, configScriptName=None):
+ """Get another script to use in referencing targets.
+
+ @param script: Path of the script to load.
+ @type script: string
+
+ @param keywords: A set of keywords used to find the variant the script
+ will be executed with. The variant is looked up in the script's configuration.
+ @type keywords: dictionary of string -> string
+
+ @param useContext: If False or if None and either configScript or configScriptName
+ are not None then lookup the corresponding configuration script starting from the
+ script's path, if True then use the current configuration/variant.
+ @type useContext: bool or None
+
+ @param configScript: The path of the configuration script to use to execute the script.
+ Ignored if useContext is True.
+ @type configScript: string or None
+
+ @param configScriptName: If not None and configScript is None then find the
+ configuration script with this name starting the search at the script's path.
+ Ignored if useContext is True.
+ @type configScriptName: string or None
+ """
+ if not isinstance(script, basestring):
+ raise ValueError("'script' must be a string")
+
+ script = self.configuration.basePath(script)
+
+ if useContext is None:
+ useContext = configScript is None and configScriptName is None
+
+ if useContext:
+ # Use the current configuration and lookup the variant relative
+ # to the current variant.
+ baseVariant = Script.getCurrent().variant
+ variant = self.configuration.findVariant(keywords, baseVariant=baseVariant)
+ return self.configuration.execute(path=script, variant=variant)
+ else:
+ # Re-evaluate the configuration to execute the script with.
+ # Uses the keywords specified to find the variant in the variants
+ # defined in that configuration.
+ path = self.configuration.abspath(script)
+ if configScript is None:
+ configuration = self.engine.findConfiguration(
+ path=path,
+ configScriptName=configScriptName,
+ )
+ else:
+ configuration = self.engine.getConfiguration(
+ path=self.configuration.abspath(configScript),
+ )
+ variant = configuration.findVariant(keywords)
+ return configuration.execute(path=path, variant=variant)
+
+ def cwd(self, *args):
+ """Return the path prefixed with the this script's directory.
+
+ Examples::
+ env.cwd("a") -> "{cwd}/a"
+ env.cwd(["a", "b", "c"]) -> ["{cwd}/a", "{cwd}/b", "{cwd}/c"]
+
+ @param args: The arguments that need to have the prefix added.
+ @type args: string or list(string)
+
+ @return: The path prefixed with this script's directory.
+ @rtype: string or list(string)
+ """
+ script = Script.getCurrent()
+ return script.cwd(*args)
+
+ def include(self, scripts):
+ """Include another script within the context of the currently
+ executing script.
+
+ A given script will only be included once.
+
+ @param scripts: A path or sequence of paths of scripts to include.
+ @type scripts: string or sequence of string
+ """
+ basePath = self.configuration.basePath
+
+ scripts = basePath(scripts)
+
+ include = self._include
+ if isinstance(scripts, basestring):
+ include(scripts)
+ else:
+ for path in scripts:
+ include(path)
+
+  def _include(self, path):
+    """Include another script for execution within this script's context.
+
+    A script will only be included once within a given context.
+
+    @param path: The path of the file to include.
+    @type path: string
+    """
+    path = os.path.normpath(path)
+
+    # Normalise case and make absolute so the same file isn't included
+    # twice via differently-spelled paths.
+    normalisedPath = os.path.normcase(self.configuration.abspath(path))
+
+    if normalisedPath in self._included:
+      return
+
+    # The included script shares the current script's variant, engine,
+    # configuration, task and tools, so it behaves as if inlined here.
+    currentScript = Script.getCurrent()
+    includedScript = Script(
+      path=path,
+      variant=currentScript.variant,
+      engine=currentScript.engine,
+      configuration=currentScript.configuration,
+      task=currentScript.task,
+      tools=currentScript.tools,
+      parent=currentScript,
+      )
+    # Registered before execute() so a recursive include of the same
+    # path is a no-op.  NOTE(review): the entry remains registered even
+    # if execute() fails — presumably intentional; confirm.
+    self._included[normalisedPath] = includedScript
+    includedScript.execute()
+
+ def execute(self, scripts, **keywords):
+ """Execute another script as a background task.
+
+ Executes the other script using the current script's configuration
+ but potentially a different build variant.
+
+ If you need to execute a script using a different configuration
+ then use the 'executeNoContext' method instead.
+
+ @param scripts: A path or sequence of paths of scripts to execute.
+ @type scripts: string or sequence of string
+
+ @return: A Script object or sequence of Script objects that can be used
+ to determine what scripts will be executed. The script's task will
+ complete when the script has finished executing.
+ @rtype: L{Script} or C{list} of L{Script}
+ """
+ basePath = self.configuration.basePath
+
+ scripts = basePath(scripts)
+
+ script = Script.getCurrent()
+ configuration = script.configuration
+ variant = configuration.findVariant(keywords, baseVariant=script.variant)
+ execute = configuration.execute
+ if isinstance(scripts, basestring):
+ return execute(scripts, variant)
+ else:
+ return [execute(path, variant) for path in scripts]
+
+ def run(self, func, args=None, targets=None, sources=[]):
+ """Execute the specified python function as a task.
+
+ Only executes the function after the sources have been built and only
+ if the target exists, args is the same as last run and the sources
+ haven't changed.
+
+ @note: I couldn't think of a better class to put this function in so
+ for now it's here although it doesn't really belong.
+ """
+ engine = self.engine
+ configuration = self.configuration
+
+ basePath = configuration.basePath
+
+ targets = basePath(targets)
+ sources = basePath(sources)
+
+ sourceTasks = getTasks(sources)
+
+ def _run():
+ sourcePaths = getPaths(sources)
+ if targets:
+ buildArgs = (args, sourcePaths)
+ try:
+ _, reason = configuration.checkDependencyInfo(
+ targets[0],
+ buildArgs,
+ )
+ if reason is None:
+ # Up to date
+ return
+
+ engine.logger.outputDebug(
+ "reason",
+ "Building '%s' because '%s'\n" % (targets[0], reason),
+ )
+ except EnvironmentError:
+ pass
+
+ try:
+ return func()
+ finally:
+ if targets:
+ newDependencyInfo = configuration.createDependencyInfo(
+ targets=targets,
+ args=buildArgs,
+ dependencies=sourcePaths,
+ )
+ configuration.storeDependencyInfo(newDependencyInfo)
+
+ if self.enabled:
+ task = engine.createTask(_run)
+ task.startAfter(sourceTasks)
+ else:
+ task = None
+
+ if targets is not None:
+ return [FileTarget(path=t, task=task) for t in targets]
+ else:
+ return task
Index: cake/cake/library/shell.py
===================================================================
--- cake/cake/library/shell.py (revision 0)
+++ cake/cake/library/shell.py (working copy)
@@ -0,0 +1,207 @@
+"""Shell Tool.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import os
+import subprocess
+import cake.filesys
+import cake.path
+from cake.library import Tool, FileTarget, flatten, getPaths, getTasks, waitForAsyncResult
+
+# Sentinel used by ShellTool.get() to distinguish 'no default supplied'
+# from an explicit default of None.
+_undefined = object()
+
+class ShellTool(Tool):
+  """Tool for running shell commands as build tasks.
+
+  Also acts as a mapping over the environment variables that spawned
+  processes will receive (see the dict-style methods below).
+  """
+
+  def __init__(self, configuration, env=None):
+    Tool.__init__(self, configuration)
+    # Copy the environment so later mutations don't affect os.environ
+    # (or the caller's dict).
+    if env is None:
+      self._env = dict(os.environ)
+    else:
+      self._env = dict(env)
+
+  def run(self, args, targets=None, sources=[], cwd=None, shell=False, removeTargets=False):
+    """Run a shell command to build specified targets.
+
+    @param args: The command-line to run.
+    Either a list of strings, one item per argument, or a single string.
+    @type args: string or list of string
+
+    @param targets: If specified then a list of the target file paths that
+    will be generated by this shell command.
+
+    @param sources: If specified then a list of the sources for building
+    the target. If any of these sources change then the command will be
+    re-executed.
+
+    @param cwd: The directory to spawn the shell command in.
+    If not specified then uses the configuration.baseDir.
+
+    @param shell: Whether to run this command using the default shell,
+    eg. '/bin/sh' or 'cmd.exe'.
+
+    @param removeTargets: If specified then the target files will be removed
+    before running the command if they already exist.
+
+    @return: See _run(): a list of FileTarget when 'targets' is given,
+    otherwise the task running the command.
+    """
+    # NOTE(review): cloned presumably so subsequent env changes on this
+    # tool don't affect the queued command — confirm clone() semantics.
+    tool = self.clone()
+
+    basePath = self.configuration.basePath
+
+    return tool._run(args, basePath(targets), basePath(sources), basePath(cwd), shell, removeTargets)
+
+ def _run(self, args, targets=None, sources=[], cwd=None, shell=False, removeTargets=False):
+
+ engine = self.engine
+
+ def spawnProcess(targets, sources, cwd):
+
+ sourcePaths = getPaths(sources)
+ configuration = self.configuration
+ abspath = configuration.abspath
+
+ if isinstance(args, basestring):
+ argsString = args
+ argsList = [args]
+ executable = None
+ else:
+ argsString = " ".join(args)
+ argsList = args
+ executable = abspath(args[0])
+
+ if targets:
+ # Check dependencies to see if they've changed
+ buildArgs = argsList + sourcePaths + targets
+ try:
+ _, reasonToBuild = configuration.checkDependencyInfo(
+ targets[0],
+ buildArgs,
+ )
+ if reasonToBuild is None:
+ # Target is up to date, no work to do
+ return
+ except EnvironmentError:
+ pass
+
+ engine.logger.outputDebug(
+ "reason",
+ "Rebuilding '%s' because %s.\n" % (targets[0], reasonToBuild),
+ )
+
+ # Create target directories first
+ if targets:
+ for t in targets:
+ absT = abspath(t)
+ cake.filesys.makeDirs(cake.path.dirName(absT))
+ if removeTargets:
+ cake.filesys.remove(absT)
+
+ if cwd is None:
+ cwd = configuration.baseDir
+ else:
+ cwd = abspath(cwd)
+
+ # Output the command-line we're about to run.
+ engine.logger.outputInfo("Running %s\n" % argsList[0])
+
+ engine.logger.outputDebug(
+ "run",
+ "run: %s\n" % argsString,
+ )
+
+ try:
+ p = subprocess.Popen(
+ args=args,
+ executable=executable,
+ env=self._env,
+ stdin=subprocess.PIPE,
+ shell=shell,
+ cwd=cwd,
+ )
+ except EnvironmentError, e:
+ msg = "cake: failed to launch %s: %s\n" % (argsList[0], str(e))
+ engine.raiseError(msg)
+
+ p.stdin.close()
+ exitCode = p.wait()
+
+ if exitCode != 0:
+ msg = "%s exited with code %i\n" % (argsList[0], exitCode)
+ engine.raiseError(msg)
+
+ if targets:
+ newDependencyInfo = configuration.createDependencyInfo(
+ targets=targets,
+ args=buildArgs,
+ dependencies=sourcePaths,
+ )
+ configuration.storeDependencyInfo(newDependencyInfo)
+
+ @waitForAsyncResult
+ def _run(targets, sources, cwd):
+ if self.enabled:
+ tasks = getTasks(sources)
+
+ task = engine.createTask(lambda t=targets, s=sources, c=cwd: spawnProcess(t, s, c))
+ task.startAfter(tasks)
+ else:
+ task = None
+
+ if targets:
+ return [FileTarget(path=t, task=task) for t in targets]
+ else:
+ return task
+
+ return _run(flatten(targets), flatten(sources), cwd)
+
+  # Mapping-style access to the environment passed to spawned processes.
+
+  def __iter__(self):
+    # Iterates over environment variable names.
+    return iter(self._env)
+
+  def keys(self):
+    """Return the names of the environment variables."""
+    return self._env.keys()
+
+  def items(self):
+    """Return (name, value) pairs for the environment variables."""
+    return self._env.items()
+
+  def update(self, value):
+    """Merge a mapping of environment variables into the environment."""
+    return self._env.update(value)
+
+  def get(self, key, default=_undefined):
+    """Return the value of an environment variable, falling back to
+    'default' if supplied (otherwise None, like dict.get)."""
+    if default is _undefined:
+      return self._env.get(key)
+    else:
+      return self._env.get(key, default)
+
+  def __getitem__(self, key):
+    return self._env[key]
+
+  def __setitem__(self, key, value):
+    self._env[key] = value
+
+  def __delitem__(self, key):
+    del self._env[key]
+
+ def appendPath(self, path):
+ basePath = self.configuration.basePath
+
+ path = basePath(path)
+
+ pathEnv = self.get('PATH', None)
+ if pathEnv is None:
+ pathEnv = path
+ else:
+ pathEnv = os.pathsep.join((pathEnv, path))
+ self['PATH'] = pathEnv
+
+ def prependPath(self, path):
+ basePath = self.configuration.basePath
+
+ path = basePath(path)
+
+ pathEnv = self.get('PATH', None)
+ if pathEnv is None:
+ pathEnv = path
+ else:
+ pathEnv = os.pathsep.join((path, pathEnv))
+ self['PATH'] = pathEnv
Index: cake/cake/library/variant.py
===================================================================
--- cake/cake/library/variant.py (revision 0)
+++ cake/cake/library/variant.py (working copy)
@@ -0,0 +1,27 @@
+"""Variant Tool.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+from cake.library import Tool
+from cake.script import Script
+
+class VariantTool(Tool):
+
+ def __getattribute__(self, name):
+ """Return a variant keywords current value given its name.
+
+ @param name: The name of the keyword to query.
+ @type name: string
+ @return: The current value of the named keyword.
+ @rtype: string
+ """
+ try:
+ return Tool.__getattribute__(self, name)
+ except AttributeError:
+ try:
+ return Script.getCurrent().variant.keywords[name]
+ except KeyError:
+ raise AttributeError("Unknown attribute '%s'" % name)
Index: cake/cake/library/zipping.py
===================================================================
--- cake/cake/library/zipping.py (revision 0)
+++ cake/cake/library/zipping.py (working copy)
@@ -0,0 +1,364 @@
+"""Zip Tool.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+from cake.library import Tool, getPath, getTask
+import cake.filesys
+import cake.zipping
+import zipfile
+import os
+import os.path
+import calendar
+import time
+
+def _shouldExtractFile(engine, absTargetFile, zipTime, onlyNewer):
+
+ if not onlyNewer:
+ return "onlyNewer is False" # Always rebuild
+
+ try:
+ mtime = os.stat(absTargetFile).st_mtime
+ except EnvironmentError:
+ return "it doesn't exist"
+
+ if zipTime != mtime:
+ # Assume the zip and the extract are the same file.
+ return "it has been changed"
+
+ return None
+
+def _extractFile(configuration, zipFile, zipPath, zipInfo, targetDir, absTargetDir, onlyNewer):
+  """Extract the ZipInfo object to a physical file at targetDir.
+
+  @param configuration: Configuration used for dependency checking.
+  @param zipFile: Open ZipFile to read the entry's data from.
+  @param zipPath: Path of the zip archive (used in error messages).
+  @param zipInfo: The ZipInfo entry to extract.
+  @param targetDir: Directory (possibly relative) being extracted into.
+  @param absTargetDir: Absolute form of targetDir.
+  @param onlyNewer: If False, always re-extract the file.
+  """
+  engine = configuration.engine
+  targetFile = os.path.join(targetDir, zipInfo.filename)
+  absTargetFile = os.path.join(absTargetDir, zipInfo.filename)
+
+  if cake.zipping.isDirectoryInfo(zipInfo):
+    # The zip info corresponds to a directory.
+    cake.filesys.makeDirs(absTargetFile)
+  else:
+    # The zip info corresponds to a file.
+    # Convert the entry's date_time tuple to an epoch time (via timegm,
+    # i.e. treated as UTC) for comparison with the file's mtime.
+    year, month, day, hour, minute, second = zipInfo.date_time
+    zipTime = calendar.timegm((year, month, day, hour, minute, second, 0, 0, 0))
+
+    buildArgs = []
+    _, reasonToBuild = configuration.checkDependencyInfo(targetFile, buildArgs)
+
+    if reasonToBuild is None:
+      # Dependency info is up to date; fall back to a timestamp check.
+      reasonToBuild = _shouldExtractFile(engine, absTargetFile, zipTime, onlyNewer)
+      if reasonToBuild is None:
+        return # Target is up to date
+
+    engine.logger.outputDebug(
+      "reason",
+      "Extracting '" + targetFile + "' because " + reasonToBuild + ".\n",
+      )
+
+    engine.logger.outputInfo("Extracting %s\n" % targetFile)
+
+    try:
+      cake.filesys.writeFile(absTargetFile, zipFile.read(zipInfo.filename))
+    except Exception, e:
+      engine.raiseError(
+        "Failed to extract file %s from zip %s: %s\n" % (
+          zipInfo.filename,
+          zipPath,
+          str(e),
+          ),
+        )
+
+    # Now that the file has been written successfully, save the new dependency file
+    newDependencyInfo = configuration.createDependencyInfo(
+      targets=[targetFile],
+      args=buildArgs,
+      dependencies=[],
+      )
+    configuration.storeDependencyInfo(newDependencyInfo)
+
+    # Set the file modification time to match the zip time
+    os.utime(absTargetFile, (zipTime, zipTime))
+
+def _shouldCompress(
+  configuration,
+  sourcePath,
+  targetPath,
+  toZip,
+  onlyNewer,
+  removeStale,
+  ):
+  """Decide whether, and how, the target zip needs (re)building.
+
+  @param toZip: Mapping of normalised (cased) relative path -> original
+  relative path of everything that should be in the zip.
+
+  @return: A tuple (toAppend, reasonToBuild).  When reasonToBuild is
+  None the zip is up to date.  Otherwise, toAppend of None means the
+  whole zip must be recreated; a list means only those paths need to be
+  appended to the existing zip.
+  """
+  if not onlyNewer:
+    return None, "onlyNewer is False" # Always rebuild
+
+  absSourcePath = configuration.abspath(sourcePath)
+  absTargetPath = configuration.abspath(targetPath)
+
+  # Try to open an existing zip file
+  try:
+    file = zipfile.ZipFile(absTargetPath, "r")
+    try:
+      zipInfos = file.infolist()
+    finally:
+      file.close()
+
+    # Build a list of files/dirs in the current zip
+    fromZip = {}
+    for zipInfo in zipInfos:
+      path = os.path.normpath(os.path.normcase(zipInfo.filename))
+      fromZip[path] = zipInfo
+  except EnvironmentError:
+    # File doesn't exist or is invalid
+    return None, "'" + targetPath + "' doesn't exist"
+
+  # Check modification times of source files against those in the zip
+  for casedPath, originalPath in toZip.iteritems():
+    zipInfo = fromZip.get(casedPath, None)
+
+    # Not interested in modified directories
+    if zipInfo is not None and not cake.zipping.isDirectoryInfo(zipInfo):
+      absSourceFilePath = os.path.join(absSourcePath, originalPath)
+      utcTime = time.gmtime(os.stat(absSourceFilePath).st_mtime)
+      zipTime = utcTime[0:5] + (
+        utcTime[5] & 0xFE, # Zip only saves 2 second resolution
+        )
+      if zipTime != zipInfo.date_time:
+        sourceFilePath = os.path.join(sourcePath, originalPath)
+        # We must recreate the entire zip to update files
+        return None, "'" + sourceFilePath + "' has been changed"
+
+  if removeStale:
+    for path, zipInfo in fromZip.iteritems():
+      if path not in toZip:
+        # We must recreate the zip to remove files
+        sourceFilePath = os.path.join(sourcePath, zipInfo.filename)
+        return None, "'" + sourceFilePath + "' has been removed"
+
+  # Anything on disk but missing from the zip can simply be appended.
+  toAppend = []
+  reasonToBuild = None
+  for casedPath, originalPath in toZip.iteritems():
+    if casedPath not in fromZip:
+      toAppend.append(originalPath)
+      if reasonToBuild is None:
+        sourceFilePath = os.path.join(sourcePath, originalPath)
+        reasonToBuild = "'" + sourceFilePath + "' is not in zip"
+
+  return toAppend, reasonToBuild
+
+class ZipTool(Tool):
+  """Tool for extracting and creating zip archives as build tasks."""
+
+  def extract(
+    self,
+    targetDir,
+    source,
+    onlyNewer=True,
+    removeStale=False,
+    includeMatch=None,
+    ):
+    """Extract all files in a Zip to the specified path.
+
+    @param targetDir: The directory to extract files to.
+    @type targetDir: string
+
+    @param source: Path to the zip file to extract files from.
+    @type source: string
+
+    @param onlyNewer: Only extract files that are newer than those in
+    the target directory.
+    @type onlyNewer: bool
+
+    @param removeStale: Remove files and directories in the target
+    directory that no longer exist in the zip.
+    @type removeStale: bool
+
+    @param includeMatch: A callable used to decide whether to include
+    certain files in the extraction. This could be a python callable that
+    returns True to include the file or False to exclude it, or a regular
+    expression function such as re.compile().match or re.match.
+    @type includeMatch: any callable
+
+    @return: A task that will complete when the extraction has finished.
+    @rtype: L{Task}
+
+    @raise TypeError: If targetDir is not a string.
+    """
+    if not isinstance(targetDir, basestring):
+      raise TypeError("targetDir must be a string")
+
+    engine = self.engine
+    configuration = self.configuration
+    basePath = configuration.basePath
+
+    targetDir = basePath(targetDir)
+    source = basePath(source)
+
+    def doIt():
+      # Runs as a task once 'source' has been built.
+      sourcePath = getPath(source)
+      absTargetDir = configuration.abspath(targetDir)
+      zipFile = zipfile.ZipFile(configuration.abspath(sourcePath), "r")
+      try:
+        zipInfos = zipFile.infolist()
+
+        if includeMatch is not None:
+          zipInfos = [z for z in zipInfos if includeMatch(z.filename)]
+
+        if removeStale:
+          # Delete anything under targetDir that isn't in the zip.
+          filesInZip = set()
+          for zipInfo in zipInfos:
+            filesInZip.add(os.path.normcase(os.path.normpath(zipInfo.filename)))
+
+          searchDir = os.path.normpath(absTargetDir)
+          for path in cake.filesys.walkTree(searchDir):
+            normPath = os.path.normcase(path)
+            # Skip files that also exist in the zip.
+            if normPath in filesInZip:
+              continue
+            if engine.dependencyInfoPath is None:
+              # Skip .dep files that match a file in the zip.
+              p, e = os.path.splitext(normPath)
+              if e == ".dep" and p in filesInZip:
+                continue
+
+            absPath = os.path.join(searchDir, path)
+            engine.logger.outputInfo(
+              "Deleting %s\n" % os.path.join(targetDir, path),
+              )
+            if os.path.isdir(absPath):
+              cake.filesys.removeTree(absPath)
+            else:
+              cake.filesys.remove(absPath)
+
+        for zipinfo in zipInfos:
+          _extractFile(configuration, zipFile, sourcePath, zipinfo, targetDir, absTargetDir, onlyNewer)
+      finally:
+        zipFile.close()
+
+    if self.enabled:
+      sourceTask = getTask(source)
+
+      task = engine.createTask(doIt)
+      task.startAfter(sourceTask)
+    else:
+      # Tool disabled: no work is scheduled.
+      task = None
+
+    return task
+
+  def compress(
+    self,
+    target,
+    source,
+    onlyNewer=True,
+    removeStale=True,
+    includeMatch=None,
+    ):
+    """Compress a source file/directory to the specified zip target path.
+
+    @param target: Path to the zip file to add files to.
+    @type target: string
+
+    @param source: Path to the source file or directory to add.
+    @type source: string
+
+    @param onlyNewer: Only add files that are newer than those in
+    the zip file. Otherwise all files are re-added every time.
+    @type onlyNewer: bool
+
+    @param removeStale: Remove files and directories in the zip
+    file that no longer exist in the source directory.
+    @type removeStale: bool
+
+    @param includeMatch: A callable used to decide whether to include
+    certain files in the zip file. This could be a python callable that
+    returns True to include the file or False to exclude it, or a regular
+    expression function such as re.compile().match or re.match.
+    @type includeMatch: any callable
+
+    @return: A task that will complete when the compression has finished.
+    @rtype: L{Task}
+
+    @raise TypeError: If target is not a string.
+    """
+    if not isinstance(target, basestring):
+      raise TypeError("target must be a string")
+
+    engine = self.engine
+    configuration = self.configuration
+    basePath = configuration.basePath
+
+    target = basePath(target)
+    source = basePath(source)
+
+    def doIt():
+      # Runs as a task once 'source' has been built.
+      sourceDir = getPath(source)
+      absSourceDir = configuration.abspath(sourceDir)
+
+      # Build a list of files/dirs to zip
+      toZip = cake.zipping.findFilesToCompress(absSourceDir, includeMatch)
+
+      # Check for an existing dependency info file
+      buildArgs = []
+      toAppend = None
+      _, reasonToBuild = configuration.checkDependencyInfo(target, buildArgs)
+      if reasonToBuild is None:
+        # Figure out if we need to rebuild/append
+        toAppend, reasonToBuild = _shouldCompress(
+          configuration,
+          sourceDir,
+          target,
+          toZip,
+          onlyNewer,
+          removeStale,
+          )
+        if reasonToBuild is None:
+          return # Target is up to date
+
+      engine.logger.outputDebug(
+        "reason",
+        "Rebuilding '" + target + "' because " + reasonToBuild + ".\n",
+        )
+
+      absTargetPath = configuration.abspath(target)
+      # toAppend of None means full rebuild; a list means only those
+      # paths need appending to the existing archive (see _shouldCompress).
+      if toAppend is None:
+        # Recreate zip
+        cake.filesys.makeDirs(os.path.dirname(absTargetPath))
+        f = open(absTargetPath, "wb")
+        try:
+          zipFile = zipfile.ZipFile(f, "w")
+          for originalPath in toZip.itervalues():
+            sourcePath = os.path.join(sourceDir, originalPath)
+            absSourcePath = configuration.abspath(sourcePath)
+            configuration.engine.logger.outputInfo("Adding %s to %s\n" % (sourcePath, target))
+            cake.zipping.writeFileToZip(zipFile, absSourcePath, originalPath)
+          zipFile.close()
+        finally:
+          f.close()
+      else:
+        # Append to existing zip
+        f = open(absTargetPath, "r+b")
+        try:
+          zipFile = zipfile.ZipFile(f, "a")
+          for originalPath in toAppend:
+            sourcePath = os.path.join(sourceDir, originalPath)
+            absSourcePath = configuration.abspath(sourcePath)
+            configuration.engine.logger.outputInfo("Adding %s to %s\n" % (sourcePath, target))
+            cake.zipping.writeFileToZip(zipFile, absSourcePath, originalPath)
+          zipFile.close()
+        finally:
+          f.close()
+
+      # Now that the zip has been written successfully, save the new dependency file
+      newDependencyInfo = configuration.createDependencyInfo(
+        targets=[target],
+        args=buildArgs,
+        dependencies=[],
+        )
+      configuration.storeDependencyInfo(newDependencyInfo)
+
+    if self.enabled:
+      sourceTask = getTask(source)
+
+      task = engine.createTask(doIt)
+      task.startAfter(sourceTask)
+    else:
+      task = None
+
+    return task
Index: cake/cake/logging.py
===================================================================
--- cake/cake/logging.py (revision 0)
+++ cake/cake/logging.py (working copy)
@@ -0,0 +1,101 @@
+"""Logging Utilities.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import sys
+import threading
+
+class Logger(object):
+ """A class used to log tool output.
+
+ Message output for each function is guaranteed to not intermingle
+ with other messages output due to the use of a thread lock.
+ """
+
+ def __init__(self):
+ """Default construction.
+ """
+ self._lock = threading.Lock()
+ self._debugComponents = set()
+ self.quiet = False
+
+ def enableDebug(self, component):
+ """Enable debugging for a given component.
+
+ @param component: The component to enable debugging of.
+ @type component: string
+ """
+ self._debugComponents.add(component)
+
+ def disableDebug(self, component):
+ """Disable debugging for a given component.
+
+ @param component: The component to disable debugging of.
+ @type component: string
+ """
+ self._debugComponents.discard(component)
+
+ def debugEnabled(self, keyword):
+ """Returns True if currently debugging the given component.
+
+ @param keyword: The component to check.
+ @type keyword: string
+
+ @return: True if the logger is debugging the given component,
+ otherwise False.
+ @rtype: bool
+ """
+ return keyword in self._debugComponents
+
+ def outputError(self, message):
+ """Output an error message.
+
+ @param message: The message to output.
+ @type message: string
+ """
+ if not self.quiet:
+ self._lock.acquire()
+ try:
+ sys.stderr.write(message)
+ sys.stderr.flush()
+ finally:
+ self._lock.release()
+
+ def outputWarning(self, message):
+ """Output a warning message.
+
+ @param message: The message to output.
+ @type message: string
+ """
+ self.outputError(message)
+
+ def outputInfo(self, message):
+ """Output an informative message.
+
+ @param message: The message to output.
+ @type message: string
+ """
+ if not self.quiet:
+ self._lock.acquire()
+ try:
+ sys.stdout.write(message)
+ sys.stdout.flush()
+ finally:
+ self._lock.release()
+
+ def outputDebug(self, keyword, message):
+ """Output a debug message.
+
+ The message will output only if the keyword matches a component
+ we are currently debugging.
+
+ @param keyword: The debug keyword associated with this message.
+ @type keyword: string
+ @param message: The message to output.
+ @type message: string
+ """
+ if keyword in self._debugComponents:
+ self.outputInfo(message)
Index: cake/cake/main.py
===================================================================
--- cake/cake/main.py (revision 0)
+++ cake/cake/main.py (working copy)
@@ -0,0 +1,28 @@
+"""Main entrypoint.
+
+This main module reduces the risk of a stack dump caused by a
+KeyboardInterrupt by not loading any unnecessary modules until a
+keyboard interrupt signal handler is in place.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+def execute():
+  """Execute Cake passing the result to sys.exit().
+
+  Installs a SIGINT handler before importing the bulk of Cake so a
+  keyboard interrupt during startup exits (code -1) instead of dumping
+  a traceback.
+  """
+  # Imports deliberately deferred until the handler is installed (see
+  # the module docstring).
+  import signal
+  import sys
+
+  def signalHandler(signum, frame):
+    sys.exit(-1)
+  signal.signal(signal.SIGINT, signalHandler)
+
+  import cake.runner
+  sys.exit(cake.runner.run())
+
+if __name__ == '__main__':
+ """Main entrypoint.
+ """
+ execute()
Index: cake/cake/msvs.py
===================================================================
--- cake/cake/msvs.py (revision 0)
+++ cake/cake/msvs.py (working copy)
@@ -0,0 +1,83 @@
+"""Utilities for querying Microsoft Visual Studio settings.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+import _winreg
+
+from cake.registry import queryString
+
+def getMsvsInstallDir(version=r'VisualStudio\8.0'):
+ """Returns the MSVS install directory.
+
+ Typically: 'C:\Program Files\Microsoft Visual Studio 8\Common7\IDE'.
+
+ @param version: The registry path used to search for MSVS.
+ @type version: string
+
+ @return: The path to the MSVS install directory.
+ @rtype: string
+
+ @raise WindowsError: If MSVS is not installed.
+ """
+ subKey = r"SOFTWARE\Microsoft\%s" % version
+ return queryString(_winreg.HKEY_LOCAL_MACHINE, subKey, "InstallDir")
+
+def getMsvsProductDir(version=r'VisualStudio\8.0'):
+ """Returns the MSVS product directory.
+
+ Typically: 'C:\Program Files\Microsoft Visual Studio 8\'.
+
+ @param version: The registry path used to search for MSVS.
+ @type version: string
+
+ @return: The path to the MSVS product directory.
+ @rtype: string
+
+ @raise WindowsError: If MSVS is not installed.
+ """
+ subKey = r"SOFTWARE\Microsoft\%s\Setup\VS" % version
+ return queryString(_winreg.HKEY_LOCAL_MACHINE, subKey, "ProductDir")
+
+def getMsvcProductDir(version=r'VisualStudio\8.0'):
+ """Returns the MSVC product directory as obtained from the registry.
+
+ Typically: 'C:\Program Files\Microsoft Visual Studio 8\VC'.
+
+ @param version: The registry path used to search for MSVS.
+ @type version: string
+
+ @return: The path to the MSVC product directory.
+ @rtype: string
+
+ @raise WindowsError: If MSVC is not installed.
+ """
+ subKey = r"SOFTWARE\Microsoft\%s\Setup\VC" % version
+ return queryString(_winreg.HKEY_LOCAL_MACHINE, subKey, "ProductDir")
+
+def getPlatformSdkDir():
+ """Returns the Microsoft Platform SDK directory.
+
+ @return: The path to the Platform SDK directory.
+ @rtype: string
+
+ @raise WindowsError: If the Platform SDK is not installed.
+ """
+ subKey = r"SOFTWARE\Microsoft\Microsoft SDKs\Windows"
+ return queryString(_winreg.HKEY_LOCAL_MACHINE, subKey, "CurrentInstallFolder")
+
+def getDotNetFrameworkSdkDir(version='2.0'):
+ """Looks up the path of the Microsoft .NET Framework SDK directory.
+
+ @param version: The .NET Framework version to search for.
+ @type version: string
+
+ @return: The path to the .NET Framework SDK root directory.
+ @rtype: string
+
+ @raise WindowsError: If the .NET Framework SDK is not installed.
+ """
+ subKey = r"SOFTWARE\Microsoft\.NETFramework"
+ valueName = "sdkInstallRootv" + version
+ return queryString(_winreg.HKEY_LOCAL_MACHINE, subKey, valueName)
Index: cake/cake/optparse.py
===================================================================
--- cake/cake/optparse.py (revision 0)
+++ cake/cake/optparse.py (working copy)
@@ -0,0 +1,1718 @@
+"""A powerful, extensible, and easy-to-use option parser.
+
+By Greg Ward <gward@python.net>
+
+Originally distributed as Optik.
+
+For support, use the optik-users@lists.sourceforge.net mailing list
+(http://lists.sourceforge.net/lists/listinfo/optik-users).
+
+Simple usage example:
+
+ from optparse import OptionParser
+
+ parser = OptionParser()
+ parser.add_option("-f", "--file", dest="filename",
+ help="write report to FILE", metavar="FILE")
+ parser.add_option("-q", "--quiet",
+ action="store_false", dest="verbose", default=True,
+ help="don't print status messages to stdout")
+
+ (options, args) = parser.parse_args()
+"""
+
+__version__ = "1.5.3"
+
+__all__ = ['Option',
+ 'make_option',
+ 'SUPPRESS_HELP',
+ 'SUPPRESS_USAGE',
+ 'Values',
+ 'OptionContainer',
+ 'OptionGroup',
+ 'OptionParser',
+ 'HelpFormatter',
+ 'IndentedHelpFormatter',
+ 'TitledHelpFormatter',
+ 'OptParseError',
+ 'OptionError',
+ 'OptionConflictError',
+ 'OptionValueError',
+ 'BadOptionError']
+
+__copyright__ = """
+Copyright (c) 2001-2006 Gregory P. Ward. All rights reserved.
+Copyright (c) 2002-2006 Python Software Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ * Neither the name of the author nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""
+
+import sys, os
+import types
+import textwrap
+
+def _repr(self):
+ return "<%s at 0x%x: %s>" % (self.__class__.__name__, id(self), self)
+
+
+# This file was generated from:
+# Id: option_parser.py 527 2006-07-23 15:21:30Z greg
+# Id: option.py 522 2006-06-11 16:22:03Z gward
+# Id: help.py 527 2006-07-23 15:21:30Z greg
+# Id: errors.py 509 2006-04-20 00:58:24Z gward
+
+try:
+ from gettext import gettext
+except ImportError:
+ def gettext(message):
+ return message
+_ = gettext
+
+
+class OptParseError (Exception):
+ def __init__(self, msg):
+ self.msg = msg
+
+ def __str__(self):
+ return self.msg
+
+
+class OptionError (OptParseError):
+ """
+ Raised if an Option instance is created with invalid or
+ inconsistent arguments.
+ """
+
+ def __init__(self, msg, option):
+ self.msg = msg
+ self.option_id = str(option)
+
+ def __str__(self):
+ if self.option_id:
+ return "option %s: %s" % (self.option_id, self.msg)
+ else:
+ return self.msg
+
+class OptionConflictError (OptionError):
+ """
+ Raised if conflicting options are added to an OptionParser.
+ """
+
+class OptionValueError (OptParseError):
+ """
+ Raised if an invalid option value is encountered on the command
+ line.
+ """
+
+class BadOptionError (OptParseError):
+ """
+ Raised if an invalid option is seen on the command line.
+ """
+ def __init__(self, opt_str):
+ self.opt_str = opt_str
+
+ def __str__(self):
+ return _("no such option: %s") % self.opt_str
+
+class AmbiguousOptionError (BadOptionError):
+ """
+ Raised if an ambiguous option is seen on the command line.
+ """
+ def __init__(self, opt_str, possibilities):
+ BadOptionError.__init__(self, opt_str)
+ self.possibilities = possibilities
+
+ def __str__(self):
+ return (_("ambiguous option: %s (%s?)")
+ % (self.opt_str, ", ".join(self.possibilities)))
+
+
+class HelpFormatter:
+
+ """
+ Abstract base class for formatting option help. OptionParser
+ instances should use one of the HelpFormatter subclasses for
+ formatting help; by default IndentedHelpFormatter is used.
+
+ Instance attributes:
+ parser : OptionParser
+ the controlling OptionParser instance
+ indent_increment : int
+ the number of columns to indent per nesting level
+ max_help_position : int
+ the maximum starting column for option help text
+ help_position : int
+ the calculated starting column for option help text;
+ initially the same as the maximum
+ width : int
+ total number of columns for output (pass None to constructor for
+ this value to be taken from the $COLUMNS environment variable)
+ level : int
+ current indentation level
+ current_indent : int
+ current indentation level (in columns)
+ help_width : int
+ number of columns available for option help text (calculated)
+ default_tag : str
+ text to replace with each option's default value, "%default"
+ by default. Set to false value to disable default value expansion.
+ option_strings : { Option : str }
+ maps Option instances to the snippet of help text explaining
+ the syntax of that option, e.g. "-h, --help" or
+ "-fFILE, --file=FILE"
+ _short_opt_fmt : str
+ format string controlling how short options with values are
+ printed in help text. Must be either "%s%s" ("-fFILE") or
+ "%s %s" ("-f FILE"), because those are the two syntaxes that
+ Optik supports.
+ _long_opt_fmt : str
+ similar but for long options; must be either "%s %s" ("--file FILE")
+ or "%s=%s" ("--file=FILE").
+ """
+
+ NO_DEFAULT_VALUE = "none"
+
+ def __init__(self,
+ indent_increment,
+ max_help_position,
+ width,
+ short_first):
+ self.parser = None
+ self.indent_increment = indent_increment
+ self.help_position = self.max_help_position = max_help_position
+ if width is None:
+ try:
+ width = int(os.environ['COLUMNS'])
+ except (KeyError, ValueError):
+ width = 80
+ width -= 2
+ self.width = width
+ self.current_indent = 0
+ self.level = 0
+ self.help_width = None # computed later
+ self.short_first = short_first
+ self.default_tag = "%default"
+ self.option_strings = {}
+ self._short_opt_fmt = "%s%s" # STU: Removed space since we don't support it.
+ self._long_opt_fmt = "%s=%s"
+
+ def set_parser(self, parser):
+ self.parser = parser
+
+ def set_short_opt_delimiter(self, delim):
+ if delim not in ("", " "):
+ raise ValueError(
+ "invalid metavar delimiter for short options: %r" % delim)
+ self._short_opt_fmt = "%s" + delim + "%s"
+
+ def set_long_opt_delimiter(self, delim):
+ if delim not in ("=", " "):
+ raise ValueError(
+ "invalid metavar delimiter for long options: %r" % delim)
+ self._long_opt_fmt = "%s" + delim + "%s"
+
+ def indent(self):
+ self.current_indent += self.indent_increment
+ self.level += 1
+
+ def dedent(self):
+ self.current_indent -= self.indent_increment
+ assert self.current_indent >= 0, "Indent decreased below 0."
+ self.level -= 1
+
+ def format_usage(self, usage):
+ raise NotImplementedError, "subclasses must implement"
+
+ def format_heading(self, heading):
+ raise NotImplementedError, "subclasses must implement"
+
+ def _format_text(self, text):
+ """
+ Format a paragraph of free-form text for inclusion in the
+ help output at the current indentation level.
+ """
+ text_width = self.width - self.current_indent
+ indent = " "*self.current_indent
+ return textwrap.fill(text,
+ text_width,
+ initial_indent=indent,
+ subsequent_indent=indent)
+
+ def format_description(self, description):
+ if description:
+ return self._format_text(description) + "\n"
+ else:
+ return ""
+
+ def format_epilog(self, epilog):
+ if epilog:
+ return "\n" + self._format_text(epilog) + "\n"
+ else:
+ return ""
+
+
+ def expand_default(self, option):
+ if self.parser is None or not self.default_tag:
+ return option.help
+
+ default_value = self.parser.defaults.get(option.dest)
+ if default_value is NO_DEFAULT or default_value is None:
+ default_value = self.NO_DEFAULT_VALUE
+
+ return option.help.replace(self.default_tag, str(default_value))
+
+ def format_option(self, option):
+ # The help for each option consists of two parts:
+ # * the opt strings and metavars
+ # eg. ("-x", or "-fFILENAME, --file=FILENAME")
+ # * the user-supplied help string
+ # eg. ("turn on expert mode", "read data from FILENAME")
+ #
+ # If possible, we write both of these on the same line:
+ # -x turn on expert mode
+ #
+ # But if the opt string list is too long, we put the help
+ # string on a second line, indented to the same column it would
+ # start in if it fit on the first line.
+ # -fFILENAME, --file=FILENAME
+ # read data from FILENAME
+ result = []
+ opts = self.option_strings[option]
+ opt_width = self.help_position - self.current_indent - 2
+ if len(opts) > opt_width:
+ opts = "%*s%s\n" % (self.current_indent, "", opts)
+ indent_first = self.help_position
+ else: # start help on same line as opts
+ opts = "%*s%-*s " % (self.current_indent, "", opt_width, opts)
+ indent_first = 0
+ result.append(opts)
+ if option.help:
+ help_text = self.expand_default(option)
+ help_lines = textwrap.wrap(help_text, self.help_width)
+ result.append("%*s%s\n" % (indent_first, "", help_lines[0]))
+ result.extend(["%*s%s\n" % (self.help_position, "", line)
+ for line in help_lines[1:]])
+ elif opts[-1] != "\n":
+ result.append("\n")
+ return "".join(result)
+
+ def store_option_strings(self, parser):
+ self.indent()
+ max_len = 0
+ for opt in parser.option_list:
+ strings = self.format_option_strings(opt)
+ self.option_strings[opt] = strings
+ max_len = max(max_len, len(strings) + self.current_indent)
+ self.indent()
+ for group in parser.option_groups:
+ for opt in group.option_list:
+ strings = self.format_option_strings(opt)
+ self.option_strings[opt] = strings
+ max_len = max(max_len, len(strings) + self.current_indent)
+ self.dedent()
+ self.dedent()
+ self.help_position = min(max_len + 2, self.max_help_position)
+ self.help_width = self.width - self.help_position
+
+ def format_option_strings(self, option):
+ """Return a comma-separated list of option strings & metavariables."""
+ if option.takes_value():
+ metavar = option.metavar or option.dest.upper()
+ short_opts = [self._short_opt_fmt % (sopt, metavar)
+ for sopt in option._short_opts]
+ long_opts = [self._long_opt_fmt % (lopt, metavar)
+ for lopt in option._long_opts]
+ else:
+ short_opts = option._short_opts
+ long_opts = option._long_opts
+
+ if self.short_first:
+ opts = short_opts + long_opts
+ else:
+ opts = long_opts + short_opts
+
+ return ", ".join(opts)
+
+class IndentedHelpFormatter (HelpFormatter):
+ """Format help with indented section bodies.
+ """
+
+ def __init__(self,
+ indent_increment=2,
+ max_help_position=24,
+ width=None,
+ short_first=1):
+ HelpFormatter.__init__(
+ self, indent_increment, max_help_position, width, short_first)
+
+ def format_usage(self, usage):
+ return _("Usage: %s\n") % usage
+
+ def format_heading(self, heading):
+ return "%*s%s:\n" % (self.current_indent, "", heading)
+
+
+class TitledHelpFormatter (HelpFormatter):
+ """Format help with underlined section headers.
+ """
+
+ def __init__(self,
+ indent_increment=0,
+ max_help_position=24,
+ width=None,
+ short_first=0):
+ HelpFormatter.__init__ (
+ self, indent_increment, max_help_position, width, short_first)
+
+ def format_usage(self, usage):
+ return "%s %s\n" % (self.format_heading(_("Usage")), usage)
+
+ def format_heading(self, heading):
+ return "%s\n%s\n" % (heading, "=-"[self.level] * len(heading))
+
+
+def _parse_num(val, type):
+ if val[:2].lower() == "0x": # hexadecimal
+ radix = 16
+ elif val[:2].lower() == "0b": # binary
+ radix = 2
+ val = val[2:] or "0" # have to remove "0b" prefix
+ elif val[:1] == "0": # octal
+ radix = 8
+ else: # decimal
+ radix = 10
+
+ return type(val, radix)
+
+def _parse_int(val):
+ return _parse_num(val, int)
+
+def _parse_long(val):
+ return _parse_num(val, long)
+
+_builtin_cvt = { "int" : (_parse_int, _("integer")),
+ "long" : (_parse_long, _("long integer")),
+ "float" : (float, _("floating-point")),
+ "complex" : (complex, _("complex")) }
+
+def check_builtin(option, opt, value):
+ (cvt, what) = _builtin_cvt[option.type]
+ try:
+ return cvt(value)
+ except ValueError:
+ raise OptionValueError(
+ _("option %s: invalid %s value: %r") % (opt, what, value))
+
+def check_choice(option, opt, value):
+ if value in option.choices:
+ return value
+ else:
+ choices = ", ".join(map(repr, option.choices))
+ raise OptionValueError(
+ _("option %s: invalid choice: %r (choose from %s)")
+ % (opt, value, choices))
+
+# Not supplying a default is different from a default of None,
+# so we need an explicit "not supplied" value.
+NO_DEFAULT = ("NO", "DEFAULT")
+
+
+class Option:
+ """
+ Instance attributes:
+ _short_opts : [string]
+ _long_opts : [string]
+
+ action : string
+ type : string
+ dest : string
+ default : any
+ nargs : int
+ const : any
+ choices : [string]
+ callback : function
+ callback_args : (any*)
+ callback_kwargs : { string : any }
+ help : string
+ metavar : string
+ """
+
+ # The list of instance attributes that may be set through
+ # keyword args to the constructor.
+ ATTRS = ['action',
+ 'type',
+ 'dest',
+ 'default',
+ 'nargs',
+ 'const',
+ 'choices',
+ 'callback',
+ 'callback_args',
+ 'callback_kwargs',
+ 'help',
+ 'metavar']
+
+ # The set of actions allowed by option parsers. Explicitly listed
+ # here so the constructor can validate its arguments.
+ ACTIONS = ("store",
+ "store_const",
+ "store_true",
+ "store_false",
+ "append",
+ "append_const",
+ "extend",
+ "count",
+ "callback",
+ "help",
+ "version")
+
+ # The set of actions that involve storing a value somewhere;
+ # also listed just for constructor argument validation. (If
+ # the action is one of these, there must be a destination.)
+ STORE_ACTIONS = ("store",
+ "store_const",
+ "store_true",
+ "store_false",
+ "append",
+ "append_const",
+ "extend",
+ "count")
+
+ # The set of actions for which it makes sense to supply a value
+ # type, ie. which may consume an argument from the command line.
+ TYPED_ACTIONS = ("store",
+ "append",
+ "extend",
+ "callback")
+
+ # The set of actions which *require* a value type, ie. that
+ # always consume an argument from the command line.
+ ALWAYS_TYPED_ACTIONS = ("store",
+ "append",
+ "extend")
+
+ # The set of actions which take a 'const' attribute.
+ CONST_ACTIONS = ("store_const",
+ "append_const")
+
+ # The set of known types for option parsers. Again, listed here for
+ # constructor argument validation.
+ TYPES = ("string", "int", "long", "float", "complex", "choice")
+
+ # Dictionary of argument checking functions, which convert and
+ # validate option arguments according to the option type.
+ #
+ # Signature of checking functions is:
+ # check(option : Option, opt : string, value : string) -> any
+ # where
+ # option is the Option instance calling the checker
+ # opt is the actual option seen on the command-line
+ # (eg. "-a", "--file")
+ # value is the option argument seen on the command-line
+ #
+ # The return value should be in the appropriate Python type
+ # for option.type -- eg. an integer if option.type == "int".
+ #
+ # If no checker is defined for a type, arguments will be
+ # unchecked and remain strings.
+ TYPE_CHECKER = { "int" : check_builtin,
+ "long" : check_builtin,
+ "float" : check_builtin,
+ "complex": check_builtin,
+ "choice" : check_choice,
+ }
+
+
+ # CHECK_METHODS is a list of unbound method objects; they are called
+ # by the constructor, in order, after all attributes are
+ # initialized. The list is created and filled in later, after all
+ # the methods are actually defined. (I just put it here because I
+ # like to define and document all class attributes in the same
+ # place.) Subclasses that add another _check_*() method should
+ # define their own CHECK_METHODS list that adds their check method
+ # to those from this class.
+ CHECK_METHODS = None
+
+
+ # -- Constructor/initialization methods ----------------------------
+
+ def __init__(self, *opts, **attrs):
+ # Set _short_opts, _long_opts attrs from 'opts' tuple.
+ # Have to be set now, in case no option strings are supplied.
+ self._short_opts = []
+ self._long_opts = []
+ opts = self._check_opt_strings(opts)
+ self._set_opt_strings(opts)
+
+ # Set all other attrs (action, type, etc.) from 'attrs' dict
+ self._set_attrs(attrs)
+
+ # Check all the attributes we just set. There are lots of
+ # complicated interdependencies, but luckily they can be farmed
+ # out to the _check_*() methods listed in CHECK_METHODS -- which
+ # could be handy for subclasses! The one thing these all share
+ # is that they raise OptionError if they discover a problem.
+ for checker in self.CHECK_METHODS:
+ checker(self)
+
+ def _check_opt_strings(self, opts):
+ # Filter out None because early versions of Optik had exactly
+ # one short option and one long option, either of which
+ # could be None.
+ opts = filter(None, opts)
+ if not opts:
+ raise TypeError("at least one option string must be supplied")
+ return opts
+
+ def _set_opt_strings(self, opts):
+ for opt in opts:
+ if len(opt) < 2:
+ raise OptionError(
+ "invalid option string %r: "
+ "must be at least two characters long" % opt, self)
+ elif len(opt) == 2:
+ if not (opt[0] == "-" and opt[1] != "-"):
+ raise OptionError(
+ "invalid short option string %r: "
+ "must be of the form -x, (x any non-dash char)" % opt,
+ self)
+ self._short_opts.append(opt)
+ else:
+ if not (opt[0:2] == "--" and opt[2] != "-"):
+ raise OptionError(
+ "invalid long option string %r: "
+ "must start with --, followed by non-dash" % opt,
+ self)
+ self._long_opts.append(opt)
+
+ def _set_attrs(self, attrs):
+ for attr in self.ATTRS:
+ if attr in attrs:
+ setattr(self, attr, attrs[attr])
+ del attrs[attr]
+ else:
+ if attr == 'default':
+ setattr(self, attr, NO_DEFAULT)
+ else:
+ setattr(self, attr, None)
+ if attrs:
+ attrs = attrs.keys()
+ attrs.sort()
+ raise OptionError(
+ "invalid keyword arguments: %s" % ", ".join(attrs),
+ self)
+
+
+ # -- Constructor validation methods --------------------------------
+
+ def _check_action(self):
+ if self.action is None:
+ self.action = "store"
+ elif self.action not in self.ACTIONS:
+ raise OptionError("invalid action: %r" % self.action, self)
+
+ def _check_type(self):
+ if self.type is None:
+ if self.action in self.ALWAYS_TYPED_ACTIONS:
+ if self.choices is not None:
+ # The "choices" attribute implies "choice" type.
+ self.type = "choice"
+ else:
+ # No type given? "string" is the most sensible default.
+ self.type = "string"
+ else:
+ # Allow type objects or builtin type conversion functions
+ # (int, str, etc.) as an alternative to their names. (The
+ # complicated check of __builtin__ is only necessary for
+ # Python 2.1 and earlier, and is short-circuited by the
+ # first check on modern Pythons.)
+ import __builtin__
+ if ( type(self.type) is types.TypeType or
+ (hasattr(self.type, "__name__") and
+ getattr(__builtin__, self.type.__name__, None) is self.type) ):
+ self.type = self.type.__name__
+
+ if self.type == "str":
+ self.type = "string"
+
+ if self.type not in self.TYPES:
+ raise OptionError("invalid option type: %r" % self.type, self)
+ if self.action not in self.TYPED_ACTIONS:
+ raise OptionError(
+ "must not supply a type for action %r" % self.action, self)
+
+ def _check_choice(self):
+ if self.type == "choice":
+ if self.choices is None:
+ raise OptionError(
+ "must supply a list of choices for type 'choice'", self)
+ elif type(self.choices) not in (types.TupleType, types.ListType):
+ raise OptionError(
+ "choices must be a list of strings ('%s' supplied)"
+ % str(type(self.choices)).split("'")[1], self)
+ elif self.choices is not None:
+ raise OptionError(
+ "must not supply choices for type %r" % self.type, self)
+
+ def _check_dest(self):
+ # No destination given, and we need one for this action. The
+ # self.type check is for callbacks that take a value.
+ takes_value = (self.action in self.STORE_ACTIONS or
+ self.type is not None)
+ if self.dest is None and takes_value:
+
+ # Glean a destination from the first long option string,
+ # or from the first short option string if no long options.
+ if self._long_opts:
+ # eg. "--foo-bar" -> "foo_bar"
+ self.dest = self._long_opts[0][2:].replace('-', '_')
+ else:
+ self.dest = self._short_opts[0][1]
+
+ def _check_const(self):
+ if self.action not in self.CONST_ACTIONS and self.const is not None:
+ raise OptionError(
+ "'const' must not be supplied for action %r" % self.action,
+ self)
+
+ def _check_nargs(self):
+ if self.action in self.TYPED_ACTIONS:
+ if self.nargs is None:
+ self.nargs = 1
+ elif self.nargs is not None:
+ raise OptionError(
+ "'nargs' must not be supplied for action %r" % self.action,
+ self)
+
+ def _check_callback(self):
+ if self.action == "callback":
+ if not hasattr(self.callback, '__call__'):
+ raise OptionError(
+ "callback not callable: %r" % self.callback, self)
+ if (self.callback_args is not None and
+ type(self.callback_args) is not types.TupleType):
+ raise OptionError(
+ "callback_args, if supplied, must be a tuple: not %r"
+ % self.callback_args, self)
+ if (self.callback_kwargs is not None and
+ type(self.callback_kwargs) is not types.DictType):
+ raise OptionError(
+ "callback_kwargs, if supplied, must be a dict: not %r"
+ % self.callback_kwargs, self)
+ else:
+ if self.callback is not None:
+ raise OptionError(
+ "callback supplied (%r) for non-callback option"
+ % self.callback, self)
+ if self.callback_args is not None:
+ raise OptionError(
+ "callback_args supplied for non-callback option", self)
+ if self.callback_kwargs is not None:
+ raise OptionError(
+ "callback_kwargs supplied for non-callback option", self)
+
+
+ CHECK_METHODS = [_check_action,
+ _check_type,
+ _check_choice,
+ _check_dest,
+ _check_const,
+ _check_nargs,
+ _check_callback]
+
+
+ # -- Miscellaneous methods -----------------------------------------
+
+ def __str__(self):
+ return "/".join(self._short_opts + self._long_opts)
+
+ __repr__ = _repr
+
+ def takes_value(self):
+ return self.type is not None
+
+ def get_opt_string(self):
+ if self._long_opts:
+ return self._long_opts[0]
+ else:
+ return self._short_opts[0]
+
+
+ # -- Processing methods --------------------------------------------
+
+ def check_value(self, opt, value):
+ checker = self.TYPE_CHECKER.get(self.type)
+ if checker is None:
+ return value
+ else:
+ return checker(self, opt, value)
+
+ def convert_value(self, opt, value):
+ if value is not None:
+ if self.nargs == 1:
+ return self.check_value(opt, value)
+ else:
+ return tuple([self.check_value(opt, v) for v in value])
+
+ def process(self, opt, value, values, parser):
+
+ # First, convert the value(s) to the right type. Howl if any
+ # value(s) are bogus.
+ value = self.convert_value(opt, value)
+
+ # And then take whatever action is expected of us.
+ # This is a separate method to make life easier for
+ # subclasses to add new actions.
+ return self.take_action(
+ self.action, self.dest, opt, value, values, parser)
+
+ def take_action(self, action, dest, opt, value, values, parser):
+ if action == "store":
+ setattr(values, dest, value)
+ elif action == "store_const":
+ setattr(values, dest, self.const)
+ elif action == "store_true":
+ setattr(values, dest, True)
+ elif action == "store_false":
+ setattr(values, dest, False)
+ elif action == "append":
+ values.ensure_value(dest, []).append(value)
+ elif action == "append_const":
+ values.ensure_value(dest, []).append(self.const)
+ elif action == "extend":
+ lvalue = value.split(",")
+ values.ensure_value(dest, []).extend(lvalue)
+ elif action == "count":
+ setattr(values, dest, values.ensure_value(dest, 0) + 1)
+ elif action == "callback":
+ args = self.callback_args or ()
+ kwargs = self.callback_kwargs or {}
+ self.callback(self, opt, value, parser, *args, **kwargs)
+ elif action == "help":
+ parser.print_help()
+ parser.exit()
+ elif action == "version":
+ parser.print_version()
+ parser.exit()
+ else:
+ raise ValueError("unknown action %r" % self.action)
+
+ return 1
+
+# class Option
+
+
+SUPPRESS_HELP = "SUPPRESS"+"HELP"
+SUPPRESS_USAGE = "SUPPRESS"+"USAGE"
+
+try:
+ basestring
+except NameError:
+ def isbasestring(x):
+ return isinstance(x, (types.StringType, types.UnicodeType))
+else:
+ def isbasestring(x):
+ return isinstance(x, basestring)
+
+class Values:
+
+ def __init__(self, defaults=None):
+ if defaults:
+ for (attr, val) in defaults.items():
+ setattr(self, attr, val)
+
+ def __str__(self):
+ return str(self.__dict__)
+
+ __repr__ = _repr
+
+ def __cmp__(self, other):
+ if isinstance(other, Values):
+ return cmp(self.__dict__, other.__dict__)
+ elif isinstance(other, types.DictType):
+ return cmp(self.__dict__, other)
+ else:
+ return -1
+
+ def _update_careful(self, dict):
+ """
+ Update the option values from an arbitrary dictionary, but only
+ use keys from dict that already have a corresponding attribute
+ in self. Any keys in dict without a corresponding attribute
+ are silently ignored.
+ """
+ for attr in dir(self):
+ if attr in dict:
+ dval = dict[attr]
+ if dval is not None:
+ setattr(self, attr, dval)
+
+ def _update_loose(self, dict):
+ """
+ Update the option values from an arbitrary dictionary,
+ using all keys from the dictionary regardless of whether
+ they have a corresponding attribute in self or not.
+ """
+ self.__dict__.update(dict)
+
+ def _update(self, dict, mode):
+ if mode == "careful":
+ self._update_careful(dict)
+ elif mode == "loose":
+ self._update_loose(dict)
+ else:
+ raise ValueError, "invalid update mode: %r" % mode
+
+ def read_module(self, modname, mode="careful"):
+ __import__(modname)
+ mod = sys.modules[modname]
+ self._update(vars(mod), mode)
+
+ def read_file(self, filename, mode="careful"):
+ vars = {}
+ execfile(filename, vars)
+ self._update(vars, mode)
+
+ def ensure_value(self, attr, value):
+ if not hasattr(self, attr) or getattr(self, attr) is None:
+ setattr(self, attr, value)
+ return getattr(self, attr)
+
+
+class OptionContainer:
+
+ """
+ Abstract base class.
+
+ Class attributes:
+ standard_option_list : [Option]
+ list of standard options that will be accepted by all instances
+ of this parser class (intended to be overridden by subclasses).
+
+ Instance attributes:
+ option_list : [Option]
+ the list of Option objects contained by this OptionContainer
+ _short_opt : { string : Option }
+ dictionary mapping short option strings, eg. "-f" or "-X",
+ to the Option instances that implement them. If an Option
+ has multiple short option strings, it will appear in this
+ dictionary multiple times. [1]
+ _long_opt : { string : Option }
+ dictionary mapping long option strings, eg. "--file" or
+ "--exclude", to the Option instances that implement them.
+ Again, a given Option can occur multiple times in this
+ dictionary. [1]
+ defaults : { string : any }
+ dictionary mapping option destination names to default
+ values for each destination [1]
+
+ [1] These mappings are common to (shared by) all components of the
+ controlling OptionParser, where they are initially created.
+
+ """
+
+ def __init__(self, option_class, conflict_handler, description):
+ # Initialize the option list and related data structures.
+ # This method must be provided by subclasses, and it must
+ # initialize at least the following instance attributes:
+ # option_list, _short_opt, _long_opt, defaults.
+ self._create_option_list()
+
+ self.option_class = option_class
+ self.set_conflict_handler(conflict_handler)
+ self.set_description(description)
+
+ def _create_option_mappings(self):
+ # For use by OptionParser constructor -- create the master
+ # option mappings used by this OptionParser and all
+ # OptionGroups that it owns.
+ self._short_opt = {} # single letter -> Option instance
+ self._long_opt = {} # long option -> Option instance
+ self.defaults = {} # maps option dest -> default value
+
+
+ def _share_option_mappings(self, parser):
+ # For use by OptionGroup constructor -- use shared option
+ # mappings from the OptionParser that owns this OptionGroup.
+ self._short_opt = parser._short_opt
+ self._long_opt = parser._long_opt
+ self.defaults = parser.defaults
+
+ def set_conflict_handler(self, handler):
+ if handler not in ("error", "resolve"):
+ raise ValueError, "invalid conflict_resolution value %r" % handler
+ self.conflict_handler = handler
+
+ def set_description(self, description):
+ self.description = description
+
+ def get_description(self):
+ return self.description
+
+
+ def destroy(self):
+ """see OptionParser.destroy()."""
+ del self._short_opt
+ del self._long_opt
+ del self.defaults
+
+
+ # -- Option-adding methods -----------------------------------------
+
+ def _check_conflict(self, option):
+ conflict_opts = []
+ for opt in option._short_opts:
+ if opt in self._short_opt:
+ conflict_opts.append((opt, self._short_opt[opt]))
+ for opt in option._long_opts:
+ if opt in self._long_opt:
+ conflict_opts.append((opt, self._long_opt[opt]))
+
+ if conflict_opts:
+ handler = self.conflict_handler
+ if handler == "error":
+ raise OptionConflictError(
+ "conflicting option string(s): %s"
+ % ", ".join([co[0] for co in conflict_opts]),
+ option)
+ elif handler == "resolve":
+ for (opt, c_option) in conflict_opts:
+ if opt.startswith("--"):
+ c_option._long_opts.remove(opt)
+ del self._long_opt[opt]
+ else:
+ c_option._short_opts.remove(opt)
+ del self._short_opt[opt]
+ if not (c_option._short_opts or c_option._long_opts):
+ c_option.container.option_list.remove(c_option)
+
+ def add_option(self, *args, **kwargs):
+ """add_option(Option)
+ add_option(opt_str, ..., kwarg=val, ...)
+ """
+ if type(args[0]) in types.StringTypes:
+ option = self.option_class(*args, **kwargs)
+ elif len(args) == 1 and not kwargs:
+ option = args[0]
+ if not isinstance(option, Option):
+ raise TypeError, "not an Option instance: %r" % option
+ else:
+ raise TypeError, "invalid arguments"
+
+ self._check_conflict(option)
+
+ self.option_list.append(option)
+ option.container = self
+ for opt in option._short_opts:
+ self._short_opt[opt] = option
+ for opt in option._long_opts:
+ self._long_opt[opt] = option
+
+ if option.dest is not None: # option has a dest, we need a default
+ if option.default is not NO_DEFAULT:
+ self.defaults[option.dest] = option.default
+ elif option.dest not in self.defaults:
+ self.defaults[option.dest] = None
+
+ return option
+
+ def add_options(self, option_list):
+ for option in option_list:
+ self.add_option(option)
+
+ # -- Option query/removal methods ----------------------------------
+
+ def get_option(self, opt_str):
+ return (self._short_opt.get(opt_str) or
+ self._long_opt.get(opt_str))
+
+ def has_option(self, opt_str):
+ return (opt_str in self._short_opt or
+ opt_str in self._long_opt)
+
+ def remove_option(self, opt_str):
+ option = self._short_opt.get(opt_str)
+ if option is None:
+ option = self._long_opt.get(opt_str)
+ if option is None:
+ raise ValueError("no such option %r" % opt_str)
+
+ for opt in option._short_opts:
+ del self._short_opt[opt]
+ for opt in option._long_opts:
+ del self._long_opt[opt]
+ option.container.option_list.remove(option)
+
+
+ # -- Help-formatting methods ---------------------------------------
+
+ def format_option_help(self, formatter):
+ if not self.option_list:
+ return ""
+ result = []
+ for option in self.option_list:
+ if not option.help is SUPPRESS_HELP:
+ result.append(formatter.format_option(option))
+ return "".join(result)
+
+ def format_description(self, formatter):
+ return formatter.format_description(self.get_description())
+
+    def format_help(self, formatter):
+        """Return the description and option help, joined by a blank line."""
+        result = []
+        if self.description:
+            result.append(self.format_description(formatter))
+        if self.option_list:
+            result.append(self.format_option_help(formatter))
+        return "\n".join(result)
+
+
+class OptionGroup (OptionContainer):
+    """A titled group of options within an OptionParser.
+
+    Groups only affect help output; for parsing purposes their options
+    behave exactly as if added directly to the parser, because the
+    short/long option mappings are shared with the parent parser.
+    """
+
+    def __init__(self, parser, title, description=None):
+        self.parser = parser
+        OptionContainer.__init__(
+            self, parser.option_class, parser.conflict_handler, description)
+        self.title = title
+
+    def _create_option_list(self):
+        # Keep a private option list, but share the short/long option
+        # mappings with the parent parser so lookups see group options.
+        self.option_list = []
+        self._share_option_mappings(self.parser)
+
+    def set_title(self, title):
+        self.title = title
+
+    def destroy(self):
+        """see OptionParser.destroy()."""
+        OptionContainer.destroy(self)
+        del self.option_list
+
+    # -- Help-formatting methods ---------------------------------------
+
+    def format_help(self, formatter):
+        """Format this group's heading plus its (indented) option help."""
+        result = formatter.format_heading(self.title)
+        formatter.indent()
+        result += OptionContainer.format_help(self, formatter)
+        formatter.dedent()
+        return result
+
+
+class OptionParser (OptionContainer):
+
+ """
+ Class attributes:
+ standard_option_list : [Option]
+ list of standard options that will be accepted by all instances
+ of this parser class (intended to be overridden by subclasses).
+
+ Instance attributes:
+ usage : string
+ a usage string for your program. Before it is displayed
+ to the user, "%prog" will be expanded to the name of
+ your program (self.prog or os.path.basename(sys.argv[0])).
+ prog : string
+ the name of the current program (to override
+ os.path.basename(sys.argv[0])).
+ description : string
+ A paragraph of text giving a brief overview of your program.
+ optparse reformats this paragraph to fit the current terminal
+ width and prints it when the user requests help (after usage,
+ but before the list of options).
+ epilog : string
+ paragraph of help text to print after option help
+
+ option_groups : [OptionGroup]
+ list of option groups in this parser (option groups are
+ irrelevant for parsing the command-line, but very useful
+ for generating help)
+
+ allow_interspersed_args : bool = true
+ if true, positional arguments may be interspersed with options.
+ Assuming -a and -b each take a single argument, the command-line
+ -ablah foo bar -bboo baz
+ will be interpreted the same as
+ -ablah -bboo -- foo bar baz
+ If this flag were false, that command line would be interpreted as
+ -ablah -- foo bar -bboo baz
+ -- ie. we stop processing options as soon as we see the first
+ non-option argument. (This is the tradition followed by
+ Python's getopt module, Perl's Getopt::Std, and other argument-
+ parsing libraries, but it is generally annoying to users.)
+
+ process_default_values : bool = true
+ if true, option default values are processed similarly to option
+ values from the command line: that is, they are passed to the
+ type-checking function for the option's type (as long as the
+ default value is a string). (This really only matters if you
+ have defined custom types; see SF bug #955889.) Set it to false
+ to restore the behaviour of Optik 1.4.1 and earlier.
+
+ rargs : [string]
+ the argument list currently being parsed. Only set when
+ parse_args() is active, and continually trimmed down as
+ we consume arguments. Mainly there for the benefit of
+ callback options.
+ largs : [string]
+ the list of leftover arguments that we have skipped while
+ parsing options. If allow_interspersed_args is false, this
+ list is always empty.
+ values : Values
+ the set of option values currently being accumulated. Only
+ set when parse_args() is active. Also mainly for callbacks.
+
+ Because of the 'rargs', 'largs', and 'values' attributes,
+ OptionParser is not thread-safe. If, for some perverse reason, you
+ need to parse command-line arguments simultaneously in different
+ threads, use different OptionParser instances.
+
+ """
+
+ standard_option_list = []
+
+ def __init__(self,
+ usage=None,
+ option_list=None,
+ option_class=Option,
+ version=None,
+ conflict_handler="error",
+ description=None,
+ formatter=None,
+ add_help_option=True,
+ prog=None,
+ epilog=None):
+ OptionContainer.__init__(
+ self, option_class, conflict_handler, description)
+ self.set_usage(usage)
+ self.prog = prog
+ self.version = version
+ self.allow_interspersed_args = True
+ self.process_default_values = True
+ if formatter is None:
+ formatter = IndentedHelpFormatter()
+ self.formatter = formatter
+ self.formatter.set_parser(self)
+ self.epilog = epilog
+
+ # Populate the option list; initial sources are the
+ # standard_option_list class attribute, the 'option_list'
+ # argument, and (if applicable) the _add_version_option() and
+ # _add_help_option() methods.
+ self._populate_option_list(option_list,
+ add_help=add_help_option)
+
+ self._init_parsing_state()
+
+
+ def destroy(self):
+ """
+ Declare that you are done with this OptionParser. This cleans up
+ reference cycles so the OptionParser (and all objects referenced by
+ it) can be garbage-collected promptly. After calling destroy(), the
+ OptionParser is unusable.
+ """
+ OptionContainer.destroy(self)
+ for group in self.option_groups:
+ group.destroy()
+ del self.option_list
+ del self.option_groups
+ del self.formatter
+
+
+ # -- Private methods -----------------------------------------------
+ # (used by our or OptionContainer's constructor)
+
+ def _create_option_list(self):
+ self.option_list = []
+ self.option_groups = []
+ self._create_option_mappings()
+
+ def _add_help_option(self):
+ self.add_option("-h", "--help",
+ action="help",
+ help=_("show this help message and exit"))
+
+ def _add_version_option(self):
+ self.add_option("--version",
+ action="version",
+ help=_("show program's version number and exit"))
+
+ def _populate_option_list(self, option_list, add_help=True):
+ if self.standard_option_list:
+ self.add_options(self.standard_option_list)
+ if option_list:
+ self.add_options(option_list)
+ if self.version:
+ self._add_version_option()
+ if add_help:
+ self._add_help_option()
+
+ def _init_parsing_state(self):
+ # These are set in parse_args() for the convenience of callbacks.
+ self.rargs = None
+ self.largs = None
+ self.values = None
+
+
+ # -- Simple modifier methods ---------------------------------------
+
+ def set_usage(self, usage):
+ if usage is None:
+ self.usage = _("%prog [options]")
+ elif usage is SUPPRESS_USAGE:
+ self.usage = None
+ # For backwards compatibility with Optik 1.3 and earlier.
+ elif usage.lower().startswith("usage: "):
+ self.usage = usage[7:]
+ else:
+ self.usage = usage
+
+ def enable_interspersed_args(self):
+ """Set parsing to not stop on the first non-option, allowing
+ interspersing switches with command arguments. This is the
+ default behavior. See also disable_interspersed_args() and the
+ class documentation description of the attribute
+ allow_interspersed_args."""
+ self.allow_interspersed_args = True
+
+ def disable_interspersed_args(self):
+ """Set parsing to stop on the first non-option. Use this if
+ you have a command processor which runs another command that
+ has options of its own and you want to make sure these options
+ don't get confused.
+ """
+ self.allow_interspersed_args = False
+
+ def set_process_default_values(self, process):
+ self.process_default_values = process
+
+ def set_default(self, dest, value):
+ self.defaults[dest] = value
+
+ def set_defaults(self, **kwargs):
+ self.defaults.update(kwargs)
+
+ def _get_all_options(self):
+ options = self.option_list[:]
+ for group in self.option_groups:
+ options.extend(group.option_list)
+ return options
+
+ def get_default_values(self):
+ if not self.process_default_values:
+ # Old, pre-Optik 1.5 behaviour.
+ return Values(self.defaults)
+
+ defaults = self.defaults.copy()
+ for option in self._get_all_options():
+ default = defaults.get(option.dest)
+ if isbasestring(default):
+ opt_str = option.get_opt_string()
+ defaults[option.dest] = option.check_value(opt_str, default)
+
+ return Values(defaults)
+
+
+ # -- OptionGroup methods -------------------------------------------
+
+ def add_option_group(self, *args, **kwargs):
+ # XXX lots of overlap with OptionContainer.add_option()
+ if type(args[0]) is types.StringType:
+ group = OptionGroup(self, *args, **kwargs)
+ elif len(args) == 1 and not kwargs:
+ group = args[0]
+ if not isinstance(group, OptionGroup):
+ raise TypeError, "not an OptionGroup instance: %r" % group
+ if group.parser is not self:
+ raise ValueError, "invalid OptionGroup (wrong parser)"
+ else:
+ raise TypeError, "invalid arguments"
+
+ self.option_groups.append(group)
+ return group
+
+ def get_option_group(self, opt_str):
+ option = (self._short_opt.get(opt_str) or
+ self._long_opt.get(opt_str))
+ if option and option.container is not self:
+ return option.container
+ return None
+
+
+ # -- Option-parsing methods ----------------------------------------
+
+ def _get_args(self, args):
+ if args is None:
+ return sys.argv[1:]
+ else:
+ return args[:] # don't modify caller's list
+
+ def parse_args(self, args=None, values=None):
+ """
+ parse_args(args : [string] = sys.argv[1:],
+ values : Values = None)
+ -> (values : Values, args : [string])
+
+ Parse the command-line options found in 'args' (default:
+ sys.argv[1:]). Any errors result in a call to 'error()', which
+ by default prints the usage message to stderr and calls
+ sys.exit() with an error message. On success returns a pair
+ (values, args) where 'values' is an Values instance (with all
+ your option values) and 'args' is the list of arguments left
+ over after parsing options.
+ """
+ rargs = self._get_args(args)
+ if values is None:
+ values = self.get_default_values()
+
+ # Store the halves of the argument list as attributes for the
+ # convenience of callbacks:
+ # rargs
+ # the rest of the command-line (the "r" stands for
+ # "remaining" or "right-hand")
+ # largs
+ # the leftover arguments -- ie. what's left after removing
+ # options and their arguments (the "l" stands for "leftover"
+ # or "left-hand")
+ self.rargs = rargs
+ self.largs = largs = []
+ self.values = values
+
+ try:
+ stop = self._process_args(largs, rargs, values)
+ except (BadOptionError, OptionValueError), err:
+ self.error(str(err))
+
+ args = largs + rargs
+ return self.check_values(values, args)
+
+ def check_values(self, values, args):
+ """
+ check_values(values : Values, args : [string])
+ -> (values : Values, args : [string])
+
+ Check that the supplied option values and leftover arguments are
+ valid. Returns the option values and leftover arguments
+ (possibly adjusted, possibly completely new -- whatever you
+ like). Default implementation just returns the passed-in
+ values; subclasses may override as desired.
+ """
+ return (values, args)
+
+ def _process_args(self, largs, rargs, values):
+ """_process_args(largs : [string],
+ rargs : [string],
+ values : Values)
+
+ Process command-line arguments and populate 'values', consuming
+ options and arguments from 'rargs'. If 'allow_interspersed_args' is
+ false, stop at the first non-option argument. If true, accumulate any
+ interspersed non-option arguments in 'largs'.
+ """
+ while rargs:
+ arg = rargs[0]
+ # We handle bare "--" explicitly, and bare "-" is handled by the
+ # standard arg handler since the short arg case ensures that the
+ # len of the opt string is greater than 1.
+ if arg == "--":
+ del rargs[0]
+ return
+ elif arg[0:2] == "--":
+ # process a single long option (possibly with value(s))
+ self._process_long_opt(rargs, values)
+ elif arg[:1] == "-" and len(arg) > 1:
+ # process a cluster of short options (possibly with
+ # value(s) for the last one only)
+ self._process_short_opts(rargs, values)
+ elif self.allow_interspersed_args:
+ largs.append(arg)
+ del rargs[0]
+ else:
+ return # stop now, leave this arg in rargs
+
+ # Say this is the original argument list:
+ # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
+ # ^
+ # (we are about to process arg(i)).
+ #
+ # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
+ # [arg0, ..., arg(i-1)] (any options and their arguments will have
+ # been removed from largs).
+ #
+ # The while loop will usually consume 1 or more arguments per pass.
+ # If it consumes 1 (eg. arg is an option that takes no arguments),
+ # then after _process_arg() is done the situation is:
+ #
+ # largs = subset of [arg0, ..., arg(i)]
+ # rargs = [arg(i+1), ..., arg(N-1)]
+ #
+ # If allow_interspersed_args is false, largs will always be
+ # *empty* -- still a subset of [arg0, ..., arg(i-1)], but
+ # not a very interesting subset!
+
+ def _match_long_opt(self, opt):
+ """_match_long_opt(opt : string) -> string
+
+ Determine which long option string 'opt' matches, ie. which one
+ it is an unambiguous abbrevation for. Raises BadOptionError if
+ 'opt' doesn't unambiguously match any long option string.
+ """
+ return _match_abbrev(opt, self._long_opt)
+
+    def _process_long_opt(self, rargs, values):
+        """Consume one long option (and its value, if any) from 'rargs'
+        and process it into 'values'.
+        """
+        arg = rargs.pop(0)
+
+        # Value explicitly attached to arg?  Pretend it's the next
+        # argument.
+        if "=" in arg:
+            (opt, next_arg) = arg.split("=", 1)
+            rargs.insert(0, next_arg)
+            had_explicit_value = True
+        else:
+            opt = arg
+            had_explicit_value = False
+
+        # May expand an unambiguous abbreviation; raises BadOptionError
+        # if 'opt' matches nothing, AmbiguousOptionError if several.
+        opt = self._match_long_opt(opt)
+        option = self._long_opt[opt]
+        if option.takes_value():
+            # STU: Forcing options to take an explicit value so there's no grey area
+            # between options and arguments.
+            # (Local deviation from stock optparse: "--opt value" is
+            # rejected here; only "--opt=value" is accepted.)
+            if not had_explicit_value:
+                self.error(_("%s option requires an explicit argument") % opt)
+            nargs = option.nargs
+            if len(rargs) < nargs:
+                if nargs == 1:
+                    self.error(_("%s option requires an argument") % opt)
+                else:
+                    self.error(_("%s option requires %d arguments")
+                               % (opt, nargs))
+            elif nargs == 1:
+                value = rargs.pop(0)
+            else:
+                value = tuple(rargs[0:nargs])
+                del rargs[0:nargs]
+
+        elif had_explicit_value:
+            self.error(_("%s option does not take a value") % opt)
+
+        else:
+            value = None
+
+        option.process(opt, value, values, self)
+
+    def _process_short_opts(self, rargs, values):
+        """Consume one cluster of short options (e.g. "-abc") from
+        'rargs' and process each into 'values'.  Only the last option
+        in a cluster may take a value.
+        """
+        arg = rargs.pop(0)
+        stop = False
+        i = 1
+        for ch in arg[1:]:
+            opt = "-" + ch
+            option = self._short_opt.get(opt)
+            i += 1                      # we have consumed a character
+
+            if not option:
+                raise BadOptionError(opt)
+            if option.takes_value():
+                # Any characters left in arg?  Pretend they're the
+                # next arg, and stop consuming characters of arg.
+                if i < len(arg):
+                    rargs.insert(0, arg[i:])
+                    stop = True
+                else:
+                    # STU: Forcing options to take an explicit value so there's no grey area
+                    # between options and arguments.
+                    # (Local deviation from stock optparse: "-o value" is
+                    # rejected; the value must be attached, as in "-ovalue".)
+                    self.error(_("%s option requires an explicit argument") % opt)
+
+                nargs = option.nargs
+                if len(rargs) < nargs:
+                    if nargs == 1:
+                        self.error(_("%s option requires an argument") % opt)
+                    else:
+                        self.error(_("%s option requires %d arguments")
+                                   % (opt, nargs))
+                elif nargs == 1:
+                    value = rargs.pop(0)
+                else:
+                    value = tuple(rargs[0:nargs])
+                    del rargs[0:nargs]
+
+            else:                       # option doesn't take a value
+                value = None
+
+            option.process(opt, value, values, self)
+
+            if stop:
+                break
+
+
+ # -- Feedback methods ----------------------------------------------
+
+ def get_prog_name(self):
+ if self.prog is None:
+ return os.path.basename(sys.argv[0])
+ else:
+ return self.prog
+
+ def expand_prog_name(self, s):
+ return s.replace("%prog", self.get_prog_name())
+
+ def get_description(self):
+ return self.expand_prog_name(self.description)
+
+ def exit(self, status=0, msg=None):
+ if msg:
+ sys.stderr.write(msg)
+ sys.exit(status)
+
+ def error(self, msg):
+ """error(msg : string)
+
+ Print a usage message incorporating 'msg' to stderr and exit.
+ If you override this in a subclass, it should not return -- it
+ should either exit or raise an exception.
+ """
+ self.print_usage(sys.stderr)
+ self.exit(2, "%s: error: %s\n" % (self.get_prog_name(), msg))
+
+ def get_usage(self):
+ if self.usage:
+ return self.formatter.format_usage(
+ self.expand_prog_name(self.usage))
+ else:
+ return ""
+
+ def print_usage(self, file=None):
+ """print_usage(file : file = stdout)
+
+ Print the usage message for the current program (self.usage) to
+ 'file' (default stdout). Any occurrence of the string "%prog" in
+ self.usage is replaced with the name of the current program
+ (basename of sys.argv[0]). Does nothing if self.usage is empty
+ or not defined.
+ """
+ if self.usage:
+ print >>file, self.get_usage()
+
+ def get_version(self):
+ if self.version:
+ return self.expand_prog_name(self.version)
+ else:
+ return ""
+
+ def print_version(self, file=None):
+ """print_version(file : file = stdout)
+
+ Print the version message for this program (self.version) to
+ 'file' (default stdout). As with print_usage(), any occurrence
+ of "%prog" in self.version is replaced by the current program's
+ name. Does nothing if self.version is empty or undefined.
+ """
+ if self.version:
+ print >>file, self.get_version()
+
+ def format_option_help(self, formatter=None):
+ if formatter is None:
+ formatter = self.formatter
+ formatter.store_option_strings(self)
+ result = []
+ result.append(formatter.format_heading(_("Options")))
+ formatter.indent()
+ if self.option_list:
+ result.append(OptionContainer.format_option_help(self, formatter))
+ result.append("\n")
+ for group in self.option_groups:
+ result.append(group.format_help(formatter))
+ result.append("\n")
+ formatter.dedent()
+ # Drop the last "\n", or the header if no options or option groups:
+ return "".join(result[:-1])
+
+ def format_epilog(self, formatter):
+ return formatter.format_epilog(self.epilog)
+
+ def format_help(self, formatter=None):
+ if formatter is None:
+ formatter = self.formatter
+ result = []
+ if self.usage:
+ result.append(self.get_usage() + "\n")
+ if self.description:
+ result.append(self.format_description(formatter) + "\n")
+ result.append(self.format_option_help(formatter))
+ result.append(self.format_epilog(formatter))
+ return "".join(result)
+
+ # used by test suite
+ def _get_encoding(self, file):
+ encoding = getattr(file, "encoding", None)
+ if not encoding:
+ encoding = sys.getdefaultencoding()
+ return encoding
+
+ def print_help(self, file=None):
+ """print_help(file : file = stdout)
+
+ Print an extended help message, listing all options and any
+ help text provided with them, to 'file' (default stdout).
+ """
+ if file is None:
+ file = sys.stdout
+ encoding = self._get_encoding(file)
+ file.write(self.format_help().encode(encoding, "replace"))
+
+# class OptionParser
+
+
+def _match_abbrev(s, wordmap):
+ """_match_abbrev(s : string, wordmap : {string : Option}) -> string
+
+ Return the string key in 'wordmap' for which 's' is an unambiguous
+ abbreviation. If 's' is found to be ambiguous or doesn't match any of
+ 'words', raise BadOptionError.
+ """
+ # Is there an exact match?
+ if s in wordmap:
+ return s
+ else:
+ # Isolate all words with s as a prefix.
+ possibilities = [word for word in wordmap.keys()
+ if word.startswith(s)]
+ # No exact match, so there had better be just one possibility.
+ if len(possibilities) == 1:
+ return possibilities[0]
+ elif not possibilities:
+ raise BadOptionError(s)
+ else:
+ # More than one possible completion: ambiguous prefix.
+ possibilities.sort()
+ raise AmbiguousOptionError(s, possibilities)
+
+
+# Some day, there might be many Option classes. As of Optik 1.3, the
+# preferred way to instantiate Options is indirectly, via make_option(),
+# which will become a factory function when there are many Option
+# classes.
+make_option = Option
Index: cake/cake/path.py
===================================================================
--- cake/cake/path.py (revision 0)
+++ cake/cake/path.py (working copy)
@@ -0,0 +1,531 @@
+"""Path Utilities.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import os
+import os.path
+import re
+import cake.system
+
+def absPath(path, cwd=None):
+ """Return a normalised absolute path of the given path.
+
+ @param path: The path to normalise and make absolute.
+ @type path: string
+
+ @param cwd: Optional current working directory to prepend
+ if the path is not absolute. If not provided this defaults
+ to os.getcwd().
+ @type cwd: string or None
+
+ @return: The normalised, absolute path.
+ @rtype: string
+ """
+ if not os.path.isabs(path):
+ if cwd is None:
+ if isinstance(path, unicode):
+ cwd = os.getcwdu()
+ else:
+ cwd = os.getcwd()
+ path = os.path.join(cwd, path)
+ return os.path.normpath(path)
+
+def addPrefix(path, prefix):
+ """Prefix the baseName part of a path and return the result.
+
+ @param path: The path to prefix.
+ @type path: string
+ @param prefix: The prefix to prepend to the baseName part.
+ @type prefix: string
+
+ @return: The path with it's baseName part prefixed with 'prefix'.
+ @rtype: string
+ """
+ if not prefix:
+ return path
+
+ tail = os.path.basename(path)
+ head = path[:len(path)-len(tail)]
+
+ return head + prefix + tail
+
+def baseName(path):
+ """Get the file-name part of the path.
+
+ @param path: The path to split.
+ @type path: string
+
+ @return: The file-name part of the path.
+ @rtype: string
+ """
+ return os.path.basename(path)
+
+def baseNameWithoutExtension(path):
+ """Get the file-name part of the path without the extension.
+
+ @param path: The path to split.
+ @type path: string
+
+ @return: The file-name part of the path without the extension.
+ @rtype: string
+ """
+ tail = os.path.basename(path)
+ extStart = tail.rfind(".")
+
+ if extStart == -1:
+ return tail
+ else:
+ return tail[:extStart]
+
+def commonPath(path1, path2):
+  """
+  Given two paths, find their common root path, if any.
+
+  Unlike os.path.commonprefix() this works on whole path components,
+  so "a/bc" and "a/bd" have common path "a", not "a/b".
+
+  @param path1: The first path to scan.
+  @type path1: string
+  @param path2: The second path to scan.
+  @type path2: string
+  @return: The common root path of the two paths.
+  @rtype: string
+  """
+  seps = [os.path.sep, os.path.altsep]
+  path1len = len(path1)
+  path2len = len(path2)
+  charCount = min(path1len, path2len)
+  safeCount = 0  # length of the longest component-aligned match so far
+
+  for i in xrange(charCount):
+    if path1[i] != path2[i]:
+      return path1[:safeCount] # No more matches
+    elif path1[i] in seps:
+      safeCount = i # Last safe path match
+
+  # All characters matched in at least one string. For the path to be valid,
+  # the next character in other string must be a slash
+  if path1len > charCount:
+    if path1[charCount] in seps:
+      safeCount = charCount
+  elif path2len > charCount:
+    if path2[charCount] in seps:
+      safeCount = charCount
+  elif path1len == path2len and path1len and path1[-1] not in seps:
+    # Identical non-empty paths with no trailing slash: the whole
+    # path is common.
+    safeCount = path1len
+  return path1[:safeCount]
+
+def dirName(path):
+ """Get the directory part of the path.
+
+ @param path: The path to split.
+ @type path: string
+
+ @return: The directory part of the path.
+ @rtype: string
+ """
+ return os.path.dirname(path)
+
+def exists(path):
+ """Query if a file or directory exists at the given path.
+
+ @param path: The path to check.
+ @type path: string
+
+ @return: True if a file or directory exists, otherwise False.
+ @rtype: bool
+ """
+ return os.path.exists(path)
+
+def expandVars(path, env):
+ """Recursively expand shell variables of the form $var and ${var}.
+
+ This function is a copy of os.path.expandvars() with added support for
+ recursion.
+
+ Unknown variables are replaced with {MISSING_SYMBOL_<varname>}.
+
+ @param path: The path to expand.
+ @type path: string
+ @param env: A dictionary of symbols to their values.
+ @type env: dict
+
+ @return: The expanded path.
+ @rtype: string
+ """
+ if '$' not in path:
+ return path
+ import string
+ varchars = string.ascii_letters + string.digits + '_-'
+ res = ''
+ index = 0
+ pathlen = len(path)
+ while index < pathlen:
+ c = path[index]
+ if c == '\'': # no expansion within single quotes
+ path = path[index + 1:]
+ pathlen = len(path)
+ try:
+ index = path.index('\'')
+ res = res + '\'' + path[:index + 1]
+ except ValueError:
+ res = res + path
+ index = pathlen - 1
+ elif c == '$': # variable or '$$'
+ if path[index + 1:index + 2] == '$':
+ res = res + c
+ index = index + 1
+ elif path[index + 1:index + 2] == '{':
+ path = path[index + 2:]
+ pathlen = len(path)
+ try:
+ index = path.index('}')
+ var = path[:index]
+ sliceStart = None
+ # Check for an indexed variable.
+ if var.endswith(']') and '[' in var:
+ m = re.match(r'(.+)\[(\d+)\]', var)
+ if m:
+ var = m.group(1)
+ sliceStart = int(m.group(2))
+ if var in env:
+ subVar = expandVars(env[var], env)
+ if sliceStart is not None:
+ subVar = subVar[sliceStart]
+ res = res + subVar
+ else:
+ res = res + '{MISSING_SYMBOL_' + var + '}'
+ except ValueError:
+ res = res + path
+ index = pathlen - 1
+ else:
+ var = ''
+ index = index + 1
+ c = path[index:index + 1]
+ while c != '' and c in varchars:
+ var = var + c
+ index = index + 1
+ c = path[index:index + 1]
+ if var in env:
+ res = res + expandVars(env[var], env)
+ else:
+ res = res + '{MISSING_SYMBOL_' + var + '}'
+ if c != '':
+ res = res + c
+ else:
+ res = res + c
+ index = index + 1
+ return res
+
+def extension(path):
+  """Get the file extension of the last part of a path.
+
+  A file extension is any part after the last dot inclusively.
+  Dot-files with no further dot (".profile") and names made up of
+  leading dots only ("..", "..b") are treated as having no extension.
+
+  @param path: The path to split.
+  @type path: string
+
+  @return: The extension part of the path, including the leading dot,
+  or "" if there is none.
+  @rtype: string
+  """
+  end = path.rfind("\\")
+  end = max(path.rfind("/", end + 1), end) + 1
+  # We search in the substring AFTER the last slash.
+  # In the case that a slash is not found, the -1 returned by rfind becomes zero,
+  # and so searches the whole string
+  extStart = path.rfind(".", end)
+  # The count() test rejects the case where every character before the
+  # final dot is itself a dot (e.g. "." or ".."): then the dot run is
+  # not an extension separator.
+  if extStart > end and path.count(".", end, extStart) != extStart - end:
+    return path[extStart:]
+  else:
+    return ""
+
+if cake.system.isWindows():
+  try:
+    import win32file
+    def _fileSystemBaseName(path, stem, leaf):
+      # Fast path: ask Win32 for the first directory entry matching
+      # 'path'; index 8 of the find-data tuple is the file name with
+      # its on-disk casing.
+      # NOTE(review): str() will raise UnicodeEncodeError for non-ASCII
+      # file names -- confirm whether unicode names must survive here.
+      findData = win32file.FindFilesIterator(path).next()
+      return str(findData[8])
+  except ImportError:
+    def _fileSystemBaseName(path, stem, leaf):
+      # Fallback without pywin32: scan the parent directory for a
+      # case-insensitive match and return its on-disk casing.
+      if not stem:
+        stem = '.'
+      leafNorm = os.path.normcase(leaf)
+      for f in os.listdir(stem):
+        if os.path.normcase(f) == leafNorm:
+          return f
+      return leaf  # no match found; keep the caller's casing
+
+  def fileSystemPath(path):
+    """Look up the correctly cased path from the file system.
+
+    This is only relevant on file systems that are case insensitive such
+    as Windows.
+
+    '/' and '\\' will be left intact.
+
+    '.' and '..' will be left intact.
+
+    A drive letter will be capitalized.
+
+    @param path: The path to look up.
+    @type path: string
+
+    @return: The correctly cased file system path.
+    @rtype: string
+    """
+    seps = frozenset([os.path.sep, os.path.altsep])
+
+    # Walk from leaf to root, correcting the casing of each component
+    # and remembering the exact separator that followed it.
+    parts = list()
+    while path:
+      stem, leaf = os.path.split(path)
+      if leaf != '.' and leaf != '..':
+        try:
+          leaf = _fileSystemBaseName(path, stem, leaf)
+        except Exception:
+          # Best effort: unreadable/missing components keep their
+          # original casing.
+          pass
+      parts.append(leaf)
+
+      if stem and len(path) > len(stem):
+        sep = path[len(stem)]
+        if sep in seps:
+          parts.append(sep)
+
+      path = stem
+
+      if not leaf:
+        # Reached root path
+        break
+
+    if path:
+      # Capitalise drive letter if found
+      if len(path) >= 2 and path[1] == ':':
+        path = path.capitalize()
+      parts.append(path)
+
+    # Components were collected leaf-first; reverse to rebuild the path.
+    return "".join(reversed(parts))
+
+else:
+  # Assume a case-sensitive file-system
+  def fileSystemPath(path):
+    return path
+
+def forceExtension(path, ext):
+ """Return the path modified if needed to have the specified extension.
+
+ @param path: The path to force an extension onto.
+ @type path: string
+
+ @return: The path with the specified extension.
+ @rtype: string
+ """
+ if not os.path.normcase(path).endswith(os.path.normcase(ext)):
+ return path + ext
+ else:
+ return path
+
+def forcePrefixSuffix(path, prefix, suffix):
+ """Force both a prefix and suffix only if the suffix does not match.
+
+ @param path: The path to modify.
+ @type path: string
+ @param prefix: The prefix to prepend to the baseName part.
+ @type prefix: string
+ @param suffix: The suffix to append to the path.
+ @type suffix: string
+ @return: The path with the given prefix and suffix if the suffix did
+ not exist, otherwise the original path.
+ @rtype: string
+ """
+ if os.path.normcase(extension(path)) != os.path.normcase(suffix):
+ return addPrefix(path, prefix) + suffix
+ else:
+ return path
+
+def hasExtension(path):
+ """Query if the last part of a path has a file extension.
+
+ A file extension is any part after the last dot inclusively.
+
+ @param path: The path to check.
+ @type path: string
+
+ @return: True if the path has an extension, otherwise False.
+ @rtype: bool
+ """
+ end = path.rfind("\\")
+ end = max(path.rfind("/", end + 1), end) + 1
+ # We search in the substring AFTER the last slash.
+ # In the case that a slash is not found, the -1 returned by rfind becomes zero,
+ # and so searches the whole string
+ extStart = path.rfind(".", end)
+ return extStart > end and path.count(".", end, extStart) != extStart - end
+
+def isAbs(path):
+ """Query if the path is absolute.
+
+ @param path: The path to check.
+ @type path: string
+
+ @return: True if the path is absolute, otherwise False.
+ @rtype: bool
+ """
+ return os.path.isabs(path)
+
+def isDir(path):
+ """Query if the path is a directory.
+
+ @param path: The path to check.
+ @type path: string
+
+ @return: True if the path is a directory, otherwise False.
+ @rtype: bool
+ """
+ return os.path.isdir(path)
+
+def isFile(path):
+ """Query if the path is a file.
+
+ @param path: The path to check.
+ @type path: string
+
+ @return: True if the path is a file, otherwise False.
+ @rtype: bool
+ """
+ return os.path.isfile(path)
+
+def isMount(path):
+  """Query if the path is a mount point (drive root).
+
+  @param path: The path to check.
+  @type path: string
+
+  @return: True if the path is a mount point, otherwise False.
+  @rtype: bool
+  """
+  seps = [os.path.sep, os.path.altsep]
+  root, rest = os.path.splitdrive(path)
+  if root and root[0] in seps:
+    # NOTE(review): a root starting with a separator suggests a UNC
+    # share; this branch depends on os.path.splitdrive() handling UNC
+    # paths -- confirm on the target Python version.
+    return (not rest) or (rest in seps)
+  # Drive ("C:\\") or plain ("/") root: a mount iff the remainder is
+  # exactly one separator.
+  return rest in seps
+
+def join(*args):
+ """Find the cross product of any amount of input paths or lists of paths.
+
+ Examples::
+ join("a", "b", "c") -> "a/b/c"
+ join("a", ["b", "c"], "d") -> ["a/b/d", "a/c/d"]
+ join(["a", "b"], ["c", "d"]) -> ["a/c", "a/d", "b/c", "b/d"]
+
+ @param args: The arguments to cross.
+ @type args: string or list(string)
+
+ @return: The cross product of the given arguments.
+ @rtype: string or list(string)
+ """
+ results = []
+
+ if not args:
+ return ""
+ elif len(args) == 1:
+ return args[0]
+
+ anyLists = False
+
+ last = args[-1]
+ if isinstance(last, basestring):
+ results = [last]
+ else:
+ results = last
+ anyLists = True
+
+ osJoin = os.path.join
+
+ for i in xrange(len(args) - 2, -1, -1):
+ arg = args[i]
+ if isinstance(arg, basestring):
+ results = [osJoin(arg, r) for r in results]
+ else:
+ anyLists = True
+ newResults = []
+ for a in arg:
+ newResults.extend(osJoin(a, r) for r in results)
+ results = newResults
+
+ if anyLists:
+ return results
+ else:
+ return results[0]
+
+def relativePath(child, parent):
+ """
+ Make a child path relative to the parent path.
+
+ @param child: The absolute child path.
+ @type child: string
+ @param parent: The absolute parent path.
+ @type parent: string
+ @return: The child path relative to parent, or the child
+ path itself if the child was not relative to the parent.
+ @rtype: string
+ """
+ def _hasDrive(path):
+ return bool(os.path.splitdrive(path)[0]) # Drive?
+
+ def _isUnc(path):
+ return path.startswith("\\\\")
+
+ # Convert slashes, remove trailing slash, remove '..' etc.
+ child = os.path.normpath(child)
+ parent = os.path.normpath(parent)
+
+ childList = child.split(os.path.sep)
+ parentList = parent.split(os.path.sep)
+
+ if cake.system.isWindows():
+ if _isUnc(child) or _isUnc(parent):
+ return child # Not even attempting to make unc paths relative
+ if _hasDrive(child) or _hasDrive(parent):
+ if os.path.normcase(parentList[0]) != os.path.normcase(childList[0]):
+ return child # Paths are on different drives
+
+ for i in range(min(len(parentList), len(childList))):
+ if os.path.normcase(parentList[i]) != os.path.normcase(childList[i]):
+ break
+ else:
+ i += 1
+
+ relList = [os.path.pardir] * (len(parentList)-i) + childList[i:]
+ if not relList:
+ return os.curdir
+ return join(*relList)
+
+def split(path):
+ """Split the path into directory and base parts.
+
+ @param path: The path to split.
+ @type path: string
+
+ @return: The directory and base parts of the path.
+ @rtype: tuple(string, string)
+ """
+ return os.path.split(path)
+
+def stripExtension(path):
+ """Return the part of the path before the extension.
+
+ @param path: The path to split.
+ @type path: string
+
+ @return: The part of the path before the extension.
+ @rtype: string
+ """
+ end = path.rfind("\\")
+ end = max(path.rfind("/", end + 1), end) + 1
+ # We search in the substring AFTER the last slash.
+ # In the case that a slash is not found, the -1 returned by rfind becomes zero,
+ # and so searches the whole string
+ extStart = path.rfind(".", end)
+ if extStart > end and path.count(".", end, extStart) != extStart - end:
+ return path[:extStart]
+ else:
+ return path
Index: cake/cake/registry.py
===================================================================
--- cake/cake/registry.py (revision 0)
+++ cake/cake/registry.py (working copy)
@@ -0,0 +1,64 @@
+"""Utilities for querying the Windows registry.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+import _winreg
+import sys
+
+import cake.system
+
+_shownWow64Warning = False
+
def queryString(key, subKey, name):
  """Queries a string value from the Windows registry.

  On 64-bit Windows this function will first try to query the value from
  the 64-bit registry. If the value doesn't exist there it will then try to
  find the value in the 32-bit registry.

  @param key: The key to query, eg: _winreg.HKEY_LOCAL_MACHINE
  @type key: string

  @param subKey: The subkey to query, eg: r"SOFTWARE\Microsoft"
  @type subKey: string

  @param name: The name to query, eg: "InstallDir"
  @type name: string

  @return: The value queried.
  @rtype: string

  @raise WindowsError: If the value could not be found.
  """
  accessModes = [_winreg.KEY_READ]

  # On 64-bit Windows also fall back to the 32-bit registry view. The
  # Windows SDK is usually installed in the 64-bit program files
  # directory but the compiler is usually installed in the 32-bit
  # program files directory.
  if cake.system.isWindows64():
    wow64Flag = getattr(_winreg, "KEY_WOW64_32KEY", None)
    if wow64Flag is not None:
      accessModes.append(_winreg.KEY_READ | wow64Flag)
    else:
      global _shownWow64Warning
      if not _shownWow64Warning:
        _shownWow64Warning = True
        sys.stderr.write(
          "warning: _winreg module does not have access key KEY_WOW64_32KEY. "
          "It may not be possible to find all compiler and SDK install "
          "locations automatically.\n"
          )

  lastIndex = len(accessModes) - 1
  for i, sam in enumerate(accessModes):
    try:
      keyHandle = _winreg.OpenKey(key, subKey, 0, sam)
      try:
        return str(_winreg.QueryValueEx(keyHandle, name)[0])
      finally:
        _winreg.CloseKey(keyHandle)
    except WindowsError:
      # Only re-raise once every access mode has been tried.
      if i == lastIndex:
        raise
Index: cake/cake/runner.py
===================================================================
--- cake/cake/runner.py (revision 0)
+++ cake/cake/runner.py (working copy)
@@ -0,0 +1,441 @@
+"""Running Utilities.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import os
+import os.path
+import sys
+import threading
+import datetime
+import time
+import traceback
+import platform
+
+import cake.engine
+import cake.logging
+import cake.path
+import cake.script
+import cake.task
+import cake.threadpool
+import cake.version
+
+from cake.optparse import Option, OptionParser
+
# Make sure stat() returns floats so timestamps are consistent across
# Python versions (2.4 used longs, 2.5+ uses floats).
# NOTE(review): os.stat_float_times was later deprecated and eventually
# removed from Python 3 — this call ties the module to older Pythons.
os.stat_float_times(True)
+
def callOnce(f):
  """Decorator that handles calling a function only once.

  The second and subsequent times it is called the cached result is
  returned; if the first call raised, the cached exception object is
  re-raised instead. Arguments passed to later calls are ignored.

  @param f: The function to wrap.
  @type f: any callable
  """
  state = {}
  def func(*args, **kwargs):
    if not state:
      try:
        state["result"] = f(*args, **kwargs)
      except Exception:
        # 'except Exception, e' is Python-2-only syntax; capture the
        # exception via sys.exc_info() so the module stays parseable
        # under Python 3 while behaving identically under Python 2.
        state["exception"] = sys.exc_info()[1]
        raise
    try:
      return state["result"]
    except KeyError:
      # First call failed; re-raise the cached exception.
      raise state["exception"]
  return func
+
@callOnce
def _overrideOpen():
  """
  Override the built-in open() and os.open() to set the no-inherit
  flag on files to prevent processes from inheriting file handles.
  """
  if hasattr(os, "O_NOINHERIT"):
    import __builtin__

    if (sys.hexversion >= 0x03000000 or
        platform.python_compiler().startswith("MSC v.1310")):
      # Python 3.x or Python 2.x compiled with MSVC 7.1 doesn't support the
      # 'N' flag being passed to fopen. It is ignored. So we need to manually
      # interpret mode string and call onto os.open().

      # fopen()-style leading mode letter mapped to os.open() flag bits.
      _basicFlags = {
        'r': os.O_RDONLY,
        'w': os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
        'a': os.O_WRONLY | os.O_CREAT | os.O_APPEND,
        }

      # Remaining mode letters; entries mapping to 0 are accepted but
      # have no effect on the flags.
      _otherFlags = {
        '+': os.O_RDWR, # Also clears os.O_RDONLY and os.O_WRONLY
        't': os.O_TEXT,
        'b': os.O_BINARY,
        'N': os.O_NOINHERIT,
        'D': os.O_TEMPORARY,
        'T': os.O_SHORT_LIVED,
        'S': os.O_SEQUENTIAL,
        'R': os.O_RANDOM,
        ' ': 0,
        ',': 0,
        'U': 0,
        }

      def new_open(filename, mode="r", bufsize=-1):
        # Translate the fopen()-style mode string into os.open() flags.
        try:
          flags = _basicFlags[mode[0]]
        except LookupError:
          raise ValueError("mode must start with 'r', 'w' or 'a'")

        for c in mode[1:]:
          try:
            flags |= _otherFlags[c]
          except KeyError:
            raise ValueError("unknown flag '%s' in mode" % c)

        if flags & os.O_RDWR:
          flags &= ~(os.O_RDONLY | os.O_WRONLY)

        if flags & os.O_BINARY and flags & os.O_TEXT:
          raise ValueError("Cannot specify both 't' and 'b' in mode")
        if flags & os.O_SEQUENTIAL and flags & os.O_RANDOM:
          raise ValueError("Cannot specify both 'S' and 'R' in mode")

        # The no-inherit flag itself is added by new_os_open below, which
        # replaces os.open before new_open can ever be called.
        fd = os.open(filename, flags)
        # NOTE(review): the original mode string (possibly containing
        # 'N', 'D', etc.) is passed straight to fdopen() — confirm the
        # target C runtime tolerates the extra letters.
        return os.fdopen(fd, mode, bufsize)
    else:
      # Simpler version for platforms that have fopen() that understands 'N'
      old_open = __builtin__.open
      def new_open(filename, mode="r", *args, **kwargs):
        if "N" not in mode:
          mode += "N"
        return old_open(filename, mode, *args, **kwargs)
    # Installed for both branches above.
    __builtin__.open = new_open

    old_os_open = os.open
    # NOTE(review): 0777 is a Python-2-only octal literal; this function
    # cannot be parsed by Python 3 as written (0o777 needs Python 2.6+).
    def new_os_open(filename, flag, mode=0777):
      flag |= os.O_NOINHERIT
      return old_os_open(filename, flag, mode)
    os.open = new_os_open
+
@callOnce
def _overridePopen():
  """
  Override the subprocess Popen class due to a bug in Python 2.4
  that can cause an exception if a process finishes too quickly.
  """
  # Only Python 2.4 needs the workaround; do nothing elsewhere.
  major, minor = platform.python_version_tuple()[:2]
  if major != "2" or minor != "4":
    return

  import subprocess

  old_Popen = subprocess.Popen

  class new_Popen(old_Popen):
    # poll()/wait() can raise ValueError in Python 2.4 when the process
    # already finished; fall back to the recorded return code.
    def poll(self):
      try:
        return old_Popen.poll(self)
      except ValueError:
        return self.returncode

    def wait(self):
      try:
        return old_Popen.wait(self)
      except ValueError:
        return self.returncode

  subprocess.Popen = new_Popen
+
@callOnce
def _speedUp():
  """
  Speed up execution by importing Psyco and binding the slowest functions
  with it.
  """
  try:
    import psyco
    # Bind the dependency-checking hot spots.
    psyco.bind(cake.engine.Configuration.checkDependencyInfo)
    psyco.bind(cake.engine.Configuration.createDependencyInfo)
    #psyco.full()
    #psyco.profile()
    #psyco.log()
  except ImportError:
    # Psyco is optional; only nag about it on platforms where it is
    # known to be available (Windows CPython 2.5/2.6).
    major, minor = platform.python_version_tuple()[:2]
    if platform.system() == "Windows" and major == "2" and minor in ["5", "6"]:
      sys.stderr.write(
        "warning: Psyco is not installed. Installing it may halve your incremental build time.\n"
        )
+
def run(args=None, cwd=None):
  """Run a cake build with the specified command-line args.

  @param args: A list of command-line args for cake. If this is None
  sys.argv is used instead.
  @type args: list of string, or None
  @param cwd: The working directory to use. If this is None os.getcwd()
  is used instead.
  @type cwd: string or None

  @return: The exit code of cake. Non-zero if exited with errors, zero
  if exited with success.
  @rtype: int
  """
  startTime = datetime.datetime.utcnow()

  # Install the process-wide patches/optimisations exactly once.
  _overrideOpen()
  _overridePopen()
  _speedUp()

  if args is None:
    args = sys.argv[1:]

  if cwd is not None:
    cwd = os.path.abspath(cwd)
  else:
    cwd = os.getcwd()

  usage = "usage: %prog [options] <cake-script>*"
  argsCakeFlag = "--args"

  parser = OptionParser(usage=usage, add_help_option=False)
  parser.add_option(
    "-h", "--help",
    action="help",
    help="Show this help message and exit.",
    )
  parser.add_option(
    "-v", "--version",
    dest="outputVersion",
    action="store_true",
    help="Print the current version of Cake and exit.",
    default=False,
    )
  parser.add_option(
    argsCakeFlag,
    metavar="FILE",
    dest="args",
    help="Path to the args.cake file to use.",
    default=None,
    )
  parser.add_option(
    "--config",
    metavar="FILE",
    dest="config",
    help="Path to the config.cake configuration file to use.",
    default=None,
    )
  parser.add_option(
    "--debug", metavar="KEYWORDS",
    action="extend",
    dest="debugComponents",
    help="Set features to debug, eg: 'reason,run,script,scan'.",
    default=[],
    )
  parser.add_option(
    "-s", "--silent", "--quiet",
    action="store_true",
    dest="quiet",
    help="Suppress printing of all Cake messages, warnings and errors.",
    default=False,
    )
  parser.add_option(
    "-f", "--force",
    action="store_true",
    dest="forceBuild",
    help="Force rebuild of every target.",
    default=False,
    )
  parser.add_option(
    "-j", "--jobs",
    metavar="JOBCOUNT",
    type="int",
    dest="jobs",
    help="Number of simultaneous jobs to execute.",
    default=cake.threadpool.getProcessorCount(),
    )
  parser.add_option(
    "-k", "--keep-going",
    dest="maximumErrorCount",
    action="store_const",
    const=None,
    help="Keep building even in the presence of errors.",
    )
  parser.add_option(
    "-e", "--max-errors",
    dest="maximumErrorCount",
    metavar="COUNT",
    type="int",
    help="Halt the build after a certain number of errors.",
    default=100,
    )

  # Find and remove script filenames from the arguments.
  scripts = []
  newArgs = []
  for arg in args:
    path = arg
    if not os.path.isabs(path):
      path = os.path.join(cwd, path)
    # If it's a file or directory assume it's a script path.
    if os.path.exists(path):
      scripts.append(path)
    else:
      newArgs.append(arg)
  args = newArgs

  # Default to building a script file in the working directory.
  if not scripts:
    scripts.append(cwd)

  logger = cake.logging.Logger()
  engine = cake.engine.Engine(logger, parser, args)

  # Try to find an args.cake command line option.
  for arg in engine.args:
    if arg.startswith(argsCakeFlag):
      # NOTE(review): for the usual '--args=FILE' spelling this slice
      # leaves the leading '=' on the file name — confirm the intended
      # syntax, or strip a leading '=' here.
      argsFileName = arg[len(argsCakeFlag):]
      if argsFileName:
        break
  else:
    # Try to find an args.cake by searching up from each scripts directory.
    for script in scripts:
      # Script could be a file or directory name.
      if os.path.isdir(script):
        scriptDirName = script
      else:
        scriptDirName = os.path.dirname(script)

      argsFileName = engine.searchUpForFile(scriptDirName, "args.cake")
      if argsFileName:
        break
    else:
      argsFileName = None # No args.cake found.

  # Run the args.cake
  if argsFileName:
    script = cake.script.Script(
      path=argsFileName,
      configuration=None,
      variant=None,
      task=None,
      engine=engine,
      )
    # Don't cache args.cake as this is where the cache dir may be set.
    script.execute(cached=False)

  # Parse any remaining args (after args.cake may have modified them).
  options, args = parser.parse_args(engine.args)

  # Print out Cake version information if requested.
  if options.outputVersion:
    cakeVersion = cake.version.__version__
    cakePath = cake.path.dirName(cake.__file__)
    sys.stdout.write("Cake %s [%s]\n" % (cakeVersion, cakePath))
    sys.stdout.write("Python %s\n" % sys.version)
    return 1

  # Find keyword arguments from what's left of the args. Each 'key=v1,v2'
  # argument extends the list of values stored under 'key'.
  keywords = {}
  unknownArgs = []
  for arg in args:
    if '=' in arg:
      keyword, value = arg.split('=', 1)
      existingValues = keywords.setdefault(keyword, [])
      if value:
        existingValues.extend(value.split(','))
    else:
      unknownArgs.append(arg)

  if unknownArgs:
    parser.error("unknown args: %s" % " ".join(unknownArgs))

  # Set components to debug.
  for c in options.debugComponents:
    logger.enableDebug(c)
  logger.quiet = options.quiet

  engine.options = options
  engine.forceBuild = options.forceBuild
  engine.maximumErrorCount = options.maximumErrorCount

  # Worker pool shared by every task in this build.
  threadPool = cake.threadpool.ThreadPool(options.jobs)
  cake.task.setThreadPool(threadPool)

  tasks = []

  configScript = options.config
  if configScript is not None and not os.path.isabs(configScript):
    configScript = os.path.abspath(configScript)

  bootFailed = False

  for script in scripts:
    script = cake.path.fileSystemPath(script)
    try:
      task = engine.execute(
        path=script,
        configScript=configScript,
        keywords=keywords,
        )
      tasks.append(task)
    except cake.engine.BuildError:
      # Error already output
      bootFailed = True
    except Exception:
      bootFailed = True
      msg = traceback.format_exc()
      engine.logger.outputError(msg)
      engine.errors.append(msg)

  def onFinish():
    # Called once every build task has completed; reports overall status.
    if not bootFailed and mainTask.succeeded:
      engine.onBuildSucceeded()
      if engine.warningCount:
        msg = "Build succeeded with %i warnings.\n" % engine.warningCount
      else:
        msg = "Build succeeded.\n"
    else:
      engine.onBuildFailed()
      if engine.warningCount:
        msg = "Build failed with %i errors and %i warnings.\n" % (
          engine.errorCount,
          engine.warningCount,
          )
      else:
        msg = "Build failed with %i errors.\n" % engine.errorCount
    engine.logger.outputInfo(msg)

  mainTask = cake.task.Task()
  mainTask.completeAfter(tasks)
  mainTask.addCallback(onFinish)
  mainTask.start()

  finished = threading.Event()
  mainTask.addCallback(finished.set)
  # We must wait in a loop in case a KeyboardInterrupt comes.
  while not finished.isSet():
    time.sleep(0.1)

  endTime = datetime.datetime.utcnow()
  engine.logger.outputInfo(
    "Build took %s.\n" % _formatTimeDelta(endTime - startTime)
    )

  return engine.errorCount
+
+def _formatTimeDelta(t):
+ """Return a string representation of the time to millisecond precision."""
+
+ hours = t.seconds // 3600
+ minutes = (t.seconds / 60) % 60
+ seconds = t.seconds % 60
+ milliseconds = t.microseconds // 1000
+
+ if t.days:
+ return "%i days, %i:%02i:%02i.%03i" % (
+ t.days, hours, minutes, seconds, milliseconds)
+ else:
+ return "%i:%02i:%02i.%03i" % (hours, minutes, seconds, milliseconds)
Index: cake/cake/script.py
===================================================================
--- cake/cake/script.py (revision 0)
+++ cake/cake/script.py (working copy)
@@ -0,0 +1,202 @@
+"""Base script class and utilities.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import threading
+import cake.path
+
class AsyncResult(object):
  """Base class for asynchronous results.

  Concrete subclasses supply the two attributes below, either directly
  or as properties.

  @ivar task: A Task that will complete when the result is available.
  @ivar result: The result of the asynchronous operation.
  """
+
class DeferredResult(AsyncResult):
  """An AsyncResult whose value is the result of a task.
  """

  def __init__(self, task):
    # The task that produces the value; also satisfies AsyncResult.task.
    self.task = task

  @property
  def result(self):
    # Delegates to the task's result; raises AttributeError if the task
    # has not yet completed successfully (see Task.result).
    return self.task.result
+
+_undefined = object()
+
class ScriptResult(AsyncResult):
  """A placeholder for a named result published by another script.

  The value becomes available once that script's task has completed
  successfully.

  Accessing C{result} before then forces the script to execute on the
  current thread. Prefer chaining a new task after C{task} and reading
  the result from there; that gives better CPU utilisation and faster
  build times.
  """

  __slots__ = ['__script', '__name', '__default']

  def __init__(self, script, name, default=_undefined):
    self.__script = script
    self.__name = name
    self.__default = default

  @property
  def script(self):
    """The Script that will be executed.
    """
    return self.__script

  @property
  def task(self):
    """The script's task.
    """
    return self.__script.task

  @property
  def result(self):
    """The named result, executing the script on demand.
    """
    script = self.__script
    script.execute()
    try:
      return script._getResult(self.__name)
    except KeyError:
      # Fall back to the default when the script never set this result.
      if self.__default is _undefined:
        raise
      return self.__default
+
class Script(object):
  """A class that represents an instance of a Cake script.
  """

  # Per-thread record of the script currently executing on that thread
  # (see getCurrent() and execute()).
  _current = threading.local()

  def __init__(self, path, configuration, variant, engine, task, tools=None, parent=None):
    """Constructor.

    @param path: The path to the script file.
    @param configuration: The configuration to build.
    @param variant: The variant to build.
    @param engine: The engine instance.
    @param task: A task that should complete when all tasks within
    the script have completed.
    @param tools: The tools dictionary to use as cake.tools for this script.
    @param parent: The parent script or None if this is the root script.
    """
    self.path = path
    self.dir = cake.path.dirName(path) or '.'
    self.configuration = configuration
    self.variant = variant
    self.engine = engine
    self.task = task
    if tools is None:
      self.tools = {}
    else:
      self.tools = tools
    # Named values published by this script via setResult().
    self._results = {}
    if parent is None:
      self.root = self
    else:
      self.root = parent.root
    # Serialises execute() so the script body runs at most once even
    # when several threads request execution concurrently.
    self._executionLock = threading.Lock()
    self._executed = False

  def _getResult(self, name):
    # Immediately access the result. Potentially even before the scripts task
    # has completed.
    return self._results[name]

  @staticmethod
  def getCurrent():
    """Get the current thread's currently executing script.

    @return: The currently executing script, or None.
    @rtype: L{Script}
    """
    return getattr(Script._current, "value", None)

  @staticmethod
  def getCurrentRoot():
    """Get the current thread's root script.

    This is the top-level script currently being executed.
    A script may not be the top-level script if it is executed due
    to inclusion from another script.
    """
    current = Script.getCurrent()
    if current is not None:
      return current.root
    else:
      return None

  def setResult(self, **kwargs):
    """Return a set of named values from the script execution.
    """
    self._results.update(kwargs)

  def getResult(self, name, *args, **kwargs):
    """Get a placeholder for a result defined by this script when it is
    executed.

    The script will be executed immediately if the '.result' member is
    accessed.

    @param name: The name of the script result to retrieve.
    @param default: If supplied then the default value to return in case
    the script does not define that result.
    """
    return ScriptResult(self, name, *args, **kwargs)

  def cwd(self, *args):
    """Return the path prefixed with the current script's directory.
    """
    d = self.dir
    if d == '.' and args:
      return cake.path.join(*args)
    else:
      return cake.path.join(d, *args)

  def execute(self, cached=True):
    """Execute this script if it hasn't already been executed.

    @param cached: True if the byte code should be cached to a separate
    file for quicker loading next time.
    @type cached: bool
    """
    if self._executed:
      return

    self._executionLock.acquire()
    try:
      # Must check again in case we have been waiting for another thread to execute.
      if not self._executed:
        try:
          # Use an absolute path so an absolute path is embedded in the .pyc file.
          # This will make exceptions clickable in Eclipse, but it means copying
          # your .pyc files may cause their embedded paths to be incorrect.
          if self.configuration is not None:
            absPath = self.configuration.abspath(self.path)
          else:
            absPath = cake.path.absPath(self.path)
          byteCode = self.engine.getByteCode(absPath, cached=cached)
          scriptGlobals = {'__file__': absPath}
          if self.configuration is not None:
            scriptGlobals.update(self.configuration.scriptGlobals)
          old = Script.getCurrent()
          Script._current.value = self
          try:
            # Use the call form of exec: valid in Python 3 and, as the
            # documented two-item tuple form of the exec statement, in
            # Python 2 as well. The previous 'exec byteCode in
            # scriptGlobals' is Python-2-only syntax.
            exec(byteCode, scriptGlobals)
          finally:
            Script._current.value = old
        finally:
          self._executed = True
    finally:
      self._executionLock.release()
Index: cake/cake/system.py
===================================================================
--- cake/cake/system.py (revision 0)
+++ cake/cake/system.py (working copy)
@@ -0,0 +1,131 @@
+"""System Utilities.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import os
+import os.path
+import platform as platty
+
# Cached platform name, normalised below.
_platform = platty.system()

# Some builds of Python can have platform.system() -> "Windows"
# while others have platform.system() -> "Microsoft".
# Make them all use "Windows" here.
_platformLower = _platform.lower()
if _platformLower.startswith('microsoft'):
  _platform, _platformLower = "Windows", "windows"

# Pre-computed platform predicates read by the is*() accessors below.
_isWindows = _platformLower.startswith('windows')
_isCygwin = _platformLower.startswith('cygwin')
_isDarwin = _platformLower.startswith('darwin')
_isLinux = _platformLower.startswith('linux')
del _platformLower

if _isWindows or _isCygwin:
  # PROCESSOR_ARCHITEW6432 is consulted first: when a 32-bit process
  # runs on 64-bit Windows it holds the real (64-bit) architecture.
  try:
    _architecture = os.environ['PROCESSOR_ARCHITEW6432']
  except KeyError:
    try:
      _architecture = os.environ['PROCESSOR_ARCHITECTURE']
    except KeyError:
      _architecture = platty.machine()
else:
  _architecture = platty.machine()
if not _architecture:
  _architecture = 'unknown'

# True when the underlying OS is 64-bit Windows, even under 32-bit
# Python or Cygwin.
_isWindows64 = (_isWindows or _isCygwin) and \
  _architecture.lower() in ('amd64', 'x64', 'x86_64', 'ia64')
+
def platform():
  """Returns the current operating system (platform).

  @return: The platform name, e.g. "Windows", "Linux", "Darwin".
  @rtype: string
  """
  return _platform
+
def isWindows():
  """Returns True if the current platform is Windows.

  @rtype: bool
  """
  return _isWindows
+
def isWindows64():
  """Returns True if the current underlying platform is Windows 64-bit.

  This can return True even if running under 32-bit Python or running
  under Cygwin.

  @rtype: bool
  """
  return _isWindows64
+
def isCygwin():
  """Returns True if the current platform is Cygwin.

  @rtype: bool
  """
  return _isCygwin
+
def isLinux():
  """Returns True if the current platform is Linux.

  @rtype: bool
  """
  return _isLinux
+
def isDarwin():
  """Returns True if the current platform is Darwin.

  @rtype: bool
  """
  return _isDarwin
+
def architecture():
  """Returns the current machine's architecture.

  On Windows/Cygwin the value comes from the PROCESSOR_ARCHITE*
  environment variables, elsewhere from platform.machine().

  @return: The host architecture, or 'unknown' if the host
  architecture could not be determined.
  @rtype: string
  """
  return _architecture
+
if isCygwin():
  def findExecutable(name, paths):
    """Find an executable given its name and a list of paths.
    """
    for directory in paths:
      candidate = os.path.join(directory, name)
      if os.path.isfile(candidate):
        # On cygwin it can sometimes say a file exists at a path
        # when its real filename includes a .exe on the end.
        # We detect this by actually trying to open the path
        # for read, if it fails we know it should have a .exe.
        try:
          handle = open(candidate, 'rb')
          handle.close()
          return candidate
        except EnvironmentError:
          return candidate + '.exe'
    raise EnvironmentError("Could not find executable.")

elif isWindows():
  def findExecutable(name, paths):
    """Find an executable given its name and a list of paths.
    """
    # Windows executables could have any of a number of extensions.
    # We just search through standard extensions so that we're not
    # dependent on the user's environment.
    extensions = ['', '.bat', '.exe', '.com', '.cmd']
    for directory in paths:
      base = os.path.join(directory, name)
      for extension in extensions:
        candidate = base + extension
        if os.path.isfile(candidate):
          return candidate
    raise EnvironmentError("Could not find executable.")

else:
  def findExecutable(name, paths):
    """Find an executable given its name and a list of paths.
    """
    for directory in paths:
      candidate = os.path.join(directory, name)
      if os.path.isfile(candidate):
        return candidate
    raise EnvironmentError("Could not find executable.")
Index: cake/cake/task.py
===================================================================
--- cake/cake/task.py (revision 0)
+++ cake/cake/task.py (working copy)
@@ -0,0 +1,406 @@
+"""Task Utilities.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import sys
+import threading
+
# Process-wide default thread pool used by tasks started without an
# explicit pool; lazily created, guarded by _threadPoolLock.
_threadPool = None
_threadPoolLock = threading.Lock()
+
def setThreadPool(threadPool):
  """Set the default thread pool to use for executing new tasks.

  @param threadPool: The new default thread pool.

  @return: The previous default thread pool. This is initially None.
  """
  global _threadPool

  _threadPoolLock.acquire()
  try:
    previous = _threadPool
    _threadPool = threadPool
  finally:
    _threadPoolLock.release()

  return previous
+
def getDefaultThreadPool():
  """Get the current default thread pool for new tasks.

  If no default thread pool exists then one will be created automatically.
  """
  global _threadPool
  if _threadPool is None:
    import cake.threadpool
    workerCount = cake.threadpool.getProcessorCount()
    _threadPoolLock.acquire()
    try:
      # Re-check under the lock: another thread may have created the
      # pool while we were importing.
      if _threadPool is None:
        _threadPool = cake.threadpool.ThreadPool(numWorkers=workerCount)
    finally:
      _threadPoolLock.release()
  return _threadPool
+
class TaskError(Exception):
  """Exception raised by the L{Task} class on misuse.
  """
+
+def _makeTasks(value):
+ if value is None:
+ return []
+ elif isinstance(value, Task):
+ return [value]
+ else:
+ return value
+
class Task(object):
  """An operation that is performed on a background thread.
  """

  class State(object):
    """A class that represents the state of a L{Task}.
    """
    NEW = "new"
    """The task is in an uninitialised state."""
    WAITING_FOR_START = "waiting for start"
    """The task is waiting to be started."""
    RUNNING = "running"
    """The task is running."""
    WAITING_FOR_COMPLETE = "waiting for complete"
    """The task is waiting to complete."""
    SUCCEEDED = "succeeded"
    """The task has succeeded."""
    FAILED = "failed"
    """The task has failed."""

  # Per-thread record of the task currently executing on that thread.
  _current = threading.local()

  def __init__(self, func=None):
    """Construct a task given a function.

    @param func: The function this task should run.
    @type func: any callable
    """
    self._func = func
    self._immediate = None
    self._parent = Task.getCurrent()
    self._state = Task.State.NEW
    self._lock = threading.Lock()
    self._startAfterCount = 0
    self._startAfterFailures = False
    self._completeAfterCount = 0
    self._completeAfterFailures = False
    self._callbacks = []

    # A task created while another task is running becomes a child of
    # that task; the parent will not complete until this task does.
    if self._parent is not None:
      self._parent.completeAfter(self)

  @staticmethod
  def getCurrent():
    """Get the currently executing task.

    @return: The currently executing Task or None if no current task.
    @rtype: Task or None
    """
    return getattr(Task._current, "value", None)

  @property
  def state(self):
    """Get the state of this task.
    """
    return self._state

  @property
  def parent(self):
    """Get the parent of this task.
    """
    return self._parent

  @property
  def started(self):
    """True if this task has been started.

    A task is started if start(), startAfter() or cancel() has been
    called on it.
    """
    return self._state is not Task.State.NEW

  @property
  def completed(self):
    """True if this task has finished execution or has been cancelled.
    """
    s = self._state
    return s is Task.State.SUCCEEDED or s is Task.State.FAILED

  @property
  def succeeded(self):
    """True if this task successfully finished execution.
    """
    return self._state is Task.State.SUCCEEDED

  @property
  def failed(self):
    """True if this task failed or was cancelled.
    """
    return self._state is Task.State.FAILED

  @property
  def result(self):
    """If the task has completed successfully then holds the
    return value of the task, otherwise raises AttributeError.
    """
    if self.succeeded:
      # Follow the chain when a task's result was itself a task.
      task = self
      while isinstance(task._result, Task):
        task = task._result
      return task._result
    else:
      raise AttributeError("result only available on successful tasks")

  def start(self, immediate=False, threadPool=None):
    """Start this task now.

    @param immediate: If True the task is pushed ahead of any other (waiting)
    tasks on the task queue.
    @type immediate: bool

    @raise TaskError: If this task has already been started or
    cancelled.
    """
    self.startAfter(None, immediate, threadPool)

  def startAfter(self, other, immediate=False, threadPool=None):
    """Start this task after other tasks have completed.

    This task is cancelled (transition to Task.State.FAILED state) if any of the
    other tasks fail.

    @param other: The task or a list of tasks to start after.
    @type other: L{Task} or C{list}(L{Task})

    @param immediate: If True the task is pushed ahead of any other (waiting)
    tasks on the task queue.
    @type immediate: bool

    @param threadPool: An optional thread pool to start this task on.
    @type threadPool: L{ThreadPool} or none

    @raise TaskError: If this task has already been started or
    cancelled.
    """
    otherTasks = _makeTasks(other)
    if threadPool is None:
      threadPool = getDefaultThreadPool()

    self._lock.acquire()
    try:
      if self._state is not Task.State.NEW:
        raise TaskError("task already started")
      self._state = Task.State.WAITING_FOR_START
      # The +1 accounts for the direct _startAfterCallback call below.
      self._startAfterCount = len(otherTasks) + 1
      self._immediate = immediate
    finally:
      self._lock.release()

    for t in otherTasks:
      t.addCallback(lambda t=t, tp=threadPool: self._startAfterCallback(t, tp))

    self._startAfterCallback(self, threadPool)

  def _startAfterCallback(self, task, threadPool):
    """Callback that is called by each task we must start after.
    """
    callbacks = None

    self._lock.acquire()
    try:
      # If one task fails we should fail too
      if task.failed:
        self._startAfterFailures = True

      # Wait for all other tasks to complete
      self._startAfterCount -= 1
      if self._startAfterCount > 0:
        return

      # Someone may have eg. cancelled us already
      if self._state is not Task.State.WAITING_FOR_START:
        return

      if self._startAfterFailures:
        self._state = Task.State.FAILED
        callbacks = self._callbacks
        self._callbacks = None
      else:
        self._state = Task.State.RUNNING
    finally:
      self._lock.release()

    if callbacks is None:
      threadPool.queueJob(self._execute, front=self._immediate)
    else:
      # Failed before starting: run completion callbacks outside the lock.
      for callback in callbacks:
        callback()

  def _execute(self):
    """Actually execute this task.

    This should typically be run on a background thread.
    """
    if self._state is not Task.State.RUNNING:
      assert self._state is Task.State.FAILED, "should have been cancelled"
      return

    callbacks = None

    try:
      old = self.getCurrent()
      self._current.value = self
      # Don't hold onto the func after it has been executed so it can
      # be garbage collected.
      func = self._func
      self._func = None
      try:
        if func is not None:
          result = func()
        else:
          result = None
      finally:
        self._current.value = old

      # If the result of the task was another task
      # then our result will be the same as that other
      # task's result. So make sure we don't complete
      # before the other task does.
      if isinstance(result, Task):
        self.completeAfter(result)

      self._lock.acquire()
      try:
        self._result = result
        if self._state is Task.State.RUNNING:
          if not self._completeAfterCount:
            callbacks = self._callbacks
            self._callbacks = None
            if not self._completeAfterFailures:
              self._state = Task.State.SUCCEEDED
            else:
              self._state = Task.State.FAILED
          else:
            self._state = Task.State.WAITING_FOR_COMPLETE
        else:
          assert self._state is Task.State.FAILED, "should have been cancelled"
      finally:
        self._lock.release()

    except Exception:
      # Capture the exception and traceback via sys.exc_info(): the
      # previous 'except Exception, e' is Python-2-only syntax and
      # prevented the module from even parsing under Python 3.
      e, trace = sys.exc_info()[1:]
      self._lock.acquire()
      try:
        self._exception = e
        self._trace = trace
        if self._state is Task.State.RUNNING:
          if not self._completeAfterCount:
            callbacks = self._callbacks
            self._callbacks = None
          self._state = Task.State.FAILED
        else:
          self._state = Task.State.WAITING_FOR_COMPLETE
      finally:
        self._lock.release()

    if callbacks:
      for callback in callbacks:
        callback()

  def completeAfter(self, other):
    """Make sure this task doesn't complete until other tasks have completed.

    @param other: The Task or list of Tasks to wait for.
    @type other: L{Task} or C{list}(L{Task})

    @raise TaskError: If this task has already finished executing.
    """
    otherTasks = _makeTasks(other)

    self._lock.acquire()
    try:
      if self.completed:
        raise TaskError("Task function has already finished executing.")
      self._completeAfterCount += len(otherTasks)
    finally:
      self._lock.release()

    for t in otherTasks:
      t.addCallback(lambda t=t: self._completeAfterCallback(t))

  def _completeAfterCallback(self, task):
    """Callback that is called by each task we must complete after.
    """
    callbacks = None

    self._lock.acquire()
    try:
      self._completeAfterCount -= 1
      if task.failed:
        self._completeAfterFailures = True

      if self._state is Task.State.WAITING_FOR_COMPLETE and self._completeAfterCount == 0:
        # '_result' only exists when the task function ran to completion.
        if hasattr(self, "_result") and not self._completeAfterFailures:
          self._state = Task.State.SUCCEEDED
        else:
          self._state = Task.State.FAILED
        callbacks = self._callbacks
        self._callbacks = None
    finally:
      self._lock.release()

    if callbacks:
      for callback in callbacks:
        callback()

  def cancel(self):
    """Cancel this task if it hasn't already started.

    Completes the task, setting its state to Task.State.FAILED.

    @raise TaskError: if the task has already completed.
    """
    self._lock.acquire()
    try:
      if self.completed:
        raise TaskError("Task already completed")

      self._state = Task.State.FAILED
      callbacks = self._callbacks
      self._callbacks = None
    finally:
      self._lock.release()

    for callback in callbacks:
      callback()

  def addCallback(self, callback):
    """Register a callback to be run when this task is complete.

    If the task has already completed the callback is invoked
    immediately, otherwise it is queued to run on completion.

    @param callback: The callback to add.
    @type callback: any callable
    """
    # Decide under the lock whether to queue or to call, but invoke the
    # callback only AFTER releasing the lock. The previous version
    # called it while holding self._lock, which could deadlock if the
    # callback touched this task again (e.g. addCallback/completeAfter),
    # and was inconsistent with every other completion path in this
    # class, all of which invoke callbacks after releasing the lock.
    callNow = False
    self._lock.acquire()
    try:
      if self._callbacks is not None:
        self._callbacks.append(callback)
      else:
        callNow = True
    finally:
      self._lock.release()
    if callNow:
      callback()
Index: cake/cake/test/__init__.py
===================================================================
--- cake/cake/test/__init__.py (revision 0)
+++ cake/cake/test/__init__.py (working copy)
@@ -0,0 +1,25 @@
+"""Unit Tests.
+"""
+
+import unittest
+
+_modules = [
+ "cake.test.task",
+ ]
+
+def suite():
+ loader = unittest.TestLoader()
+ s = unittest.TestSuite()
+ for name in _modules:
+ print name
+ s.addTests(loader.loadTestsFromName(name))
+ return s
+
+def run():
+ s = suite()
+ runner = unittest.TextTestRunner(verbosity=2)
+ return runner.run(s)
+
+if __name__ == "__main__":
+ import sys
+ sys.exit(not run().wasSuccessful())
Index: cake/cake/test/asyncresult.py
===================================================================
--- cake/cake/test/asyncresult.py (revision 0)
+++ cake/cake/test/asyncresult.py (working copy)
@@ -0,0 +1,151 @@
+"""AsyncResult Unit Tests.
+
+Tests for AsyncResult and @waitForAsyncResult decorator.
+"""
+
+import unittest
+import threading
+import sys
+from cake.library import waitForAsyncResult, flatten, DeferredResult, AsyncResult
+from cake.task import Task
+
+class AsyncResultTests(unittest.TestCase):
+
+ def testCallNoAsync(self):
+
+ @waitForAsyncResult
+ def makeTuple(*args):
+ return tuple(args)
+
+ @waitForAsyncResult
+ def makeDict(**kwargs):
+ return dict(kwargs)
+
+ self.assertEqual(makeTuple(), ())
+ self.assertEqual(makeTuple(1, 2, 3), (1, 2, 3))
+
+ self.assertEqual(makeDict(), {})
+ self.assertEqual(makeDict(x=1, y=2), {"x": 1, "y": 2})
+
+ def testCallWithAsyncResultArgs(self):
+
+ @waitForAsyncResult
+ def makeArgs(*args, **kwargs):
+ return (args, kwargs)
+
+ def returnValue(value):
+ return value
+
+ t1 = Task(lambda: returnValue(1))
+ t2 = Task(lambda: returnValue(2))
+ t3 = Task(lambda: returnValue(3))
+
+ r1 = DeferredResult(t1)
+ r2 = DeferredResult(t2)
+ r3 = DeferredResult(t3)
+
+ result = makeArgs(r1, r2, x=r3)
+
+ assert isinstance(result, AsyncResult)
+
+ e = threading.Event()
+ result.task.addCallback(e.set)
+
+ t1.start()
+ t2.start()
+ t3.start()
+
+ e.wait(0.5)
+
+ self.assertTrue(result.task.completed)
+ self.assertTrue(result.task.succeeded)
+
+ args, kwargs = result.result
+
+ self.assertEqual(args, (1, 2))
+ self.assertEqual(kwargs, {"x": 3})
+
+ def testCallWithNestedAsyncResultArgs(self):
+
+ @waitForAsyncResult
+ def makeArgs(*args, **kwargs):
+ return (args, kwargs)
+
+ def returnValue(value):
+ return value
+
+ t1 = Task(lambda: returnValue(1))
+ t2 = Task(lambda: returnValue(2))
+ t3 = Task(lambda: returnValue(3))
+ t4 = Task(lambda: returnValue(4))
+
+ r1 = DeferredResult(t1)
+ r2 = DeferredResult(t2)
+ r3 = DeferredResult(t3)
+ r4 = DeferredResult(t4)
+
+ r5 = makeArgs(r1, r2)
+
+ r6 = makeArgs([r1, r4], x=r3, y=r5)
+
+ assert isinstance(r6, AsyncResult)
+
+ e = threading.Event()
+ r6.task.addCallback(e.set)
+
+ t1.start()
+ t2.start()
+ t3.start()
+ t4.start()
+
+ e.wait(0.5)
+
+ self.assertTrue(r5.task.succeeded)
+ self.assertTrue(r6.task.succeeded)
+
+ args, kwargs = r5.result
+ self.assertEqual(args, (1, 2))
+ self.assertEqual(kwargs, {})
+
+ args, kwargs = r6.result
+
+ self.assertEqual(args, ([1, 4],))
+ self.assertEqual(kwargs, {"x": 3, "y": ((1, 2), {})})
+
+ def testFlattenNoAsync(self):
+ self.assertEqual(flatten([]), [])
+ self.assertEqual(flatten([1, 2, 3]), [1, 2, 3])
+ self.assertEqual(flatten([1, [2, 3], 4]), [1, 2, 3, 4])
+ self.assertEqual(flatten([[1, 2], [3, [4, 5], 6], 7]), [1, 2, 3, 4, 5, 6, 7])
+
+ def testFlattenWithAsync(self):
+
+ def makeAsync(value):
+ task = Task(lambda: value)
+ task.start()
+ return DeferredResult(task)
+
+ value = makeAsync([
+ makeAsync([1, 2]),
+ [3, makeAsync([4, 5]), makeAsync(6)],
+ makeAsync(7)
+ ])
+
+ result = flatten(value)
+
+ self.assertTrue(isinstance(result, AsyncResult))
+
+ e = threading.Event()
+
+ result.task.addCallback(e.set)
+
+ e.wait(0.5)
+
+ self.assertTrue(result.task.succeeded)
+
+ self.assertEqual(result.result, [1, 2, 3, 4, 5, 6, 7])
+
+if __name__ == "__main__":
+ suite = unittest.TestLoader().loadTestsFromTestCase(AsyncResultTests)
+ runner = unittest.TextTestRunner(verbosity=2)
+ sys.exit(not runner.run(suite).wasSuccessful())
Index: cake/cake/test/path.py
===================================================================
--- cake/cake/test/path.py (revision 0)
+++ cake/cake/test/path.py (working copy)
@@ -0,0 +1,248 @@
+"""Path Unit Tests.
+"""
+
+import unittest
+import os.path
+import os
+import sys
+import platform
+
+class PathTests(unittest.TestCase):
+
+ def testAbsPath(self):
+ from cake.path import absPath
+ # Just test it runs
+ self.assertEqual(absPath(__file__), os.path.abspath(__file__))
+
+ def testAddPrefix(self):
+ from cake.path import addPrefix
+ self.assertEqual(addPrefix(".dat", "lib"), "lib.dat")
+ self.assertEqual(addPrefix("file", "lib"), "libfile")
+ self.assertEqual(addPrefix("file.dat", "lib"), "libfile.dat")
+ self.assertEqual(addPrefix("/file.dat", "lib"), "/libfile.dat")
+ self.assertEqual(addPrefix("/path/to/file", "lib"), "/path/to/libfile")
+ self.assertEqual(addPrefix("/path/to/file.dat", "lib"), "/path/to/libfile.dat")
+
+ def testBaseName(self):
+ from cake.path import baseName
+ self.assertEqual(baseName(".dat"), ".dat")
+ self.assertEqual(baseName("file"), "file")
+ self.assertEqual(baseName("file.dat"), "file.dat")
+ self.assertEqual(baseName("/path/to/file"), "file")
+ self.assertEqual(baseName("/path/to/file.dat"), "file.dat")
+
+ def testBaseNameWithoutExtension(self):
+ from cake.path import baseNameWithoutExtension
+ self.assertEqual(baseNameWithoutExtension(".dat"), "")
+ self.assertEqual(baseNameWithoutExtension("file"), "file")
+ self.assertEqual(baseNameWithoutExtension("file.dat"), "file")
+ self.assertEqual(baseNameWithoutExtension("/path/to/file"), "file")
+ self.assertEqual(baseNameWithoutExtension("/path/to/file.dat"), "file")
+
+ def testCommonPath(self):
+ from cake.path import commonPath
+ self.assertEqual(commonPath("", ""), "")
+ self.assertEqual(commonPath(".", ".."), "")
+ self.assertEqual(commonPath("/.", "/.."), "")
+ self.assertEqual(commonPath("/./", "/./.."), "/.")
+ self.assertEqual(commonPath("/..", "/../"), "/..")
+ self.assertEqual(commonPath("./", "./"), ".")
+ self.assertEqual(commonPath("a", "a"), "a")
+ self.assertEqual(commonPath("a", "ab"), "")
+ self.assertEqual(commonPath("a/b", "a/c"), "a")
+ self.assertEqual(commonPath("ab/c", "a"), "")
+ self.assertEqual(commonPath("ab/c", "ab"), "ab")
+ self.assertEqual(commonPath("a/b/c", "a/b/d"), "a/b")
+ self.assertEqual(commonPath("a/b/cd", "a/b/c"), "a/b")
+ self.assertEqual(commonPath("a/bc/d", "a/bcd/e"), "a")
+ self.assertEqual(commonPath("a/b/c", "a/b/c/d"), "a/b/c")
+
+ def testDirName(self):
+ from cake.path import dirName
+ self.assertEqual(dirName(".dat"), "")
+ self.assertEqual(dirName("file"), "")
+ self.assertEqual(dirName("file.dat"), "")
+ self.assertEqual(dirName("/path/to/file"), "/path/to")
+ self.assertEqual(dirName("/path/to/file.dat"), "/path/to")
+
+ def testExists(self):
+ from cake.path import exists
+ # Just test it runs
+ self.assertEqual(exists(__file__), os.path.exists(__file__))
+
+ def testExpandVars(self):
+ from cake.path import expandVars
+ self.assertEqual(expandVars("", {}), "")
+ self.assertEqual(expandVars("foo", {}), "foo")
+ self.assertEqual(expandVars("$", {}), "{MISSING_SYMBOL_}")
+ self.assertEqual(expandVars("$$", {}), "$")
+ self.assertEqual(expandVars("${}", {}), "{MISSING_SYMBOL_}")
+ self.assertEqual(expandVars("$var", {}), "{MISSING_SYMBOL_var}")
+ self.assertEqual(
+ expandVars("$var/$foo", {}),
+ "{MISSING_SYMBOL_var}/{MISSING_SYMBOL_foo}"
+ )
+ self.assertEqual(expandVars("${var}", {}), "{MISSING_SYMBOL_var}")
+ self.assertEqual(expandVars("${var}", {"var": "x"}), "x")
+ self.assertEqual(expandVars("${var}", {"var": "$x/$x", "x": "foo"}), "foo/foo")
+
+ def testExtension(self):
+ from cake.path import extension
+ self.assertEqual(extension(""), "")
+ self.assertEqual(extension("."), "")
+ self.assertEqual(extension(".."), "")
+ self.assertEqual(extension("/."), "")
+ self.assertEqual(extension("/.."), "")
+ self.assertEqual(extension("foo/."), "")
+ self.assertEqual(extension("foo/.."), "")
+ self.assertEqual(extension("./"), "")
+ self.assertEqual(extension(".foo"), "")
+ self.assertEqual(extension(".foo."), ".")
+ self.assertEqual(extension(".foo.bar"), ".bar")
+ self.assertEqual(extension("foo/.bar"), "")
+ self.assertEqual(extension("foo.bar"), ".bar")
+ self.assertEqual(extension("foo.bar.baz"), ".baz")
+ self.assertEqual(extension("foo/baz"), "")
+ self.assertEqual(extension("foo.bar/baz"), "")
+ self.assertEqual(extension("foo.bar/baz.blah"), ".blah")
+
+ def testFileSystemPath(self):
+ # Tests are only valid on case-insensitive platforms
+ if os.path.normcase('aBcD') == 'aBcD':
+ return
+
+ from cake.path import fileSystemPath
+ self.assertEqual(fileSystemPath(""), "")
+ self.assertEqual(fileSystemPath("."), ".")
+
+ fileName = "aBcD.tXt"
+ f = open(fileName, "wt")
+ f.close()
+ try:
+ self.assertEqual(fileSystemPath("abcd.txt"), fileName)
+ self.assertEqual(fileSystemPath("./abcd.txt"), "./" + fileName)
+ finally:
+ os.remove(fileName)
+
+ dirName = "WhaT"
+ os.mkdir(dirName)
+ try:
+ path = dirName + "/" + fileName
+ f = open(path, "wt")
+ f.close()
+ try:
+ self.assertEqual(fileSystemPath("whAT/aBCd.txt"), path)
+ self.assertEqual(fileSystemPath("./whAT/aBCd.txt"), "./" + path)
+ self.assertEqual(fileSystemPath("whAT/.."), dirName + "/..")
+ self.assertEqual(fileSystemPath("whAT/../WHat"), dirName + "/../" + dirName)
+ self.assertEqual(
+ fileSystemPath("./whAT/../WHAT/./aBCd.txt"),
+ "./" + dirName + "/../" + dirName + "/./" + fileName,
+ )
+ finally:
+ os.remove(path)
+ finally:
+ os.rmdir(dirName)
+
+ def testHasExtension(self):
+ from cake.path import hasExtension
+ self.assertFalse(hasExtension(""))
+ self.assertFalse(hasExtension("."))
+ self.assertFalse(hasExtension("/."))
+ self.assertFalse(hasExtension("/.."))
+ self.assertFalse(hasExtension(".."))
+ self.assertFalse(hasExtension("..foo"))
+ self.assertFalse(hasExtension(".hidden"))
+ self.assertTrue(hasExtension(".hidden.foo"))
+ self.assertFalse(hasExtension("dir/.hidden"))
+ self.assertTrue(hasExtension("foo.txt"))
+ self.assertTrue(hasExtension("foo."))
+ self.assertTrue(hasExtension("foo.c"))
+ self.assertTrue(hasExtension("foo.bar.baz"))
+ self.assertTrue(hasExtension("/foo.bar"))
+ self.assertTrue(hasExtension("baz/foo.bar"))
+ self.assertTrue(hasExtension("baz.blah/foo.bar"))
+ self.assertFalse(hasExtension("foo"))
+ self.assertFalse(hasExtension("foo.bar/"))
+ self.assertFalse(hasExtension("foo.bar/foo"))
+ self.assertFalse(hasExtension("foo/baz.bar\\blah"))
+ self.assertFalse(hasExtension("foo\\baz.bar/blah"))
+ self.assertTrue(hasExtension("foo/baz\\blah.bar"))
+ self.assertTrue(hasExtension("foo\\baz/blah.bar"))
+
+ def testIsAbs(self):
+ from cake.path import isAbs
+ # Just test it runs
+ self.assertEqual(isAbs(__file__), os.path.isabs(__file__))
+
+ def testIsDir(self):
+ from cake.path import isDir
+ # Just test it runs
+ self.assertEqual(isDir(__file__), os.path.isdir(__file__))
+
+ def testIsFile(self):
+ from cake.path import isFile
+ # Just test it runs
+ self.assertEqual(isFile(__file__), os.path.isfile(__file__))
+
+ def testIsMount(self):
+ from cake.path import isMount
+ # Just test it runs
+ self.assertEqual(isMount(__file__), os.path.ismount(__file__))
+
+ def testJoin(self):
+ from cake.path import join
+ self.assertEqual(join(), "")
+ self.assertEqual(join("a"), "a")
+ self.assertEqual(join("a", "b"), os.path.join("a", "b"))
+ self.assertEqual(join(["a"]), ["a"])
+ self.assertEqual(join(["a", "b"]), ["a", "b"])
+ self.assertEqual(join("a", ["b"]), [os.path.join("a", "b")])
+ self.assertEqual(join("a", ["b", "c"]), [
+ os.path.join("a", "b"),
+ os.path.join("a", "c"),
+ ])
+ self.assertEqual(join("a", ["b", "c"], "d"), [
+ os.path.join("a", "b", "d"),
+ os.path.join("a", "c", "d"),
+ ])
+ self.assertEqual(join(["a", "b"], ["c", "d"]), [
+ os.path.join("a", "c"),
+ os.path.join("a", "d"),
+ os.path.join("b", "c"),
+ os.path.join("b", "d"),
+ ])
+
+ def testRelativePath(self):
+ from cake.system import isWindows
+ from cake.path import relativePath
+
+ self.assertEqual(relativePath("", ""), ".")
+ self.assertEqual(relativePath("a", "a"), ".")
+ self.assertEqual(relativePath("a", "ab"), ".." + os.path.sep + "a")
+ self.assertEqual(relativePath("a/b", "a/c"), ".." + os.path.sep + "b")
+ self.assertEqual(relativePath("ab/c", "a"), ".." + os.path.sep + "ab" + os.path.sep + "c")
+ self.assertEqual(relativePath("ab/c", "ab"), "c")
+ self.assertEqual(relativePath("a/b/c", "a/b"), "c")
+ self.assertEqual(relativePath("a/b/c", "a/b/"), "c")
+ self.assertEqual(relativePath("a/b/c", "a/b/d"), ".." + os.path.sep + "c")
+ self.assertEqual(relativePath("a/b/cd", "a/b/c"), ".." + os.path.sep + "cd")
+ self.assertEqual(
+ relativePath("a/bc/d", "a/bcd/e"),
+ ".." + os.path.sep + ".." + os.path.sep + "bc" + os.path.sep + "d",
+ )
+ self.assertEqual(relativePath("a/b/c", "a/b/c/d"), "..")
+ self.assertEqual(relativePath("a/b/c/d", "a/b"), "c" + os.path.sep + "d")
+ self.assertEqual(relativePath("a/b/c/d", "a/b/"), "c" + os.path.sep + "d")
+
+ if isWindows():
+ self.assertEqual(relativePath("c:", "d:"), "c:")
+ self.assertEqual(relativePath("c:\\", "d:\\"), "c:\\")
+ self.assertEqual(relativePath("c:\\ab", "d:\\dc"), "c:\\ab")
+ self.assertEqual(relativePath("c:\\ab", "c:\\dc"), "..\\ab")
+ self.assertEqual(relativePath("\\\\unc1", "\\\\unc2"), "\\\\unc1")
+
+if __name__ == "__main__":
+ suite = unittest.TestLoader().loadTestsFromTestCase(PathTests)
+ runner = unittest.TextTestRunner(verbosity=2)
+ sys.exit(not runner.run(suite).wasSuccessful())
Index: cake/cake/test/task.py
===================================================================
--- cake/cake/test/task.py (revision 0)
+++ cake/cake/test/task.py (working copy)
@@ -0,0 +1,464 @@
+"""Task Unit Tests.
+"""
+
+import unittest
+import threading
+import sys
+
+import cake.task
+
+class TaskTests(unittest.TestCase):
+
+ def testTaskFunctionExecutedExactlyOnce(self):
+ result = []
+ def f():
+ result.append(None)
+
+ e = threading.Event()
+ t = cake.task.Task(f)
+ t.addCallback(e.set)
+
+ self.assertFalse(t.started)
+ self.assertFalse(t.completed)
+ self.assertFalse(t.succeeded)
+ self.assertFalse(t.failed)
+
+ t.start()
+
+ self.assertTrue(t.started)
+
+ e.wait(0.5)
+
+ self.assertTrue(t.completed)
+ self.assertTrue(t.started)
+ self.assertTrue(t.succeeded)
+ self.assertFalse(t.failed)
+ self.assertEqual(len(result), 1)
+
+ def testFailingTask(self):
+ def f():
+ raise RuntimeError()
+
+ e = threading.Event()
+ t = cake.task.Task(f)
+ t.addCallback(e.set)
+
+ t.start()
+
+ e.wait(0.5)
+
+ self.assertTrue(t.completed)
+ self.assertTrue(t.started)
+ self.assertFalse(t.succeeded)
+ self.assertTrue(t.failed)
+
+ def testStartAfter(self):
+ result = []
+ def a():
+ result.append("a")
+ def b():
+ result.append("b")
+
+ eb = threading.Event()
+ ta = cake.task.Task(a)
+ tb = cake.task.Task(b)
+ tb.addCallback(eb.set)
+
+ tb.startAfter(ta)
+
+ self.assertTrue(tb.started)
+ self.assertFalse(ta.started)
+
+ ta.start()
+
+ self.assertTrue(ta.started)
+
+ eb.wait(0.5)
+
+ self.assertTrue(tb.completed)
+ self.assertTrue(ta.succeeded)
+ self.assertTrue(tb.started)
+ self.assertTrue(tb.succeeded)
+ self.assertEqual(result, ["a", "b"])
+
+ def testStartAfterCompletedTask(self):
+ result = []
+ def a():
+ result.append("a")
+ def b():
+ result.append("b")
+
+ ea = threading.Event()
+ eb = threading.Event()
+ ta = cake.task.Task(a)
+ tb = cake.task.Task(b)
+ ta.addCallback(ea.set)
+ tb.addCallback(eb.set)
+
+ ta.start()
+
+ self.assertTrue(ta.started)
+
+ ea.wait(0.5)
+
+ self.assertTrue(ta.completed)
+
+ tb.startAfter(ta)
+
+ self.assertTrue(tb.started)
+
+ eb.wait(0.5)
+
+ self.assertTrue(tb.completed)
+ self.assertTrue(ta.succeeded)
+ self.assertTrue(tb.started)
+ self.assertTrue(tb.succeeded)
+ self.assertEqual(result, ["a", "b"])
+
+ def testStartAfterFailedTask(self):
+ result = []
+ def a():
+ result.append("a")
+ raise RuntimeError()
+ def b():
+ result.append("b")
+
+ eb = threading.Event()
+ ta = cake.task.Task(a)
+ tb = cake.task.Task(b)
+ tb.addCallback(eb.set)
+ tb.startAfter(ta)
+
+ self.assertTrue(tb.started)
+ self.assertFalse(ta.started)
+
+ ta.start()
+
+ self.assertTrue(ta.started)
+
+ eb.wait(0.5)
+
+ self.assertTrue(tb.completed)
+ self.assertTrue(tb.failed)
+ self.assertTrue(tb.started)
+ self.assertTrue(tb.failed)
+ self.assertEqual(result, ["a"])
+
+ def testCompleteAfter(self):
+
+ result = []
+
+ def a():
+ result.append("a")
+ def b():
+ result.append("b")
+ t = cake.task.Task(b)
+ t.start()
+ cake.task.Task.getCurrent().completeAfter(t)
+
+ def c():
+ result.append("c")
+
+ ec = threading.Event()
+ ta = cake.task.Task(a)
+ tc = cake.task.Task(c)
+ tc.addCallback(ec.set)
+ tc.startAfter(ta)
+ ta.start()
+
+ ec.wait(0.5)
+
+ self.assertTrue(tc.completed)
+ self.assertEqual(result, ["a", "b", "c"])
+
+ def testStartAfterMultiple(self):
+
+ result = []
+ def a():
+ result.append("a")
+ def b():
+ result.append("b")
+ def c():
+ result.append("c")
+
+ ec = threading.Event()
+ ta = cake.task.Task(a)
+ tb = cake.task.Task(b)
+ tc = cake.task.Task(c)
+ tc.addCallback(ec.set)
+ tc.startAfter([ta, tb])
+
+ self.assertTrue(tc.started)
+ self.assertFalse(ta.started)
+ self.assertFalse(tb.started)
+
+ ta.start()
+
+ self.assertFalse(tc.completed)
+
+ tb.start()
+
+ ec.wait(0.5)
+
+ self.assertTrue(tc.completed)
+ self.assertTrue(ta.succeeded)
+ self.assertTrue(tb.succeeded)
+ self.assertTrue(tc.succeeded)
+ self.assertTrue(result in [["a", "b", "c"], ["b", "a", "c"]])
+
+ def testStartAfterMultipleSomeFail(self):
+
+ result = []
+ def a():
+ raise Exception()
+ def b():
+ result.append("b")
+ def c():
+ result.append("c")
+
+ eb = threading.Event()
+ ec = threading.Event()
+ ta = cake.task.Task(a)
+ tb = cake.task.Task(b)
+ tc = cake.task.Task(c)
+ tb.addCallback(eb.set)
+ tc.addCallback(ec.set)
+ tc.startAfter([ta, tb])
+
+ self.assertTrue(tc.started)
+ self.assertFalse(ta.started)
+ self.assertFalse(tb.started)
+
+ tb.start()
+
+ eb.wait(0.5)
+
+ self.assertTrue(tb.completed)
+ self.assertTrue(tb.succeeded)
+
+ self.assertFalse(tc.completed)
+
+ ta.start()
+
+ ec.wait(0.5)
+
+ self.assertTrue(tc.completed)
+ self.assertTrue(ta.failed)
+ self.assertTrue(tb.succeeded)
+ self.assertTrue(tc.failed)
+ self.assertEqual(result, ["b"])
+
+ def testMultipleSubTasks(self):
+ result = []
+
+ def a():
+ result.append("a")
+ t = cake.task.Task.getCurrent()
+
+ def b1():
+ self.assertTrue(cake.task.Task.getCurrent() is t1)
+ result.append("b1")
+
+ def b2():
+ self.assertTrue(cake.task.Task.getCurrent() is t2)
+ result.append("b2")
+
+ t1 = cake.task.Task(b1)
+ t1.start()
+
+ t2 = cake.task.Task(b2)
+ t2.start()
+
+ self.assertTrue(t1 is not t)
+ self.assertTrue(t2 is not t)
+ self.assertTrue(t1 is not t2)
+
+ t.completeAfter([t1, t2])
+
+ def c():
+ result.append("c")
+
+ ec = threading.Event()
+ ta = cake.task.Task(a)
+ tc = cake.task.Task(c)
+ tc.addCallback(ec.set)
+ tc.startAfter(ta)
+ ta.start()
+
+ ec.wait(0.5)
+
+ self.assertTrue(tc.completed)
+ self.assertTrue(tc.succeeded)
+ self.assertTrue(result in [
+ ["a", "b1", "b2", "c"],
+ ["a", "b2", "b1", "c"],
+ ])
+
+ def testFailedSubTasksFailsParent(self):
+
+ result = []
+
+ def a():
+ result.append("a")
+
+ def b():
+ result.append("b")
+ raise RuntimeError()
+
+ t = cake.task.Task(b)
+ t.start()
+
+ def c():
+ result.append("c")
+
+ ec = threading.Event()
+ ta = cake.task.Task(a)
+ tc = cake.task.Task(c)
+ tc.addCallback(ec.set)
+ tc.startAfter(ta)
+ ta.start()
+
+ ec.wait(0.5)
+
+ self.assertTrue(tc.completed)
+ self.assertTrue(ta.failed)
+ self.assertTrue(tc.failed)
+ self.assertEqual(result, ["a", "b"])
+
+ def testCompleteAfterMultipleSomeFail(self):
+
+ result = []
+
+ def a():
+ result.append("a")
+
+ def b1():
+ raise Exception()
+
+ def b2():
+ result.append("b2")
+
+ def c():
+ result.append("c")
+
+ tb1 = cake.task.Task(b1)
+ tb2 = cake.task.Task(b2)
+
+ ta = cake.task.Task(a)
+ ta.completeAfter([tb1, tb2])
+
+ ec = threading.Event()
+ tc = cake.task.Task(c)
+ tc.addCallback(ec.set)
+ tc.startAfter(ta)
+
+ ta.start()
+
+ self.assertFalse(tc.completed)
+ self.assertFalse(ta.completed)
+
+ tb2.start()
+
+ self.assertFalse(tc.completed)
+ self.assertFalse(ta.completed)
+
+ tb1.start()
+
+ ec.wait(0.5)
+
+ self.assertTrue(tc.completed)
+ self.assertTrue(ta.failed)
+ self.assertTrue(tb1.failed)
+ self.assertTrue(tb2.succeeded)
+ self.assertTrue(tc.failed)
+ self.assertTrue(result in [["a", "b2"], ["b2", "a"]])
+
+ def testCancelBeforeStart(self):
+
+ def a():
+ pass
+
+ ta = cake.task.Task(a)
+
+ ta.cancel()
+
+ self.assertTrue(ta.started)
+ self.assertTrue(ta.completed)
+ self.assertFalse(ta.succeeded)
+ self.assertTrue(ta.failed)
+
+ def testCancelAfterCompleteThrows(self):
+
+ def a():
+ pass
+
+ ea = threading.Event()
+ ta = cake.task.Task(a)
+ ta.addCallback(ea.set)
+
+ ta.start()
+
+ ea.wait(0.5)
+
+ self.assertTrue(ta.completed)
+ self.assertRaises(cake.task.TaskError, ta.cancel)
+
+ def testCancelWhileExecutingFailsTask(self):
+
+ def a():
+ cake.task.Task.getCurrent().cancel()
+
+ ea = threading.Event()
+ ta = cake.task.Task(a)
+ ta.addCallback(ea.set)
+
+ ta.start()
+
+ ea.wait(0.5)
+
+ self.assertTrue(ta.completed)
+ self.assertTrue(ta.started)
+ self.assertTrue(ta.completed)
+ self.assertFalse(ta.succeeded)
+ self.assertTrue(ta.failed)
+
+ def testTaskResult(self):
+
+ def a():
+ return "a"
+
+ e = threading.Event()
+ t = cake.task.Task(a)
+ t.addCallback(e.set)
+
+ t.start()
+
+ e.wait(0.5)
+
+ self.assertTrue(t.completed)
+ self.assertEqual(t.result, "a")
+
+ def testNestedTaskResult(self):
+
+ def a():
+ tb = cake.task.Task(b)
+ tb.start()
+ return tb
+
+ def b():
+ return "b"
+
+ e = threading.Event()
+ ta = cake.task.Task(a)
+ ta.addCallback(e.set)
+ ta.start()
+
+ e.wait(0.5)
+
+ self.assertTrue(ta.succeeded)
+ self.assertEqual(ta.result, "b")
+
+if __name__ == "__main__":
+ suite = unittest.TestLoader().loadTestsFromTestCase(TaskTests)
+ runner = unittest.TextTestRunner(verbosity=2)
+ sys.exit(not runner.run(suite).wasSuccessful())
Index: cake/cake/test/threadpool.py
===================================================================
--- cake/cake/test/threadpool.py (revision 0)
+++ cake/cake/test/threadpool.py (working copy)
@@ -0,0 +1,44 @@
+"""ThreadPool Unit Tests.
+"""
+
+import unittest
+import threading
+import sys
+
+import cake.threadpool
+
+class ThreadPoolTests(unittest.TestCase):
+
+ def testSingleJob(self):
+ result = []
+ e = threading.Event()
+ def job():
+ result.append(None)
+ e.set()
+
+ threadPool = cake.threadpool.ThreadPool(numWorkers=10)
+ threadPool.queueJob(job)
+ e.wait()
+
+ self.assertEqual(len(result), 1)
+
+ def testMultipleJobs(self):
+ jobCount = 50
+ result = []
+ s = threading.Semaphore(0)
+ def job():
+ result.append(None)
+ s.release()
+
+ threadPool = cake.threadpool.ThreadPool(numWorkers=10)
+ for _ in xrange(jobCount):
+ threadPool.queueJob(job)
+ for _ in xrange(jobCount):
+ s.acquire()
+
+ self.assertEqual(len(result), jobCount)
+
+if __name__ == "__main__":
+ suite = unittest.TestLoader().loadTestsFromTestCase(ThreadPoolTests)
+ runner = unittest.TextTestRunner(verbosity=2)
+ sys.exit(not runner.run(suite).wasSuccessful())
Index: cake/cake/threadpool.py
===================================================================
--- cake/cake/threadpool.py (revision 0)
+++ cake/cake/threadpool.py (working copy)
@@ -0,0 +1,148 @@
+"""Thread Pooling Class and Utilities.
+
+Provides a simple thread-pool utility for managing execution of multiple jobs
+in parallel on separate threads.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import threading
+import os
+import sys
+import platform
+import traceback
+import atexit
+import collections
+
+import cake.system
+
+if cake.system.isWindows():
+ try:
+ import win32api
+ def getProcessorCount():
+ """Return the number of processors/cores in the current system.
+
+ Useful for determining the maximum parallelism of the current system.
+
+ @return: The number of processors/cores in the current system.
+ @rtype: int
+ """
+ return win32api.GetSystemInfo()[5]
+ except ImportError:
+ def getProcessorCount():
+ try:
+ return int(os.environ["NUMBER_OF_PROCESSORS"])
+ except KeyError:
+ return 1
+else:
+ def getProcessorCount():
+ try:
+ import multiprocessing
+ return multiprocessing.cpu_count()
+ except ImportError:
+ return 1
+
+class ThreadPool(object):
+ """Manages a pool of worker threads that it delegates jobs to.
+
+ Usage::
+ pool = ThreadPool(numWorkers=4)
+ for i in xrange(50):
+ pool.queueJob(lambda i=i: someFunction(i))
+ """
+ def __init__(self, numWorkers):
+ """Initialise the thread pool.
+
+ @param numWorkers: Initial number of worker threads to start.
+ @type numWorkers: int
+ """
+ self._jobQueue = collections.deque()
+ self._workers = []
+ self._wakeCondition = threading.Condition(threading.Lock())
+ self._finished = False
+
+ # Create the worker threads.
+ for _ in xrange(numWorkers):
+ worker = threading.Thread(target=self._runThread)
+ worker.daemon = True
+ worker.start()
+ self._workers.append(worker)
+
+ # Make sure the threads are joined before program exit.
+ atexit.register(self._shutdown)
+
+ def _shutdown(self):
+ """Shutdown the ThreadPool.
+
+ On shutdown we complete any currently executing jobs then exit. Jobs
+ waiting on the queue may not be executed.
+ """
+ # Signal that we've finished.
+ self._finished = True
+
+ # Clear the queue and wake any waiting threads.
+ self._wakeCondition.acquire()
+ try:
+ self._jobQueue.clear()
+ self._wakeCondition.notifyAll()
+ finally:
+ self._wakeCondition.release()
+
+ # Wait for the threads to finish.
+ for thread in self._workers:
+ thread.join()
+
+ @property
+ def numWorkers(self):
+ """Returns the number of worker threads available to process jobs.
+
+ @return: The number of worker threads available to process jobs.
+ @rtype: int
+ """
+ return len(self._workers)
+
+ def queueJob(self, callable, front=False):
+ """Queue a new job to be executed by the thread pool.
+
+ @param callable: The job to queue.
+ @type callable: any callable
+
+ @param front: If True then put the job at the front of the
+ thread pool's job queue, otherwise append it to the end of
+ the job queue.
+ @type front: boolean
+ """
+ self._wakeCondition.acquire()
+ try:
+ if not self._finished: # Don't add jobs if we've shutdown.
+ wasEmpty = len(self._jobQueue) == 0
+ if front:
+ self._jobQueue.appendleft(callable)
+ else:
+ self._jobQueue.append(callable)
+ if wasEmpty:
+ self._wakeCondition.notifyAll()
+ finally:
+ self._wakeCondition.release()
+
+ def _runThread(self):
+ """Process jobs continuously until dismissed.
+ """
+ while not self._finished:
+ self._wakeCondition.acquire()
+ try:
+ try:
+ job = self._jobQueue.popleft()
+ except IndexError:
+ self._wakeCondition.wait() # No more jobs. Sleep until another is pushed.
+ continue
+ finally:
+ self._wakeCondition.release()
+
+ try:
+ job()
+ except Exception:
+ sys.stderr.write("Uncaught Exception:\n")
+ sys.stderr.write(traceback.format_exc())
Index: cake/cake/version.py
===================================================================
--- cake/cake/version.py (revision 0)
+++ cake/cake/version.py (working copy)
@@ -0,0 +1,14 @@
+"""Version number.
+
+@var __version__: Version string for Cake. Useful for printing to screen.
+
+@var __version_info__: Version tuple for Cake. Useful for comparing
+whether one version is newer than another.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+__version_info__ = (0, 9, 7)
+__version__ = '.'.join(str(v) for v in __version_info__)
Index: cake/cake/zipping.py
===================================================================
--- cake/cake/zipping.py (revision 0)
+++ cake/cake/zipping.py (working copy)
@@ -0,0 +1,132 @@
+"""Zip File Utilities.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+import cake.filesys
+import os
+import os.path
+import time
+import zipfile
+import zlib
+
+def compressFile(source, target):
+ """Compress the contents of a file and write it to another file.
+
+ @param source: The path of the file to compress.
+ @type source: string
+ @param target: The path of the compressed file.
+ @type target: string
+ """
+ data = cake.filesys.readFile(source)
+ try:
+ data = zlib.compress(data, 1)
+ except zlib.error, e:
+ raise EnvironmentError(str(e))
+ cake.filesys.writeFile(target, data)
+
+def decompressFile(source, target):
+ """Decompress the contents of a file and write it to another file.
+
+ @param source: The path of the file to decompress.
+ @type source: string
+ @param target: The path of the decompressed file.
+ @type target: string
+ """
+ data = cake.filesys.readFile(source)
+ try:
+ data = zlib.decompress(data)
+ except zlib.error, e:
+ raise EnvironmentError(str(e))
+ cake.filesys.writeFile(target, data)
+
+def findFilesToCompress(sourcePath, includeMatch=None):
+ """Return a dictionary of files in a given directory.
+
+ @param sourcePath: The path to the file or directory to compress.
+ @type sourcePath: string
+ @param includeMatch: A function that returns True when a path should
+ be included in the zip.
+ @type includeMatch: any callable
+ """
+ toZip = {}
+ if os.path.isdir(sourcePath):
+ # Remove any trailing slash
+ searchDir = os.path.normpath(sourcePath)
+ for path in cake.filesys.walkTree(searchDir, includeMatch=includeMatch):
+ toZip[os.path.normcase(path)] = path
+ else:
+ toZip[os.path.normcase(sourcePath)] = sourcePath
+
+ return toZip
+
+def isDirectoryInfo(zipInfo):
+ """Determine whether a ZipInfo structure corresponds to a directory.
+
+ @param zipInfo: ZipInfo to check.
+ @type zipInfo: zipfile.ZipInfo
+
+ @return: True if the zipInfo corresponds to a directory.
+ @rtype: bool
+ """
+ return (zipInfo.external_attr & 0x00000010L) != 0L # FILE_ATTRIBUTE_DIRECTORY
+
+def writeFileToZip(zipFile, sourcePath, targetPath):
+ """Write a source file or directory to a zip.
+
+ @param zipFile: The zip file object to write to.
+ @type zipFile: zipfile.ZipFile
+ @param sourcePath: The path to the source file or directory.
+ @type sourcePath: string
+ @param targetPath: The target path within the zip.
+ @type targetPath: string
+ """
+ targetPath = targetPath.replace("\\", "/") # Zips use forward slashes
+ utcTime = time.gmtime(os.stat(sourcePath).st_mtime)
+
+ if os.path.isdir(sourcePath):
+ if not targetPath.endswith("/"):
+ targetPath += "/" # Trailing slash denotes directory for some zip packages
+
+ zi = zipfile.ZipInfo(targetPath, utcTime[0:6])
+ zi.compress_type = zipfile.ZIP_DEFLATED
+ zi.external_attr = 0x00000010L # FILE_ATTRIBUTE_DIRECTORY
+ zipFile.writestr(zi, "")
+ else:
+ f = open(sourcePath, "rb")
+ try:
+ data = f.read()
+ finally:
+ f.close()
+
+ zi = zipfile.ZipInfo(targetPath, utcTime[0:6])
+ zi.compress_type = zipfile.ZIP_DEFLATED
+ zi.external_attr = 0x00000020L # FILE_ATTRIBUTE_ARCHIVE
+ zipFile.writestr(zi, data)
+
+def zipFiles(sourcePath, targetZip):
+ """Zip a file or the contents of a directory.
+
+ @param sourcePath: The source file or directory to zip.
+ @type sourcePath: string
+ @param targetZip: The path of the target zip.
+ @type targetZip: string
+
+ @return: A list of paths to the files and directories
+ compressed.
+ @rtype: list of string
+ """
+ toZip = findFilesToCompress(sourcePath)
+ cake.filesys.makeDirs(os.path.dirname(targetZip))
+ f = open(targetZip, "wb")
+ try:
+ zipFile = zipfile.ZipFile(f, "w")
+ for originalPath in toZip.itervalues():
+ sourceFilePath = os.path.join(sourcePath, originalPath)
+ writeFileToZip(zipFile, sourceFilePath, originalPath)
+ zipFile.close()
+ finally:
+ f.close()
+ return toZip.values()
Index: cake/run.py
===================================================================
--- cake/run.py (revision 0)
+++ cake/run.py (working copy)
@@ -0,0 +1,16 @@
+"""Minimal run script.
+
+This script is just a convenient method for running Cake if it has not been
+installed to your Python directory via 'python setup.py install'. If Cake
+has been installed it can instead be run by simply typing 'cake'.
+
+@see: Cake Build System (http://sourceforge.net/projects/cake-build)
+@copyright: Copyright (c) 2010 Lewis Baker, Stuart McMahon.
+@license: Licensed under the MIT license.
+"""
+
+if __name__ == '__main__':
+ """Main entrypoint.
+ """
+ import cake.main # deferred import: only needed when run as a script
+ cake.main.execute() # hand control to Cake's standard command-line entry point
Index: intern/cycles/SConscript
===================================================================
--- intern/cycles/SConscript (revision 42633)
+++ intern/cycles/SConscript (working copy)
@@ -30,6 +30,7 @@
incs.extend('#intern/guardedalloc #source/blender/makesrna #source/blender/makesdna'.split())
incs.extend('#source/blender/blenloader ../../source/blender/makesrna/intern'.split())
incs.extend('#extern/glew/include'.split())
+incs.extend('#${BF_BUILDDIR}/source/blender/makesrna/intern'.split())
incs.append(cycles['BF_OIIO_INC'])
incs.append(cycles['BF_BOOST_INC'])
incs.append(cycles['BF_PYTHON_INC'])
@@ -39,6 +40,10 @@
else:
cxxflags.append('-ffast-math'.split())
+prerequisites = [
+ '#/source/blender/makesrna/SConscript',
+ ]
+
# optimized kernel
if env['WITH_BF_RAYOPTIMIZATION']:
optim_cxxflags = []
@@ -53,9 +58,9 @@
optim_sources = [path.join('kernel', 'kernel_optimized.cpp')]
cycles_optim = cycles.Clone()
- cycles_optim.BlenderLib('bf_intern_cycles_optimized', optim_sources, incs, optim_defs, libtype=['intern'], priority=[10], compileflags=[None], cxx_compileflags=optim_cxxflags)
+ cycles_optim.BlenderLib('bf_intern_cycles_optimized', optim_sources, incs, optim_defs, libtype=['intern'], priority=[10], compileflags=[None], cxx_compileflags=optim_cxxflags, PREREQUISITES=prerequisites)
-cycles.BlenderLib('bf_intern_cycles', sources, incs, defs, libtype=['intern'], priority=[0], compileflags=[None], cxx_compileflags=cxxflags)
+cycles.BlenderLib('bf_intern_cycles', sources, incs, defs, libtype=['intern'], priority=[0], compileflags=[None], cxx_compileflags=cxxflags, PREREQUISITES=prerequisites)
# cuda kernel binaries
if env['WITH_BF_CYCLES_CUDA_BINARIES']:
Index: intern/itasc/SConscript
===================================================================
--- intern/itasc/SConscript (revision 42633)
+++ intern/itasc/SConscript (working copy)
@@ -9,7 +9,7 @@
defs = []
-if env['PLATFORM'] == 'win32':
+if env['OURPLATFORM'] in ('win32-vc', 'win32-mingw', 'linuxcross', 'win64-vc'):
defs.append('EIGEN_DONT_ALIGN_STATICALLY')
env.BlenderLib ('bf_intern_itasc', sources, Split(incs), defs, libtype=['intern','player'], priority=[20,100] )
Index: SConscript
===================================================================
--- SConscript (revision 0)
+++ SConscript (working copy)
@@ -0,0 +1,403 @@
+#
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# The Original Code is Copyright (C) 2011, Blender Foundation
+# All rights reserved.
+#
+# The Original Code is: all of this file.
+#
+# Contributor(s): Stuart McMahon.
+#
+# ***** END GPL LICENSE BLOCK *****
+#
+# Despite its name this is the root-level build file for blender when using
+# the Cake build system.
+# It is a mirror of the program building, installing and packaging section
+# of the SConstruct file (used by the SCons build system).
+#
+import os
+import os.path
+
+from cake.tools import env
+
+import btools
+
+VERSION = btools.VERSION # This is used in creating the local config directories
+VERSION_RELEASE_CYCLE = btools.VERSION_RELEASE_CYCLE # e.g. "release"; gates addons_contrib install below
+
+B = env.module # This was saved in the env on creation.
+
+scripts = env.script.execute([ # kick off the sub-build scripts; their tasks gate the link step
+ 'intern/SConscript',
+ 'extern/SConscript',
+ 'source/SConscript',
+ ])
+
+def buildProgram(): # Link blender (and optionally blenderplayer) from the libs the sub-scripts registered.
+ # now that we have read all SConscripts, we know what
+ # libraries will be built. Create list of
+ # libraries to give as objects to linking phase
+ mainlist = []
+ for tp in B.possible_types:
+ if (not tp == 'player') and (not tp == 'player2'): # player libs are linked separately below
+ mainlist += B.create_blender_liblist(env, tp)
+
+ if B.arguments.get('BF_PRIORITYLIST', '0')=='1': # optional: print suggested link priorities
+ B.propose_priorities()
+
+ dobj = B.buildinfo(env, "dynamic") + B.resources
+ creob = B.creator(env)
+ thestatlibs, thelibincs = B.setup_staticlibs(env)
+ thesyslibs = B.setup_syslibs(env)
+
+ if 'blender' in B.targets or not env['WITH_BF_NOBLENDER']:
+ env.BlenderProg(B.root_build_dir, "blender", creob + mainlist + thestatlibs + dobj, thesyslibs, [B.root_build_dir+'/lib'] + thelibincs, 'blender')
+ if env['WITH_BF_PLAYER']:
+ playerlist = B.create_blender_liblist(env, 'player')
+ playerlist += B.create_blender_liblist(env, 'player2')
+ playerlist += B.create_blender_liblist(env, 'intern')
+ playerlist += B.create_blender_liblist(env, 'extern')
+ env.BlenderProg(B.root_build_dir, "blenderplayer", dobj + playerlist + thestatlibs, thesyslibs, [B.root_build_dir+'/lib'] + thelibincs, 'blenderplayer')
+
+def installProgram(): # Stage binaries, scripts, datafiles, plugins and DLLs into BF_INSTALLDIR (mirror of SConstruct's install section).
+ #-- binaries
+ blenderinstall = []
+ if env['OURPLATFORM']=='darwin': # on OSX, copy the whole .app bundle tree
+ for prg in B.program_list:
+ bundle = '%s.app' % prg[0]
+ bundledir = os.path.dirname(bundle)
+ for dp, dn, df in os.walk(bundle):
+ if '.svn' in dn:
+ dn.remove('.svn')
+ if '_svn' in dn:
+ dn.remove('_svn')
+ dir=env['BF_INSTALLDIR']+dp[len(bundledir):]
+ source=[dp+os.sep+f for f in df]
+ blenderinstall.append(env.Install(dir=dir,source=source))
+ else:
+ blenderinstall = env.Install(dir=env['BF_INSTALLDIR'], source=B.program_list)
+
+ #-- local path = config files in install dir: installdir\VERSION
+ #- don't do config and scripts for darwin, it is already in the bundle
+ dotblendlist = []
+ datafileslist = []
+ datafilestargetlist = []
+ dottargetlist = []
+ scriptinstall = []
+
+ if env['OURPLATFORM']!='darwin':
+ dotblenderinstall = []
+ for targetdir,srcfile in zip(dottargetlist, dotblendlist):
+ td, tf = os.path.split(targetdir)
+ dotblenderinstall.append(env.Install(dir=td, source=srcfile))
+ for targetdir,srcfile in zip(datafilestargetlist, datafileslist):
+ td, tf = os.path.split(targetdir)
+ dotblenderinstall.append(env.Install(dir=td, source=srcfile))
+
+ if env['WITH_BF_PYTHON']:
+ #-- local/VERSION/scripts
+ scriptpaths=['release/scripts']
+ for scriptpath in scriptpaths:
+ for dp, dn, df in os.walk(scriptpath):
+ if '.svn' in dn:
+ dn.remove('.svn')
+ if '_svn' in dn:
+ dn.remove('_svn')
+ if '__pycache__' in dn: # py3.2 cache dir
+ dn.remove('__pycache__')
+
+ # only for testing builds
+ if VERSION_RELEASE_CYCLE == "release" and "addons_contrib" in dn:
+ dn.remove('addons_contrib')
+
+ dir = os.path.join(env['BF_INSTALLDIR'], VERSION)
+ dir += os.sep + os.path.basename(scriptpath) + dp[len(scriptpath):]
+
+ source=[os.path.join(dp, f) for f in df if not f.endswith(".pyc")]
+ # To ensure empty dirs are created too
+ if len(source)==0:
+ env.Execute(Mkdir(dir)) # NOTE(review): Mkdir is an SCons script global -- confirm it is in scope in this Cake port
+ scriptinstall.append(env.Install(dir=dir,source=source))
+ if env['WITH_BF_CYCLES']:
+ # cycles python code
+ dir=os.path.join(env['BF_INSTALLDIR'], VERSION, 'scripts', 'addons','cycles')
+ source=os.listdir('intern/cycles/blender/addon')
+ if '.svn' in source: source.remove('.svn')
+ if '_svn' in source: source.remove('_svn')
+ if '__pycache__' in source: source.remove('__pycache__')
+ source=['intern/cycles/blender/addon/'+s for s in source]
+ scriptinstall.append(env.Install(dir=dir,source=source))
+
+ # cycles kernel code
+ dir=os.path.join(env['BF_INSTALLDIR'], VERSION, 'scripts', 'addons','cycles', 'kernel')
+ source=os.listdir('intern/cycles/kernel')
+ if '.svn' in source: source.remove('.svn')
+ if '_svn' in source: source.remove('_svn')
+ if '__pycache__' in source: source.remove('__pycache__')
+ source.remove('kernel.cpp') # exclude build-only inputs; kernel sources are shipped for runtime compilation
+ source.remove('CMakeLists.txt')
+ source.remove('svm')
+ source.remove('osl')
+ source=['intern/cycles/kernel/'+s for s in source]
+ source.append('intern/cycles/util/util_color.h')
+ source.append('intern/cycles/util/util_math.h')
+ source.append('intern/cycles/util/util_transform.h')
+ source.append('intern/cycles/util/util_types.h')
+ scriptinstall.append(env.Install(dir=dir,source=source))
+ # svm
+ dir=os.path.join(env['BF_INSTALLDIR'], VERSION, 'scripts', 'addons','cycles', 'kernel', 'svm')
+ source=os.listdir('intern/cycles/kernel/svm')
+ if '.svn' in source: source.remove('.svn')
+ if '_svn' in source: source.remove('_svn')
+ if '__pycache__' in source: source.remove('__pycache__')
+ source=['intern/cycles/kernel/svm/'+s for s in source]
+ scriptinstall.append(env.Install(dir=dir,source=source))
+
+ # licenses
+ dir=os.path.join(env['BF_INSTALLDIR'], VERSION, 'scripts', 'addons','cycles', 'license')
+ source=os.listdir('intern/cycles/doc/license')
+ if '.svn' in source: source.remove('.svn')
+ if '_svn' in source: source.remove('_svn')
+ if '__pycache__' in source: source.remove('__pycache__')
+ source.remove('CMakeLists.txt')
+ source=['intern/cycles/doc/license/'+s for s in source]
+ scriptinstall.append(env.Install(dir=dir,source=source))
+
+ # cuda binaries
+ if env['WITH_BF_CYCLES_CUDA_BINARIES']:
+ dir=os.path.join(env['BF_INSTALLDIR'], VERSION, 'scripts', 'addons','cycles', 'lib')
+ for arch in env['BF_CYCLES_CUDA_BINARIES_ARCH']:
+ kernel_build_dir = os.path.join(B.root_build_dir, 'intern/cycles/kernel')
+ cubin_file = os.path.join(kernel_build_dir, "kernel_%s.cubin" % arch)
+ scriptinstall.append(env.Install(dir=dir,source=cubin_file))
+
+ if env['WITH_BF_INTERNATIONAL']:
+ internationalpaths=['release' + os.sep + 'datafiles']
+
+ def check_path(path, member): # true when 'member' is a path component of 'path'
+ return (member in path.split(os.sep))
+
+ for intpath in internationalpaths:
+ for dp, dn, df in os.walk(intpath):
+ if '.svn' in dn:
+ dn.remove('.svn')
+ if '_svn' in dn:
+ dn.remove('_svn')
+
+ # we only care about release/datafiles/fonts, release/datafiles/locales
+ if check_path(dp, "fonts") or check_path(dp, "locale"):
+ pass
+ else:
+ continue
+
+ dir = os.path.join(env['BF_INSTALLDIR'], VERSION)
+ dir += os.sep + os.path.basename(intpath) + dp[len(intpath):]
+
+ source=[os.path.join(dp, f) for f in df if not f.endswith(".pyc")]
+ # To ensure empty dirs are created too
+ if len(source)==0:
+ env.Execute(Mkdir(dir))
+ scriptinstall.append(env.Install(dir=dir,source=source))
+
+ #-- icons
+ if env['OURPLATFORM']=='linux':
+ iconlist = []
+ icontargetlist = []
+
+ for tp, tn, tf in os.walk('release/freedesktop/icons'):
+ if '.svn' in tn:
+ tn.remove('.svn')
+ if '_svn' in tn:
+ tn.remove('_svn')
+ for f in tf:
+ iconlist.append(os.path.join(tp, f))
+ icontargetlist.append( os.path.join(*([env['BF_INSTALLDIR']] + tp.split(os.sep)[2:] + [f])) )
+
+ iconinstall = []
+ for targetdir,srcfile in zip(icontargetlist, iconlist):
+ td, tf = os.path.split(targetdir)
+ iconinstall.append(env.Install(dir=td, source=srcfile))
+
+ # dlls for linuxcross
+ # TODO - add more libs, for now this lets blenderlite run
+ if env['OURPLATFORM']=='linuxcross':
+ dir=env['BF_INSTALLDIR']
+ source = []
+
+ if env['WITH_BF_OPENMP']:
+ source += ['../lib/windows/pthreads/lib/pthreadGC2.dll']
+
+ scriptinstall.append(env.Install(dir=dir, source=source))
+
+ #-- plugins
+ pluglist = []
+ plugtargetlist = []
+ for tp, tn, tf in os.walk('release/plugins'):
+ if '.svn' in tn:
+ tn.remove('.svn')
+ if '_svn' in tn:
+ tn.remove('_svn')
+ df = tp[8:] # remove 'release/' (note: rebinds the walk's file-list variable df)
+ for f in tf:
+ pluglist.append(os.path.join(tp, f))
+ plugtargetlist.append( os.path.join(env['BF_INSTALLDIR'], VERSION, df, f) )
+
+
+ # header files for plugins
+ pluglist.append('source/blender/blenpluginapi/documentation.h')
+ plugtargetlist.append(os.path.join(env['BF_INSTALLDIR'], VERSION, 'plugins', 'include', 'documentation.h'))
+ pluglist.append('source/blender/blenpluginapi/externdef.h')
+ plugtargetlist.append(os.path.join(env['BF_INSTALLDIR'], VERSION, 'plugins', 'include', 'externdef.h'))
+ pluglist.append('source/blender/blenpluginapi/floatpatch.h')
+ plugtargetlist.append(os.path.join(env['BF_INSTALLDIR'], VERSION, 'plugins', 'include', 'floatpatch.h'))
+ pluglist.append('source/blender/blenpluginapi/iff.h')
+ plugtargetlist.append(os.path.join(env['BF_INSTALLDIR'], VERSION, 'plugins', 'include', 'iff.h'))
+ pluglist.append('source/blender/blenpluginapi/plugin.h')
+ plugtargetlist.append(os.path.join(env['BF_INSTALLDIR'], VERSION, 'plugins', 'include', 'plugin.h'))
+ pluglist.append('source/blender/blenpluginapi/util.h')
+ plugtargetlist.append(os.path.join(env['BF_INSTALLDIR'], VERSION, 'plugins', 'include', 'util.h'))
+ pluglist.append('source/blender/blenpluginapi/plugin.DEF')
+ plugtargetlist.append(os.path.join(env['BF_INSTALLDIR'], VERSION, 'plugins', 'include', 'plugin.def'))
+
+ plugininstall = []
+ # plugins in blender 2.5 don't work at the moment.
+ #for targetdir,srcfile in zip(plugtargetlist, pluglist):
+ # td, tf = os.path.split(targetdir)
+ # plugininstall.append(env.Install(dir=td, source=srcfile))
+
+ textlist = []
+ texttargetlist = []
+ for tp, tn, tf in os.walk('release/text'):
+ if '.svn' in tn:
+ tn.remove('.svn')
+ if '_svn' in tn:
+ tn.remove('_svn')
+ for f in tf:
+ textlist.append(tp+os.sep+f)
+
+ textinstall = env.Install(dir=env['BF_INSTALLDIR'], source=textlist)
+
+ if env['OURPLATFORM']=='darwin':
+ allinstall = [blenderinstall, plugininstall, textinstall]
+ elif env['OURPLATFORM']=='linux':
+ allinstall = [blenderinstall, dotblenderinstall, scriptinstall, plugininstall, textinstall, iconinstall]
+ else:
+ allinstall = [blenderinstall, dotblenderinstall, scriptinstall, plugininstall, textinstall]
+
+ if env['OURPLATFORM'] in ('win32-vc', 'win32-mingw', 'win64-vc', 'linuxcross'):
+ dllsources = []
+
+ if not env['OURPLATFORM'] in ('win32-mingw', 'linuxcross'):
+ # For MinGW and linuxcross static linking will be used
+ dllsources += ['${LCGDIR}/gettext/lib/gnu_gettext.dll']
+
+ #currently win64-vc doesn't appear to have libpng.dll
+ if env['OURPLATFORM'] != 'win64-vc':
+ dllsources += ['${BF_PNG_LIBPATH}/libpng.dll']
+
+ dllsources += ['${BF_ZLIB_LIBPATH}/zlib.dll']
+ # Used when linking to libtiff was dynamic
+ # keep it here until compilation on all platform would be ok
+ # dllsources += ['${BF_TIFF_LIBPATH}/${BF_TIFF_LIB}.dll']
+
+ if env['OURPLATFORM'] != 'linuxcross':
+ # pthreads library is already added
+ dllsources += ['${BF_PTHREADS_LIBPATH}/${BF_PTHREADS_LIB}.dll']
+
+ if env['WITH_BF_SDL']:
+ if env['OURPLATFORM'] == 'win64-vc':
+ pass # we link statically already to SDL on win64
+ else:
+ dllsources.append('${BF_SDL_LIBPATH}/SDL.dll')
+
+ if env['WITH_BF_PYTHON']:
+ if env['BF_DEBUG']:
+ dllsources.append('${BF_PYTHON_LIBPATH}/${BF_PYTHON_DLL}_d.dll')
+ else:
+ dllsources.append('${BF_PYTHON_LIBPATH}/${BF_PYTHON_DLL}.dll')
+
+ if env['WITH_BF_ICONV']:
+ if env['OURPLATFORM'] == 'win64-vc':
+ pass # we link statically to iconv on win64
+ elif not env['OURPLATFORM'] in ('win32-mingw', 'linuxcross'):
+ #gettext for MinGW and cross-compilation is compiled statically
+ dllsources += ['${BF_ICONV_LIBPATH}/iconv.dll']
+
+ if env['WITH_BF_OPENAL']:
+ dllsources.append('${LCGDIR}/openal/lib/OpenAL32.dll')
+ dllsources.append('${LCGDIR}/openal/lib/wrap_oal.dll')
+
+ if env['WITH_BF_SNDFILE']:
+ dllsources.append('${LCGDIR}/sndfile/lib/libsndfile-1.dll')
+
+ if env['WITH_BF_FFMPEG']:
+ dllsources += env['BF_FFMPEG_DLL'].split()
+
+ # Since the thumb handler is loaded by Explorer, architecture is
+ # strict: the x86 build fails on x64 Windows. We need to ship
+ # both builds in x86 packages.
+ if env['OURPLATFORM'] in ('win32-vc', 'win32-mingw', 'linuxcross'):
+ dllsources.append('${LCGDIR}/thumbhandler/lib/BlendThumb.dll')
+ dllsources.append('${LCGDIR}/thumbhandler/lib/BlendThumb64.dll')
+
+ if env['WITH_BF_OIIO']:
+ dllsources.append('${LCGDIR}/openimageio/bin/OpenImageIO.dll')
+
+ dllsources.append('#source/icons/blender.exe.manifest')
+
+ windlls = env.Install(dir=env['BF_INSTALLDIR'], source = dllsources)
+ allinstall += windlls
+
+ installtarget = env.Alias('install', allinstall)
+ bininstalltarget = env.Alias('install-bin', blenderinstall)
+
+ if 'blender' in B.targets:
+ blenderexe= env.Alias('blender', B.program_list)
+ Depends(blenderexe,installtarget) # NOTE(review): Depends is an SCons script global -- verify it is available in this Cake context
+
+ if env['WITH_BF_PLAYER']:
+ blenderplayer = env.Alias('blenderplayer', B.program_list)
+ Depends(blenderplayer,installtarget)
+
+ if not env['WITH_BF_GAMEENGINE']:
+ blendernogame = env.Alias('blendernogame', B.program_list)
+ Depends(blendernogame,installtarget)
+
+ if 'blenderlite' in B.targets:
+ blenderlite = env.Alias('blenderlite', B.program_list)
+ Depends(blenderlite,installtarget)
+
+def packageProgram(): # Optional packaging steps, driven by requested build targets.
+ if 'nsis' in B.targets: # Windows NSIS installer
+ nsisaction = env.Action(btools.NSIS_Installer, btools.NSIS_print)
+ nsiscmd = env.Command('nsisinstaller', None, nsisaction)
+ nsisalias = env.Alias('nsis', nsiscmd)
+
+ if 'buildslave' in B.targets: # buildbot packaging hook
+ buildslave_action = env.Action(btools.buildslave, btools.buildslave_print)
+ buildslave_cmd = env.Command('buildslave_exec', None, buildslave_action)
+ buildslave_alias = env.Alias('buildslave', buildslave_cmd)
+
+buildTask = env.engine.createTask(buildProgram) # link step runs once all sub-scripts' tasks finish
+buildTask.startAfter([s.task for s in scripts])
+
+if not env['WITHOUT_BF_INSTALL']:
+ installTask = env.engine.createTask(installProgram) # install after build
+ installTask.startAfter(buildTask)
+
+ packageTask = env.engine.createTask(packageProgram) # package after install
+ packageTask.startAfter(installTask)
Index: source/blender/blenlib/SConscript
===================================================================
--- source/blender/blenlib/SConscript (revision 42633)
+++ source/blender/blenlib/SConscript (working copy)
@@ -23,6 +23,6 @@
if env['OURPLATFORM'] == 'darwin':
if env['WITH_BF_OPENMP']:
- env.Append(CFLAGS=['-DPARALLEL=1'])
+ defs.append('PARALLEL=1')
env.BlenderLib ( 'bf_blenlib', sources, Split(incs), Split(defs), libtype=['core','player'], priority = [363,170], compileflags =cflags )
Index: source/blender/editors/render/SConscript
===================================================================
--- source/blender/editors/render/SConscript (revision 42633)
+++ source/blender/editors/render/SConscript (working copy)
@@ -8,6 +8,7 @@
incs += ' ../../gpu'
incs += ' ../../makesrna ../../render/extern/include #/intern/elbeem/extern'
incs += ' ../../blenloader'
+defs = []
if env['OURPLATFORM'] == 'linux':
cflags='-pthread'
@@ -19,13 +20,13 @@
if env['WITH_BF_QUICKTIME']:
incs += ' ../../quicktime'
- env.Append(CFLAGS=['-DWITH_QUICKTIME'])
+ defs.append('WITH_QUICKTIME')
if env['USE_QTKIT']:
- env.Append(CFLAGS=['-DUSE_QTKIT'])
+ defs.append('USE_QTKIT')
if env['OURPLATFORM'] == 'darwin':
if env['WITH_BF_OPENMP']:
- env.Append(CFLAGS=['-DPARALLEL=1'])
+ defs.append('PARALLEL=1')
-env.BlenderLib ( 'bf_editors_render', sources, Split(incs), [], libtype=['core'], priority=[45])
+env.BlenderLib ( 'bf_editors_render', sources, Split(incs), Split(defs), libtype=['core'], priority=[45])
Index: source/blender/ikplugin/SConscript
===================================================================
--- source/blender/ikplugin/SConscript (revision 42633)
+++ source/blender/ikplugin/SConscript (working copy)
@@ -8,7 +8,7 @@
defs.append('WITH_IK_ITASC')
-if env['PLATFORM'] == 'win32':
+if env['OURPLATFORM'] in ('win32-vc', 'win32-mingw', 'linuxcross', 'win64-vc'):
defs.append('EIGEN_DONT_ALIGN_STATICALLY')
env.BlenderLib ( 'bf_ikplugin', sources, Split(incs), defs, libtype=['core','player'], priority=[180, 190] )
Index: source/blender/makesdna/intern/SConscript
===================================================================
--- source/blender/makesdna/intern/SConscript (revision 42633)
+++ source/blender/makesdna/intern/SConscript (working copy)
@@ -50,23 +50,28 @@
if not (root_build_dir[0]==os.sep or root_build_dir[1]==':'):
targetdir = '#' + targetdir
-makesdna = makesdna_tool.Program (target = targetdir, source = source_files, LIBS=['bf_intern_guardedalloc', 'bf_blenlib'])
+prerequisites = [
+ '#/intern/guardedalloc/SConscript',
+ '#/source/blender/blenlib/SConscript',
+ ]
+makesdna = makesdna_tool.Program (target = targetdir, source = source_files, LIBS=['bf_intern_guardedalloc', 'bf_blenlib'], PREREQUISITES=prerequisites)
+
dna_dict = dna.Dictionary()
dna.Depends ('dna.c', makesdna)
dna.Depends ('dna.c', header_files)
if env['OURPLATFORM'] != 'linuxcross':
if env['OURPLATFORM'] in ('win32-vc', 'win64-vc', 'win32-mingw'):
- dna.Command ('dna.c', '', "\"" + root_build_dir+os.sep+"makesdna\" $TARGET")
+ dna_c = dna.Command ('dna.c', makesdna, "\"" + root_build_dir+os.sep+"makesdna.exe\" $TARGET")
else:
- dna.Command ('dna.c', '', "\"" + root_build_dir+os.sep+"makesdna\" $TARGET")
+ dna_c = dna.Command ('dna.c', makesdna, "\"" + root_build_dir+os.sep+"makesdna\" $TARGET")
else:
if USE_WINE:
- dna.Command ('dna.c', '', 'wine ' + root_build_dir+os.sep+"makesdna $TARGET")
+ dna_c = dna.Command ('dna.c', makesdna, 'wine ' + root_build_dir+os.sep+"makesdna.exe $TARGET")
else:
- dna.Command ('dna.c', '', root_build_dir+os.sep+"makesdna.exe $TARGET")
+ dna_c = dna.Command ('dna.c', makesdna, root_build_dir+os.sep+"makesdna.exe $TARGET")
# TODO, get WITH_DNA_GHASH working, see CMake's 'WITH_DNA_GHASH'
-obj = ['intern/dna.c', 'intern/dna_genfile.c']
+obj = dna_c + ['intern/dna_genfile.c']
Return ('obj')
Index: source/blender/makesrna/intern/SConscript
===================================================================
--- source/blender/makesrna/intern/SConscript (revision 42633)
+++ source/blender/makesrna/intern/SConscript (working copy)
@@ -143,10 +143,16 @@
if not (root_build_dir[0]==os.sep or root_build_dir[1]==':'):
targetpath = '#' + targetpath
+prerequisites = [
+ '#/intern/guardedalloc/SConscript',
+ '#/source/blender/makesdna/SConscript',
+ '#/source/blender/blenlib/SConscript',
+ ]
+
if env['OURPLATFORM'] == 'linux' and root_build_dir[0]==os.sep:
- makesrna = makesrna_tool.Program (target = targetpath, source = source_files, LIBS=['bf_intern_guardedalloc', 'bf_dna', 'bf_blenlib'])
+ makesrna = makesrna_tool.Program (target = targetpath, source = source_files, LIBS=['bf_intern_guardedalloc', 'bf_dna', 'bf_blenlib'], PREREQUISITES=prerequisites)
else:
- makesrna = makesrna_tool.Program (target = targetpath, source = source_files, LIBS=['bf_intern_guardedalloc', 'bf_dna', 'bf_blenlib'])
+ makesrna = makesrna_tool.Program (target = targetpath, source = source_files, LIBS=['bf_intern_guardedalloc', 'bf_dna', 'bf_blenlib'], PREREQUISITES=prerequisites)
rna_dict = rna.Dictionary()
rna.Depends (generated_files, makesrna)
@@ -156,21 +162,17 @@
if env['OURPLATFORM'] != 'linuxcross':
if env['OURPLATFORM'] in ('win32-vc', 'win64-vc', 'win32-mingw'):
- rna.Command (generated_files, '', "\"" + root_build_dir+os.sep+"makesrna.exe\" \"" + build_dir )
+ rna_files = rna.Command (generated_files, makesrna, "\"" + root_build_dir+os.sep+"makesrna.exe\" \"" + build_dir )
else:
- rna.Command (generated_files, '', "\"" + root_build_dir+os.sep+"makesrna\" \"" + build_dir + '"' )
+ rna_files = rna.Command (generated_files, makesrna, "\"" + root_build_dir+os.sep+"makesrna\" \"" + build_dir + '"' )
else:
- rna.Command (generated_files, '', root_build_dir+os.sep+"makesrna.exe " + build_dir)
-
if USE_WINE:
- rna.Command (generated_files, '', 'wine ' + root_build_dir+os.sep+"makesrna.exe " + build_dir)
+ rna_files = rna.Command (generated_files, makesrna, 'wine ' + root_build_dir+os.sep+"makesrna.exe " + build_dir)
else:
- rna.Command (generated_files, '', root_build_dir+os.sep+"makesrna.exe " + build_dir)
+ rna_files = rna.Command (generated_files, makesrna, root_build_dir+os.sep+"makesrna.exe " + build_dir)
-obj = ['intern/rna_access.c']
-for generated_file in generated_files:
- obj += ['intern/' + generated_file]
+obj = ['intern/rna_access.c'] + rna_files
Return ('obj')
File Metadata
Details
Mime Type
text/x-diff
Storage Engine
local-disk
Storage Format
Raw Data
Storage Handle
fb/66/c0041ca5f4d88b70228a24642487
Event Timeline
Log In to Comment