third_party: Update waf to version 2.0.25
author     Andreas Schneider <asn@samba.org>
           Wed, 4 Jan 2023 08:39:45 +0000 (09:39 +0100)
committer  Stefan Metzmacher <metze@samba.org>
           Fri, 6 Jan 2023 14:02:35 +0000 (14:02 +0000)
Signed-off-by: Andreas Schneider <asn@samba.org>
Reviewed-by: Stefan Metzmacher <metze@samba.org>
14 files changed:
buildtools/bin/waf
buildtools/wafsamba/wafsamba.py
third_party/waf/waflib/Configure.py
third_party/waf/waflib/Context.py
third_party/waf/waflib/TaskGen.py
third_party/waf/waflib/Tools/msvc.py
third_party/waf/waflib/Tools/python.py
third_party/waf/waflib/Utils.py
third_party/waf/waflib/extras/cpplint.py
third_party/waf/waflib/extras/fc_fujitsu.py [new file with mode: 0644]
third_party/waf/waflib/extras/gccdeps.py
third_party/waf/waflib/extras/pyqt5.py
third_party/waf/waflib/extras/sphinx.py
third_party/waf/waflib/extras/wafcache.py

diff --git a/buildtools/bin/waf b/buildtools/bin/waf
index d9cba343623e31014b1740e08f7bf1c9446e6579..f754b52a7bcc613c7612ee7f00a1a1a7038385c6 100755 (executable)
@@ -32,7 +32,7 @@ POSSIBILITY OF SUCH DAMAGE.
 
 import os, sys, inspect
 
-VERSION="2.0.24"
+VERSION="2.0.25"
 REVISION="x"
 GIT="x"
 INSTALL="x"
diff --git a/buildtools/wafsamba/wafsamba.py b/buildtools/wafsamba/wafsamba.py
index 79f352878a8778ac6ca5dadb3fd43b1fc5126cf4..17a188f5036487e67cd8dfa6f6289d1f452fb82e 100644 (file)
@@ -38,7 +38,7 @@ LIB_PATH="shared"
 
 os.environ['PYTHONUNBUFFERED'] = '1'
 
-if Context.HEXVERSION not in (0x2001800,):
+if Context.HEXVERSION not in (0x2001900,):
     Logs.error('''
 Please use the version of waf that comes with Samba, not
 a system installed version. See http://wiki.samba.org/index.php/Waf
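The check above pins Samba's build to the bundled waf release via Context.HEXVERSION. The constant appears to pack the version as (major << 24) | (minor << 16) | (patch << 8), which matches both the old and the new value; a minimal sketch of that assumption::

        def hexversion(major, minor, patch):
            # assumed packing; 2.0.24 -> 0x2001800, 2.0.25 -> 0x2001900
            return (major << 24) | (minor << 16) | (patch << 8)

        assert hexversion(2, 0, 24) == 0x2001800
        assert hexversion(2, 0, 25) == 0x2001900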
diff --git a/third_party/waf/waflib/Configure.py b/third_party/waf/waflib/Configure.py
index e7333948489644f2bd70d3cb31118a10d0a1badf..f6fdc4e94a79d724daefab889c250b2f2eb7c54f 100644 (file)
@@ -439,7 +439,7 @@ def find_program(self, filename, **kw):
 
        var = kw.get('var', '')
        if not var:
-               var = re.sub(r'[-.]', '_', filename[0].upper())
+               var = re.sub(r'\W', '_', filename[0].upper())
 
        path_list = kw.get('path_list', '')
        if path_list:
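The broader \W substitution in find_program() sanitizes every non-word character when deriving the configuration variable name, not just dashes and dots. A small illustration with made-up program names::

        import re

        for name in ('pkg-config', 'g++'):
            old = re.sub(r'[-.]', '_', name.upper())  # waf 2.0.24 behaviour
            new = re.sub(r'\W', '_', name.upper())    # waf 2.0.25 behaviour
            print(name, old, new)
        # pkg-config PKG_CONFIG PKG_CONFIG
        # g++ G++ G__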
diff --git a/third_party/waf/waflib/Context.py b/third_party/waf/waflib/Context.py
index 4a0130b24a0048577fd94f7808257382f3f29462..ee8c5c9c5dfaa27d4a9101662f174fdce5ccd0c0 100644 (file)
@@ -18,13 +18,13 @@ else:
        import imp
 
 # the following 3 constants are updated on each new release (do not touch)
-HEXVERSION=0x2001800
+HEXVERSION=0x2001900
 """Constant updated on new releases"""
 
-WAFVERSION="2.0.24"
+WAFVERSION="2.0.25"
 """Constant updated on new releases"""
 
-WAFREVISION="1af97c71f5a6756abf36d0f78ed8fd551596d7cb"
+WAFREVISION="2db0b41b2805cd5db3b55476c06b23c1e46d319f"
 """Git revision when the waf version is updated"""
 
 WAFNAME="waf"
diff --git a/third_party/waf/waflib/TaskGen.py b/third_party/waf/waflib/TaskGen.py
index 89f631699108a351e5514419409d79eb8a54913e..32468f03d3c765952064adf198fd000b3e9899ed 100644 (file)
@@ -400,7 +400,7 @@ def feature(*k):
        Decorator that registers a task generator method that will be executed when the
        object attribute ``feature`` contains the corresponding key(s)::
 
-               from waflib.Task import feature
+               from waflib.TaskGen import feature
                @feature('myfeature')
                def myfunction(self):
                        print('that is my feature!')
diff --git a/third_party/waf/waflib/Tools/msvc.py b/third_party/waf/waflib/Tools/msvc.py
index 026a4c7fc48ec92798ff212e31ec36d0b5db3fa2..d60f6702681aca5234ab307628df91ea7d80498c 100644 (file)
@@ -111,7 +111,7 @@ def options(opt):
 
 class MSVCVersion(object):
        def __init__(self, ver):
-               m = re.search('^(.*)\s+(\d+[.]\d+)', ver)
+               m = re.search(r'^(.*)\s+(\d+[.]\d+)', ver)
                if m:
                        self.name = m.group(1)
                        self.number = float(m.group(2))
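Making the pattern a raw string only silences the invalid-escape warnings newer Python versions emit for \s and \d inside ordinary string literals; the regex itself is unchanged. A quick check against an illustrative version string (the string is an assumption, not captured MSVC output)::

        import re

        m = re.search(r'^(.*)\s+(\d+[.]\d+)', 'Microsoft Visual Studio 14.0')
        if m:
            print(m.group(1), float(m.group(2)))  # Microsoft Visual Studio 14.0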
diff --git a/third_party/waf/waflib/Tools/python.py b/third_party/waf/waflib/Tools/python.py
index a23bd019335200098622e3344471af453e5b3f36..b2dd1a9bcc315b3be5d96a09def17746b93ebd2c 100644 (file)
@@ -53,7 +53,17 @@ py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True)
 Piece of Python code used in :py:class:`waflib.Tools.python.pyo` and :py:class:`waflib.Tools.python.pyc` for byte-compiling python files
 """
 
-DISTUTILS_IMP = ['from distutils.sysconfig import get_config_var, get_python_lib']
+DISTUTILS_IMP = """
+try:
+       from distutils.sysconfig import get_config_var, get_python_lib
+except ImportError:
+       from sysconfig import get_config_var, get_path
+       def get_python_lib(*k, **kw):
+               keyword='platlib' if kw.get('plat_specific') else 'purelib'
+               if 'prefix' in kw:
+                       return get_path(keyword, vars={'installed_base': kw['prefix'], 'platbase': kw['prefix']})
+               return get_path(keyword)
+""".splitlines()
 
 @before_method('process_source')
 @feature('py')
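The new DISTUTILS_IMP shim keeps the distutils import where it is still available and otherwise rebuilds get_python_lib() on top of sysconfig, which matters for Python 3.12+ where distutils has been removed. A hedged sketch of what the fallback resolves to (the printed paths are installation specific)::

        import sysconfig

        print(sysconfig.get_path('purelib'))           # site-packages for pure-Python modules
        print(sysconfig.get_path('platlib'))           # site-packages for compiled extensions
        print(sysconfig.get_config_var('INCLUDEPY'))   # include directory later used for PYEXT/PYEMBED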
@@ -219,7 +229,7 @@ def get_python_variables(self, variables, imports=None):
        try:
                out = self.cmd_and_log(self.env.PYTHON + ['-c', '\n'.join(program)], env=os_env)
        except Errors.WafError:
-               self.fatal('The distutils module is unusable: install "python-devel"?')
+               self.fatal('Could not run %r' % self.env.PYTHON)
        self.to_log(out)
        return_values = []
        for s in out.splitlines():
@@ -291,7 +301,8 @@ def python_cross_compile(self, features='pyembed pyext'):
 @conf
 def check_python_headers(conf, features='pyembed pyext'):
        """
-       Check for headers and libraries necessary to extend or embed python by using the module *distutils*.
+       Check for headers and libraries necessary to extend or embed python.
+       It may use the module *distutils* or sysconfig in newer Python versions.
        On success the environment variables xxx_PYEXT and xxx_PYEMBED are added:
 
        * PYEXT: for compiling python extensions
@@ -439,7 +450,7 @@ def check_python_headers(conf, features='pyembed pyext'):
                env.LIBPATH_PYEXT = env.LIBPATH_PYEMBED
                env.LIB_PYEXT = env.LIB_PYEMBED
 
-       conf.to_log("Include path for Python extensions (found via distutils module): %r\n" % (dct['INCLUDEPY'],))
+       conf.to_log("Found an include path for Python extensions: %r\n" % (dct['INCLUDEPY'],))
        env.INCLUDES_PYEXT = [dct['INCLUDEPY']]
        env.INCLUDES_PYEMBED = [dct['INCLUDEPY']]
 
@@ -452,15 +463,21 @@ def check_python_headers(conf, features='pyembed pyext'):
                env.append_unique('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])
 
        if env.CC_NAME == "msvc":
-               from distutils.msvccompiler import MSVCCompiler
-               dist_compiler = MSVCCompiler()
-               dist_compiler.initialize()
-               env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
-               env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
-               env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)
+               try:
+                       from distutils.msvccompiler import MSVCCompiler
+               except ImportError:
+                       # From https://github.com/python/cpython/blob/main/Lib/distutils/msvccompiler.py
+                       env.append_value('CFLAGS_PYEXT', [ '/nologo', '/Ox', '/MD', '/W3', '/GX', '/DNDEBUG'])
+                       env.append_value('CXXFLAGS_PYEXT', [ '/nologo', '/Ox', '/MD', '/W3', '/GX', '/DNDEBUG'])
+                       env.append_value('LINKFLAGS_PYEXT', ['/DLL', '/nologo', '/INCREMENTAL:NO'])
+               else:
+                       dist_compiler = MSVCCompiler()
+                       dist_compiler.initialize()
+                       env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
+                       env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
+                       env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)
 
-       # See if it compiles
-       conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Distutils not installed? Broken python installation? Get python-config now!')
+       conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Could not build a Python embedded interpreter')
 
 @conf
 def check_python_version(conf, minver=None):
@@ -506,17 +523,9 @@ def check_python_version(conf, minver=None):
                else:
                        # Finally, try to guess
                        if Utils.is_win32:
-                               (python_LIBDEST, pydir) = conf.get_python_variables(
-                                         ["get_config_var('LIBDEST') or ''",
-                                          "get_python_lib(standard_lib=0) or ''"])
+                               (pydir,) = conf.get_python_variables(["get_python_lib(standard_lib=0) or ''"])
                        else:
-                               python_LIBDEST = None
-                               (pydir,) = conf.get_python_variables( ["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
-                       if python_LIBDEST is None:
-                               if conf.env.LIBDIR:
-                                       python_LIBDEST = os.path.join(conf.env.LIBDIR, 'python' + pyver)
-                               else:
-                                       python_LIBDEST = os.path.join(conf.env.PREFIX, 'lib', 'python' + pyver)
+                               (pydir,) = conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
 
                if 'PYTHONARCHDIR' in conf.env:
                        # Check if --pythonarchdir was specified
@@ -526,7 +535,7 @@ def check_python_version(conf, minver=None):
                        pyarchdir = conf.environ['PYTHONARCHDIR']
                else:
                        # Finally, try to guess
-                       (pyarchdir, ) = conf.get_python_variables( ["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
+                       (pyarchdir, ) = conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
                        if not pyarchdir:
                                pyarchdir = pydir
 
@@ -585,13 +594,12 @@ def check_python_module(conf, module_name, condition=''):
                if ret == 'unknown version':
                        conf.fatal('Could not check the %s version' % module_name)
 
-               from distutils.version import LooseVersion
                def num(*k):
                        if isinstance(k[0], int):
-                               return LooseVersion('.'.join([str(x) for x in k]))
+                               return Utils.loose_version('.'.join([str(x) for x in k]))
                        else:
-                               return LooseVersion(k[0])
-               d = {'num': num, 'ver': LooseVersion(ret)}
+                               return Utils.loose_version(k[0])
+               d = {'num': num, 'ver': Utils.loose_version(ret)}
                ev = eval(condition, {}, d)
                if not ev:
                        conf.fatal('The %s version does not satisfy the requirements' % module_name)
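LooseVersion from distutils.version, which disappears together with distutils, is replaced by the private Utils.loose_version() helper, but the condition syntax accepted by check_python_module() stays the same since num and ver are still the names exposed to eval(). A hedged wscript sketch; the module name and version bound are only examples::

        def configure(conf):
            conf.load('python')
            conf.check_python_version((3, 6))
            conf.check_python_module('sphinx', condition='ver >= num(4, 0)')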
diff --git a/third_party/waf/waflib/Utils.py b/third_party/waf/waflib/Utils.py
index 669490ca908770e78b391408308ac3961587adac..ea0f7a9db8813f8c65279330108bba988ca11073 100644 (file)
@@ -452,6 +452,8 @@ def console_encoding():
                        pass
                else:
                        if codepage:
+                               if 65001 == codepage and sys.version_info < (3, 3):
+                                       return 'utf-8'
                                return 'cp%d' % codepage
        return sys.stdout.encoding or ('cp1252' if is_win32 else 'latin-1')
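The added branch only matters for interpreters older than 3.3, where the cp65001 codec alias for a UTF-8 Windows console does not exist yet, so returning 'utf-8' avoids a LookupError later on. A sketch of the distinction::

        import codecs, sys

        name = 'utf-8' if sys.version_info < (3, 3) else 'cp65001'
        codecs.lookup(name)  # cp65001 only became a known codec in Python 3.3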
 
@@ -868,6 +870,19 @@ def lib64():
                                return '64'
        return ''
 
+def loose_version(ver_str):
+       # private for the time being!
+       # see #2402
+       lst = re.split(r'([.]|\\d+|[a-zA-Z])', ver_str)
+       ver = []
+       for i, val in enumerate(lst):
+               try:
+                       ver.append(int(val))
+               except ValueError:
+                       if val != '.':
+                               ver.append(val)
+       return ver
+
 def sane_path(p):
        # private function for the time being!
        return os.path.abspath(os.path.expanduser(p))
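Utils.loose_version() splits a dotted version string into integer and alphabetic components so that list comparison orders releases numerically instead of lexically. A minimal sketch, assuming the bundled waflib is importable::

        from waflib import Utils

        assert Utils.loose_version('1.2.10') > Utils.loose_version('1.2.9')   # lexically '1.2.10' would sort lower
        assert Utils.loose_version('2.0.25') >= Utils.loose_version('2.0.24')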
diff --git a/third_party/waf/waflib/extras/cpplint.py b/third_party/waf/waflib/extras/cpplint.py
index 8cdd6ddacb36103d753562a014d8561eca2bc27f..afc09c9472ca40647fec2304233d28f3ba383302 100644 (file)
@@ -169,7 +169,7 @@ class cpplint(Task.Task):
         global critical_errors
         with cpplint_wrapper(get_cpplint_logger(self.env.CPPLINT_OUTPUT), self.env.CPPLINT_BREAK, self.env.CPPLINT_OUTPUT):
             params = {key: str(self.env[key]) for key in self.env if 'CPPLINT_' in key}
-            if params['CPPLINT_OUTPUT'] is 'waf':
+            if params['CPPLINT_OUTPUT'] == 'waf':
                 params['CPPLINT_OUTPUT'] = 'emacs'
             params['CPPLINT'] = self.env.get_flat('CPPLINT')
             cmd = Utils.subst_vars(CPPLINT_STR, params)
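The cpplint fix swaps an identity test for an equality test: `is` compares object identity, which only matched here by accident of string interning, and CPython 3.8+ warns about `is` with a literal. A quick demonstration::

        a = 'waf'
        b = ''.join(['w', 'a', 'f'])
        print(a == b)  # True: equal value
        print(a is b)  # False in CPython: same value, distinct object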
diff --git a/third_party/waf/waflib/extras/fc_fujitsu.py b/third_party/waf/waflib/extras/fc_fujitsu.py
new file mode 100644 (file)
index 0000000..cae676c
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_fujitsu.py
@@ -0,0 +1,52 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Detection of the Fujitsu Fortran compiler for ARM64FX
+
+import re
+from waflib.Tools import fc,fc_config,fc_scan
+from waflib.Configure import conf
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].append('fc_fujitsu')
+
+@conf
+def find_fujitsu(conf):
+       fc=conf.find_program(['frtpx'],var='FC')
+       conf.get_fujitsu_version(fc)
+       conf.env.FC_NAME='FUJITSU'
+       conf.env.FC_MOD_CAPITALIZATION='lower'
+
+@conf
+def fujitsu_flags(conf):
+       v=conf.env
+       v['_FCMODOUTFLAGS']=[]
+       v['FCFLAGS_DEBUG']=[]
+       v['FCFLAGS_fcshlib']=[]
+       v['LINKFLAGS_fcshlib']=[]
+       v['FCSTLIB_MARKER']=''
+       v['FCSHLIB_MARKER']=''
+
+@conf
+def get_fujitsu_version(conf,fc):
+       version_re=re.compile(r"frtpx\s*\(FRT\)\s*(?P<major>\d+)\.(?P<minor>\d+)\.",re.I).search
+       cmd=fc+['--version']
+       out,err=fc_config.getoutput(conf,cmd,stdin=False)
+       if out:
+               match=version_re(out)
+       else:
+               match=version_re(err)
+       if not match:
+               return(False)
+               conf.fatal('Could not determine the Fujitsu FRT Fortran compiler version.')
+       else:
+               k=match.groupdict()
+               conf.env['FC_VERSION']=(k['major'],k['minor'])
+
+def configure(conf):
+       conf.find_fujitsu()
+       conf.find_program('ar',var='AR')
+       conf.add_os_flags('ARFLAGS')
+       if not conf.env.ARFLAGS:
+               conf.env.ARFLAGS=['rcs']
+       conf.fc_flags()
+       conf.fc_add_flags()
+       conf.fujitsu_flags()
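The new extra registers frtpx with compiler_fc for linux and can also be loaded directly. A hedged wscript sketch, assuming the extras directory is on waf's tool path::

        def configure(conf):
            conf.load('fc_fujitsu')  # runs find_fujitsu(), fc_flags(), fc_add_flags(), fujitsu_flags()
            print(conf.env.FC, conf.env.FC_VERSION)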
diff --git a/third_party/waf/waflib/extras/gccdeps.py b/third_party/waf/waflib/extras/gccdeps.py
index 9e9952f2f7d21b7e0a74f0b9e4dd3acd28506bd8..5d2f0dd230cd976de7e628a7a9b1dfffdf72c714 100644 (file)
@@ -17,7 +17,7 @@ Usage::
 
 import os, re, threading
 from waflib import Task, Logs, Utils, Errors
-from waflib.Tools import c_preproc
+from waflib.Tools import asm, c, c_preproc, cxx
 from waflib.TaskGen import before_method, feature
 
 lock = threading.Lock()
diff --git a/third_party/waf/waflib/extras/pyqt5.py b/third_party/waf/waflib/extras/pyqt5.py
index 9c941764cc2492213ab38453c4c612504be35c43..0c083a1247a1969583a1d6d42693f2e3ec97d574 100644 (file)
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Federico Pellegrin, 2016-2019 (fedepell) adapted for Python
+# Federico Pellegrin, 2016-2022 (fedepell) adapted for Python
 
 """
 This tool helps with finding Python Qt5 tools and libraries,
@@ -137,7 +137,7 @@ class pyrcc(Task.Task):
        Processes ``.qrc`` files
        """
        color   = 'BLUE'
-       run_str = '${QT_PYRCC} ${SRC} -o ${TGT}'
+       run_str = '${QT_PYRCC} ${QT_PYRCC_FLAGS} ${SRC} -o ${TGT}'
        ext_out = ['.py']
 
        def rcname(self):
@@ -175,7 +175,7 @@ class ui5py(Task.Task):
        Processes ``.ui`` files for python
        """
        color   = 'BLUE'
-       run_str = '${QT_PYUIC} ${SRC} -o ${TGT}'
+       run_str = '${QT_PYUIC} ${QT_PYUIC_FLAGS} ${SRC} -o ${TGT}'
        ext_out = ['.py']
 
 class ts2qm(Task.Task):
@@ -216,17 +216,17 @@ def find_pyqt5_binaries(self):
                self.find_program(['pyrcc5'], var='QT_PYRCC')
                self.find_program(['pylupdate5'], var='QT_PYLUPDATE')
        elif getattr(Options.options, 'want_pyside2', True):
-               self.find_program(['pyside2-uic'], var='QT_PYUIC')
-               self.find_program(['pyside2-rcc'], var='QT_PYRCC')
-               self.find_program(['pyside2-lupdate'], var='QT_PYLUPDATE')
+               self.find_program(['pyside2-uic','uic-qt5'], var='QT_PYUIC')
+               self.find_program(['pyside2-rcc','rcc-qt5'], var='QT_PYRCC')
+               self.find_program(['pyside2-lupdate','lupdate-qt5'], var='QT_PYLUPDATE')
        elif getattr(Options.options, 'want_pyqt4', True):
                self.find_program(['pyuic4'], var='QT_PYUIC')
                self.find_program(['pyrcc4'], var='QT_PYRCC')
                self.find_program(['pylupdate4'], var='QT_PYLUPDATE')
        else:
-               self.find_program(['pyuic5','pyside2-uic','pyuic4'], var='QT_PYUIC')
-               self.find_program(['pyrcc5','pyside2-rcc','pyrcc4'], var='QT_PYRCC')
-               self.find_program(['pylupdate5', 'pyside2-lupdate','pylupdate4'], var='QT_PYLUPDATE')
+               self.find_program(['pyuic5','pyside2-uic','pyuic4','uic-qt5'], var='QT_PYUIC')
+               self.find_program(['pyrcc5','pyside2-rcc','pyrcc4','rcc-qt5'], var='QT_PYRCC')
+               self.find_program(['pylupdate5', 'pyside2-lupdate','pylupdate4','lupdate-qt5'], var='QT_PYLUPDATE')
 
        if not env.QT_PYUIC:
                self.fatal('cannot find the uic compiler for python for qt5')
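The new QT_PYRCC_FLAGS and QT_PYUIC_FLAGS variables let a project pass extra options to the resource and UI compilers. A hedged wscript sketch; the flag values and file names are only examples::

        def configure(conf):
            conf.load('python pyqt5')
            conf.env.QT_PYUIC_FLAGS = ['--from-imports']
            conf.env.QT_PYRCC_FLAGS = ['-compress', '9']

        def build(bld):
            bld(features='py pyqt5', source='aboutDialog.ui textures.qrc')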
diff --git a/third_party/waf/waflib/extras/sphinx.py b/third_party/waf/waflib/extras/sphinx.py
index 71d1028393b54883e6586160d8359f49aaf37d48..7d88f8ad126d58e987ebab66299bb073c73cd30d 100644 (file)
@@ -1,7 +1,15 @@
 """Support for Sphinx documentation
 
-This is a wrapper for sphinx-build program. Please note that sphinx-build supports only one output format which can
-passed to build via sphinx_output_format attribute. The default output format is html.
+This is a wrapper for sphinx-build program. Please note that sphinx-build supports only
+one output format at a time, but the tool can create multiple tasks to handle more.
+The output formats can be passed via the sphinx_output_format, which is an array of
+strings. For backwards compatibility if only one output is needed, it can be passed
+as a single string.
+The default output format is html.
+
+Specific formats can be installed in different directories by specifying the
+install_path_<FORMAT> attribute. If not defined, the standard install_path
+will be used instead.
 
 Example wscript:
 
@@ -13,7 +21,8 @@ def build(bld):
         features='sphinx',
         sphinx_source='sources',  # path to source directory
         sphinx_options='-a -v',  # sphinx-build program additional options
-        sphinx_output_format='man'  # output format of sphinx documentation
+        sphinx_output_format=['html', 'man'],  # output format of sphinx documentation
+        install_path_man='${DOCDIR}/man'       # put man pages in a specific directory
         )
 
 """
@@ -43,30 +52,36 @@ def build_sphinx(self):
     if not self.sphinx_source:
         self.bld.fatal('Can\'t find sphinx_source: %r' % self.sphinx_source)
 
+    # In the taskgen we have the complete list of formats
     Utils.def_attrs(self, sphinx_output_format='html')
-    self.env.SPHINX_OUTPUT_FORMAT = self.sphinx_output_format
+    self.sphinx_output_format = Utils.to_list(self.sphinx_output_format)
+
     self.env.SPHINX_OPTIONS = getattr(self, 'sphinx_options', [])
 
     for source_file in self.sphinx_source.ant_glob('**/*'):
         self.bld.add_manual_dependency(self.sphinx_source, source_file)
 
-    sphinx_build_task = self.create_task('SphinxBuildingTask')
-    sphinx_build_task.set_inputs(self.sphinx_source)
-    sphinx_build_task.set_outputs(self.path.get_bld())
+    for cfmt in self.sphinx_output_format:
+        sphinx_build_task = self.create_task('SphinxBuildingTask')
+        sphinx_build_task.set_inputs(self.sphinx_source)
+        # In task we keep the specific format this task is generating
+        sphinx_build_task.env.SPHINX_OUTPUT_FORMAT = cfmt
+
+        # the sphinx-build results are in <build + output_format> directory
+        sphinx_build_task.sphinx_output_directory = self.path.get_bld().make_node(cfmt)
+        sphinx_build_task.set_outputs(sphinx_build_task.sphinx_output_directory)
+        sphinx_build_task.sphinx_output_directory.mkdir()
 
-    # the sphinx-build results are in <build + output_format> directory
-    self.sphinx_output_directory = self.path.get_bld().make_node(self.env.SPHINX_OUTPUT_FORMAT)
-    self.sphinx_output_directory.mkdir()
-    Utils.def_attrs(self, install_path=get_install_path(self))
+        Utils.def_attrs(sphinx_build_task, install_path=getattr(self, 'install_path_' + cfmt, getattr(self, 'install_path', get_install_path(sphinx_build_task))))
 
 
-def get_install_path(tg):
-    if tg.env.SPHINX_OUTPUT_FORMAT == 'man':
-        return tg.env.MANDIR
-    elif tg.env.SPHINX_OUTPUT_FORMAT == 'info':
-        return tg.env.INFODIR
+def get_install_path(object):
+    if object.env.SPHINX_OUTPUT_FORMAT == 'man':
+        return object.env.MANDIR
+    elif object.env.SPHINX_OUTPUT_FORMAT == 'info':
+        return object.env.INFODIR
     else:
-        return tg.env.DOCDIR
+        return object.env.DOCDIR
 
 
 class SphinxBuildingTask(Task.Task):
@@ -96,10 +111,10 @@ class SphinxBuildingTask(Task.Task):
 
 
     def add_install(self):
-        nodes = self.generator.sphinx_output_directory.ant_glob('**/*', quiet=True)
+        nodes = self.sphinx_output_directory.ant_glob('**/*', quiet=True)
         self.outputs += nodes
-        self.generator.add_install_files(install_to=self.generator.install_path,
+        self.generator.add_install_files(install_to=self.install_path,
                                          install_from=nodes,
                                          postpone=False,
-                                         cwd=self.generator.sphinx_output_directory,
+                                         cwd=self.sphinx_output_directory.make_node(self.env.SPHINX_OUTPUT_FORMAT),
                                          relative_trick=True)
diff --git a/third_party/waf/waflib/extras/wafcache.py b/third_party/waf/waflib/extras/wafcache.py
index 2cef46c0e1c4a573c965bd41c79c3429fbfc17ba..30ac3ef518d4ad59b8d15a215317f237b69f004a 100644 (file)
@@ -39,7 +39,14 @@ File cache specific options:
 * WAFCACHE_TRIM_MAX_FOLDER: maximum amount of tasks to cache (1M)
 * WAFCACHE_EVICT_MAX_BYTES: maximum amount of cache size in bytes (10GB)
 * WAFCACHE_EVICT_INTERVAL_MINUTES: minimum time interval to try
-                                   and trim the cache (3 minutess)
+                                   and trim the cache (3 minutes)
+
+Upload specific options:
+* WAFCACHE_ASYNC_WORKERS: define a number of workers to upload results asynchronously
+                          this may improve build performance with many/long file uploads
+                          the default is unset (synchronous uploads)
+* WAFCACHE_ASYNC_NOWAIT: do not wait for uploads to complete (default: False)
+                         this requires asynchonous uploads to have an effect
 
 Usage::
 
@@ -49,10 +56,10 @@ Usage::
 
 To troubleshoot::
 
-       waf clean build --zones=wafcache
+       waf clean build --zone=wafcache
 """
 
-import atexit, base64, errno, fcntl, getpass, os, re, shutil, sys, time, traceback, urllib3, shlex
+import atexit, base64, errno, fcntl, getpass, os, re, shutil, sys, time, threading, traceback, urllib3, shlex
 try:
        import subprocess32 as subprocess
 except ImportError:
@@ -71,6 +78,8 @@ EVICT_MAX_BYTES = int(os.environ.get('WAFCACHE_EVICT_MAX_BYTES', 10**10))
 WAFCACHE_NO_PUSH = 1 if os.environ.get('WAFCACHE_NO_PUSH') else 0
 WAFCACHE_VERBOSITY = 1 if os.environ.get('WAFCACHE_VERBOSITY') else 0
 WAFCACHE_STATS = 1 if os.environ.get('WAFCACHE_STATS') else 0
+WAFCACHE_ASYNC_WORKERS = os.environ.get('WAFCACHE_ASYNC_WORKERS')
+WAFCACHE_ASYNC_NOWAIT = os.environ.get('WAFCACHE_ASYNC_NOWAIT')
 OK = "ok"
 
 re_waf_cmd = re.compile('(?P<src>%{SRC})|(?P<tgt>%{TGT})')
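These two environment variables drive the asynchronous upload path added further down: a bounded ThreadPoolExecutor, with the futures collected so a post-build hook can wait for (or, with WAFCACHE_ASYNC_NOWAIT, cancel) outstanding uploads. A stripped-down sketch of the pattern, with print() standing in for cache_command()::

        import os
        from concurrent.futures import ThreadPoolExecutor

        workers = int(os.environ.get('WAFCACHE_ASYNC_WORKERS') or 0)
        if workers:
            executor = ThreadPoolExecutor(max_workers=workers)
            uploads = [executor.submit(print, 'uploading task', i) for i in range(3)]
            executor.shutdown(wait=not os.environ.get('WAFCACHE_ASYNC_NOWAIT'))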
@@ -99,7 +108,9 @@ def can_retrieve_cache(self):
                self.generator.bld.cache_reqs += 1
 
        files_to = [node.abspath() for node in self.outputs]
-       err = cache_command(ssig, [], files_to)
+       proc = get_process()
+       err = cache_command(proc, ssig, [], files_to)
+       process_pool.append(proc)
        if err.startswith(OK):
                if WAFCACHE_VERBOSITY:
                        Logs.pprint('CYAN', '  Fetched %r from cache' % files_to)
@@ -132,23 +143,50 @@ def put_files_cache(self):
                files_from.append(path)
 
        bld = self.generator.bld
+       old_sig = self.signature()
+
+       for node in self.inputs:
+               try:
+                       del node.ctx.cache_sig[node]
+               except KeyError:
+                       pass
+
+       delattr(self, 'cache_sig')
        sig = self.signature()
-       ssig = Utils.to_hex(self.uid() + sig)
 
-       err = cache_command(ssig, files_from, [])
+       def _async_put_files_cache(bld, ssig, files_from):
+               proc = get_process()
+               if WAFCACHE_ASYNC_WORKERS:
+                       with bld.wafcache_lock:
+                               if bld.wafcache_stop:
+                                       process_pool.append(proc)
+                                       return
+                               bld.wafcache_procs.add(proc)
+
+               err = cache_command(proc, ssig, files_from, [])
+               process_pool.append(proc)
+               if err.startswith(OK):
+                       if WAFCACHE_VERBOSITY:
+                               Logs.pprint('CYAN', '  Successfully uploaded %s to cache' % files_from)
+                       else:
+                               Logs.debug('wafcache: Successfully uploaded %r to cache', files_from)
+                       if WAFCACHE_STATS:
+                               bld.cache_puts += 1
+               else:
+                       if WAFCACHE_VERBOSITY:
+                               Logs.pprint('RED', '  Error caching step results %s: %s' % (files_from, err))
+                       else:
+                               Logs.debug('wafcache: Error caching results %s: %s', files_from, err)
 
-       if err.startswith(OK):
-               if WAFCACHE_VERBOSITY:
-                       Logs.pprint('CYAN', '  Successfully uploaded %s to cache' % files_from)
+       if old_sig == sig:
+               ssig = Utils.to_hex(self.uid() + sig)
+               if WAFCACHE_ASYNC_WORKERS:
+                       fut = bld.wafcache_executor.submit(_async_put_files_cache, bld, ssig, files_from)
+                       bld.wafcache_uploads.append(fut)
                else:
-                       Logs.debug('wafcache: Successfully uploaded %r to cache', files_from)
-               if WAFCACHE_STATS:
-                       self.generator.bld.cache_puts += 1
+                       _async_put_files_cache(bld, ssig, files_from)
        else:
-               if WAFCACHE_VERBOSITY:
-                       Logs.pprint('RED', '  Error caching step results %s: %s' % (files_from, err))
-               else:
-                       Logs.debug('wafcache: Error caching results %s: %s', files_from, err)
+               Logs.debug('wafcache: skipped %r upload due to late input modifications %r', self.outputs, self.inputs)
 
        bld.task_sigs[self.uid()] = self.cache_sig
 
@@ -245,19 +283,45 @@ def get_process():
                return subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, bufsize=0)
 
 def atexit_pool():
-       for k in process_pool:
-               try:
-                       os.kill(k.pid, 9)
-               except OSError:
-                       pass
-               else:
-                       k.wait()
+       for proc in process_pool:
+               proc.kill()
 atexit.register(atexit_pool)
 
 def build(bld):
        """
        Called during the build process to enable file caching
        """
+
+       if WAFCACHE_ASYNC_WORKERS:
+               try:
+                       num_workers = int(WAFCACHE_ASYNC_WORKERS)
+               except ValueError:
+                       Logs.warn('Invalid WAFCACHE_ASYNC_WORKERS specified: %r' % WAFCACHE_ASYNC_WORKERS)
+               else:
+                       from concurrent.futures import ThreadPoolExecutor
+                       bld.wafcache_executor = ThreadPoolExecutor(max_workers=num_workers)
+                       bld.wafcache_uploads = []
+                       bld.wafcache_procs = set([])
+                       bld.wafcache_stop = False
+                       bld.wafcache_lock = threading.Lock()
+
+               def finalize_upload_async(bld):
+                       if WAFCACHE_ASYNC_NOWAIT:
+                               with bld.wafcache_lock:
+                                       bld.wafcache_stop = True
+
+                               for fut in reversed(bld.wafcache_uploads):
+                                       fut.cancel()
+
+                               for proc in bld.wafcache_procs:
+                                       proc.kill()
+
+                               bld.wafcache_procs.clear()
+                       else:
+                               Logs.pprint('CYAN', '... waiting for wafcache uploads to complete (%s uploads)' % len(bld.wafcache_uploads))
+                       bld.wafcache_executor.shutdown(wait=True)
+               bld.add_post_fun(finalize_upload_async)
+
        if WAFCACHE_STATS:
                # Init counter for statistics and hook to print results at the end
                bld.cache_reqs = bld.cache_hits = bld.cache_puts = 0
@@ -266,9 +330,8 @@ def build(bld):
                        hit_ratio = 0
                        if bld.cache_reqs > 0:
                                hit_ratio = (bld.cache_hits / bld.cache_reqs) * 100
-                       Logs.pprint('CYAN', '  wafcache stats: requests: %s, hits, %s, ratio: %.2f%%, writes %s' %
+                       Logs.pprint('CYAN', '  wafcache stats: %s requests, %s hits (ratio: %.2f%%), %s writes' %
                                         (bld.cache_reqs, bld.cache_hits, hit_ratio, bld.cache_puts) )
-
                bld.add_post_fun(printstats)
 
        if process_pool:
@@ -286,15 +349,13 @@ def build(bld):
        for x in reversed(list(Task.classes.values())):
                make_cached(x)
 
-def cache_command(sig, files_from, files_to):
+def cache_command(proc, sig, files_from, files_to):
        """
        Create a command for cache worker processes, returns a pickled
        base64-encoded tuple containing the task signature, a list of files to
        cache and a list of files files to get from cache (one of the lists
        is assumed to be empty)
        """
-       proc = get_process()
-
        obj = base64.b64encode(cPickle.dumps([sig, files_from, files_to]))
        proc.stdin.write(obj)
        proc.stdin.write('\n'.encode())
@@ -302,7 +363,6 @@ def cache_command(sig, files_from, files_to):
        obj = proc.stdout.readline()
        if not obj:
                raise OSError('Preforked sub-process %r died' % proc.pid)
-       process_pool.append(proc)
        return cPickle.loads(base64.b64decode(obj))
 
 try:
@@ -456,7 +516,10 @@ class netcache(object):
 class fcache(object):
        def __init__(self):
                if not os.path.exists(CACHE_DIR):
-                       os.makedirs(CACHE_DIR)
+                       try:
+                               os.makedirs(CACHE_DIR)
+                       except OSError:
+                               pass
                if not os.path.exists(CACHE_DIR):
                        raise ValueError('Could not initialize the cache directory')
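The guarded os.makedirs() call tolerates the race where several concurrent waf invocations create the cache directory at the same time, while the follow-up existence check still reports genuine failures. On Python 3 the same effect can be written with exist_ok, as in this sketch (the path is illustrative)::

        import os, tempfile

        cache_dir = os.path.join(tempfile.gettempdir(), 'wafcache-demo')
        os.makedirs(cache_dir, exist_ok=True)  # no error if another process won the race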