From ab4aca868d68b9de9b20a9991bbbb5e78ab48a30 Mon Sep 17 00:00:00 2001 From: Dirk Pranke Date: Tue, 4 Dec 2018 11:43:29 -0800 Subject: [PATCH] Mostly mechanical changes for Python3 support. Everything should still work fine in Python 2. With this change (and the others I've just landed), Python 3 should work on at least Linux, but I need to do more sanity-checking on Mac and Win. The changes in this CL are all mechanical -- things like print() instead of print, `key in dict` rather than dict.has_key(key), and switching to use .decode('utf8') to handle the binary streams returned from a subprocess (and a few other things). Most of this work is derived from Ashley.Whetter@gmail.com's original work in https://codereview.chromium.org/1454433002/. Bug: gyp:36 Change-Id: Ie04ebcf2d82e7b8ff34c6a112215eac46af688ba Reviewed-on: https://chromium-review.googlesource.com/c/1357805 Reviewed-by: Mark Mentovai --- PRESUBMIT.py | 3 +- README.md | 3 +- buildbot/buildbot_run.py | 24 +- pylib/gyp/MSVSSettings.py | 37 ++- pylib/gyp/MSVSSettings_test.py | 7 +- pylib/gyp/MSVSUserFile.py | 4 +- pylib/gyp/MSVSUtil.py | 2 +- pylib/gyp/MSVSVersion.py | 11 +- pylib/gyp/__init__.py | 39 ++- pylib/gyp/common.py | 10 +- pylib/gyp/common_test.py | 1 + pylib/gyp/easy_xml.py | 7 +- pylib/gyp/easy_xml_test.py | 7 +- pylib/gyp/flock_tool.py | 2 +- pylib/gyp/generator/analyzer.py | 89 +++--- pylib/gyp/generator/cmake.py | 26 +- pylib/gyp/generator/dump_dependency_json.py | 4 +- pylib/gyp/generator/eclipse.py | 6 +- pylib/gyp/generator/gypd.py | 2 +- pylib/gyp/generator/make.py | 32 +- pylib/gyp/generator/msvs.py | 101 +++--- pylib/gyp/generator/msvs_test.py | 7 +- pylib/gyp/generator/ninja.py | 41 +-- pylib/gyp/generator/ninja_test.py | 1 - pylib/gyp/generator/xcode.py | 46 +-- pylib/gyp/input.py | 114 ++++--- pylib/gyp/input_test.py | 6 +- pylib/gyp/mac_tool.py | 35 ++- pylib/gyp/msvs_emulation.py | 37 ++- pylib/gyp/ordered_dict.py | 289 ------------------ pylib/gyp/simple_copy.py | 2 +- pylib/gyp/win_tool.py | 28 +- pylib/gyp/xcode_emulation.py | 25 +- pylib/gyp/xcode_ninja.py | 8 +- pylib/gyp/xcodeproj_file.py | 54 ++-- pylib/gyp/xml_fix.py | 3 +- test/actions-bare/src/bare.py | 2 +- .../gyptest-action.py | 4 +- .../gyptest-multiple-outputs.py | 4 +- test/actions-multiple/src/actions.gyp | 14 +- .../src/{copy.py => copyfile.py} | 0 test/actions-none/src/fake_cross.py | 2 +- test/actions-subdir/src/make-file.py | 2 +- .../src/subdir/make-subdir-file.py | 2 +- test/actions/src/subdir2/make-file.py | 2 +- test/additional-targets/src/dir1/emit.py | 2 +- test/analyzer/gyptest-analyzer.py | 40 +-- test/arflags/gyptest-arflags.py | 4 +- .../gyptest-compiler-global-settings.py | 3 +- test/compiler-override/my_cc.py | 3 +- test/compiler-override/my_cxx.py | 3 +- test/compiler-override/my_ld.py | 3 +- test/compiler-override/my_nm.py | 3 +- test/compiler-override/my_readelf.py | 3 +- .../inheritance/gyptest-duplicates.py | 4 +- .../gyptest-target_platform.py | 2 +- test/copies/gyptest-updir.py | 4 +- test/custom-generator/mygenerator.py | 2 +- test/dependencies/adso/write_args.py | 2 +- test/determinism/gyptest-solibs.py | 4 +- test/determinism/rule.py | 7 +- test/escaping/gyptest-colon.py | 4 +- .../actions/subdir2/make-file.py | 2 +- test/generator-output/gyptest-mac-bundle.py | 4 +- test/generator-output/rules/copy-file.py | 2 +- test/hard_dependency/src/emit.py | 2 +- test/ios/gyptest-app-ios.py | 4 +- test/ios/gyptest-extension.py | 6 +- test/ios/gyptest-per-config-settings.py | 20 +- test/ios/gyptest-watch.py | 4 +- 
test/lib/TestCmd.py | 178 ++++++----- test/lib/TestCommon.py | 135 ++++---- test/lib/TestGyp.py | 42 +-- test/lib/TestMac.py | 11 +- test/lib/TestWin.py | 2 +- test/linux/gyptest-implicit-rpath.py | 2 +- .../linux/gyptest-ldflags-from-environment.py | 2 +- test/linux/gyptest-target-rpath.py | 2 +- .../linux/ldflags-duplicates/check-ldflags.py | 7 +- test/mac/gyptest-app-assets-catalog.py | 6 +- test/mac/gyptest-app-error.py | 4 +- test/mac/gyptest-app.py | 8 +- test/mac/gyptest-archs.py | 3 +- test/mac/gyptest-bundle-resources.py | 4 +- test/mac/gyptest-copies.py | 4 +- test/mac/gyptest-depend-on-bundle.py | 4 +- test/mac/gyptest-framework.py | 4 +- test/mac/gyptest-infoplist-process.py | 4 +- test/mac/gyptest-installname.py | 4 +- test/mac/gyptest-ldflags-passed-to-libtool.py | 4 +- test/mac/gyptest-loadable-module.py | 4 +- test/mac/gyptest-lto.py | 2 +- test/mac/gyptest-missing-cfbundlesignature.py | 4 +- test/mac/gyptest-non-strs-flattened-to-env.py | 4 +- test/mac/gyptest-postbuild-defaults.py | 4 +- test/mac/gyptest-postbuild-fail.py | 7 +- test/mac/gyptest-rebuild.py | 4 +- test/mac/gyptest-rpath.py | 2 +- test/mac/gyptest-sdkroot.py | 4 +- test/mac/gyptest-sourceless-module.py | 4 +- test/mac/gyptest-strip-default.py | 8 +- test/mac/gyptest-strip.py | 6 +- test/mac/gyptest-swift-library.py | 6 +- test/mac/gyptest-xcode-env-order.py | 4 +- test/mac/gyptest-xcode-gcc.py | 1 + test/mac/gyptest-xcuitest.py | 2 +- .../ar/gyptest-make_global_settings_ar.py | 2 +- .../basics/gyptest-make_global_settings.py | 8 +- .../env-wrapper/gyptest-wrapper.py | 4 +- .../gyptest-make_global_settings.py | 4 +- .../full-toolchain/my_nm.py | 3 +- .../full-toolchain/my_readelf.py | 3 +- .../ld/gyptest-make_global_settings_ld.py | 2 +- .../wrapper/gyptest-wrapper.py | 6 +- .../gyptest-many-actions-unsorted.py | 4 +- test/many-actions/gyptest-many-actions.py | 4 +- .../msvs/config_attrs/gyptest-config_attrs.py | 4 +- test/msvs/rules_stdout_stderr/rule_stderr.py | 3 +- test/msvs/rules_stdout_stderr/rule_stdout.py | 3 +- test/ninja/action-rule-hash/subdir/emit.py | 2 +- .../gyptest-action-dependencies.py | 4 +- test/ninja/action_dependencies/src/emit.py | 2 +- .../gyptest-solibs-avoid-relinking.py | 4 +- test/no-cpp/gyptest-no-cpp.py | 2 +- test/rules-dirname/gyptest-dirname.py | 4 +- test/rules-dirname/src/copy-file.py | 2 +- test/rules-rebuild/src/make-sources.py | 4 +- .../gyptest-rules-variables.py | 4 +- test/rules/gyptest-all.py | 4 +- test/rules/gyptest-default.py | 4 +- test/rules/src/copy-file.py | 2 +- test/small/gyptest-small.py | 1 + test/standalone/gyptest-standalone.py | 6 +- test/symlinks/gyptest-symlinks.py | 4 +- test/variables/commands/commands-repeated.gyp | 14 +- .../commands/commands-repeated.gyp.stdout | 52 ++-- .../commands/commands-repeated.gypd.golden | 2 +- test/variables/commands/commands.gyp | 8 +- .../commands/commands.gyp.ignore-env.stdout | 26 +- test/variables/commands/commands.gyp.stdout | 26 +- test/variables/commands/commands.gypd.golden | 2 +- .../commands/gyptest-commands-ignore-env.py | 3 +- .../commands/gyptest-commands-repeated.py | 4 +- test/variables/commands/gyptest-commands.py | 3 +- .../repeated_multidir/print_cwd_basename.py | 3 +- test/variables/commands/test.py | 8 +- .../filelist/gyptest-filelist-golden.py | 6 +- test/variables/filelist/gyptest-filelist.py | 2 +- ...yptest-generator-output-different-drive.py | 2 +- ...test-cl-enable-enhanced-instruction-set.py | 4 +- test/win/gyptest-cl-function-level-linking.py | 6 +- test/win/gyptest-command-quote.py | 4 +- 
test/win/gyptest-link-defrelink.py | 4 +- .../gyptest-link-enable-winrt-app-revision.py | 4 +- ...nk-enable-winrt-target-platform-version.py | 4 +- test/win/gyptest-link-enable-winrt.py | 4 +- test/win/gyptest-link-large-pdb.py | 8 +- test/win/gyptest-link-ordering.py | 8 +- test/win/gyptest-link-restat-importlib.py | 4 +- test/win/gyptest-link-update-manifest.py | 2 +- test/win/gyptest-macro-targetfilename.py | 4 +- test/win/gyptest-rc-build.py | 4 +- test/win/idl-excluded/copy-file.py | 2 +- test/win/vs-macros/as.py | 6 +- test/win/vs-macros/test_exists.py | 2 +- tools/graphviz.py | 30 +- tools/pretty_gyp.py | 10 +- tools/pretty_sln.py | 48 +-- tools/pretty_vcproj.py | 20 +- 169 files changed, 1086 insertions(+), 1130 deletions(-) delete mode 100644 pylib/gyp/ordered_dict.py rename test/actions-multiple/src/{copy.py => copyfile.py} (100%) diff --git a/PRESUBMIT.py b/PRESUBMIT.py index 4bc1b8ca..5ee669b5 100644 --- a/PRESUBMIT.py +++ b/PRESUBMIT.py @@ -76,8 +76,7 @@ def _LicenseHeader(input_api): # Accept any year number from 2009 to the current year. current_year = int(input_api.time.strftime('%Y')) - allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1))) - + allowed_years = (str(s) for s in reversed(range(2009, current_year + 1))) years_re = '(' + '|'.join(allowed_years) + ')' # The (c) is deprecated, but tolerate it until it's removed from all files. diff --git a/README.md b/README.md index c0d73ac9..b4766c9d 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,5 @@ GYP can Generate Your Projects. =================================== -Documents are available at [gyp.gsrc.io](https://gyp.gsrc.io), or you can check out ```md-pages``` branch to read those documents offline. +Documents are available at [gyp.gsrc.io](https://gyp.gsrc.io), or you can +check out ```md-pages``` branch to read those documents offline. 
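The commit message above summarizes the conversions but the diff is long, so here is a minimal standalone sketch of the three idioms it names (print as a function, `key in dict` instead of `dict.has_key(key)`, and decoding subprocess output), written to run on both Python 2 and Python 3. The dictionary, flag values, and subprocess command are made up for illustration; this is not code from gyp.

```python
# Sketch of the patterns named in the commit message; runs on Python 2 and 3.
from __future__ import print_function  # makes print() a function on Python 2

import subprocess
import sys

flags = {'format': 'ninja'}  # illustrative data, not a real gyp structure

# print as a function rather than a statement.
print('generator:', flags['format'])

# Membership test instead of dict.has_key(), which Python 3 removed.
if 'format' in flags:
  print('format flag is set', file=sys.stderr)

# subprocess pipes return bytes on Python 3; decoding yields text either way.
out = subprocess.check_output([sys.executable, '-c', 'print("hello")'])
print(out.decode('utf8').strip())
```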
diff --git a/buildbot/buildbot_run.py b/buildbot/buildbot_run.py index 9a2b71f1..89416520 100755 --- a/buildbot/buildbot_run.py +++ b/buildbot/buildbot_run.py @@ -5,6 +5,8 @@ """Argument-less script to select what to run on the buildbots.""" +from __future__ import print_function + import os import shutil import subprocess @@ -24,14 +26,14 @@ def CallSubProcess(*args, **kwargs): with open(os.devnull) as devnull_fd: retcode = subprocess.call(stdin=devnull_fd, *args, **kwargs) if retcode != 0: - print '@@@STEP_EXCEPTION@@@' + print('@@@STEP_EXCEPTION@@@') sys.exit(1) def PrepareCmake(): """Build CMake 2.8.8 since the version in Precise is 2.8.7.""" if os.environ['BUILDBOT_CLOBBER'] == '1': - print '@@@BUILD_STEP Clobber CMake checkout@@@' + print('@@@BUILD_STEP Clobber CMake checkout@@@') shutil.rmtree(CMAKE_DIR) # We always build CMake 2.8.8, so no need to do anything @@ -39,10 +41,10 @@ def PrepareCmake(): if os.path.isdir(CMAKE_DIR): return - print '@@@BUILD_STEP Initialize CMake checkout@@@' + print('@@@BUILD_STEP Initialize CMake checkout@@@') os.mkdir(CMAKE_DIR) - print '@@@BUILD_STEP Sync CMake@@@' + print('@@@BUILD_STEP Sync CMake@@@') CallSubProcess( ['git', 'clone', '--depth', '1', @@ -53,7 +55,7 @@ def PrepareCmake(): CMAKE_DIR], cwd=CMAKE_DIR) - print '@@@BUILD_STEP Build CMake@@@' + print('@@@BUILD_STEP Build CMake@@@') CallSubProcess( ['/bin/bash', 'bootstrap', '--prefix=%s' % CMAKE_DIR], cwd=CMAKE_DIR) @@ -74,7 +76,7 @@ def GypTestFormat(title, format=None, msvs_version=None, tests=[]): if not format: format = title - print '@@@BUILD_STEP ' + title + '@@@' + print('@@@BUILD_STEP ' + title + '@@@') sys.stdout.flush() env = os.environ.copy() if msvs_version: @@ -89,17 +91,17 @@ def GypTestFormat(title, format=None, msvs_version=None, tests=[]): retcode = subprocess.call(command, cwd=ROOT_DIR, env=env, shell=True) if retcode: # Emit failure tag, and keep going. - print '@@@STEP_FAILURE@@@' + print('@@@STEP_FAILURE@@@') return 1 return 0 def GypBuild(): # Dump out/ directory. - print '@@@BUILD_STEP cleanup@@@' - print 'Removing %s...' % OUT_DIR + print('@@@BUILD_STEP cleanup@@@') + print('Removing %s...' % OUT_DIR) shutil.rmtree(OUT_DIR, ignore_errors=True) - print 'Done.' + print('Done.') retcode = 0 if sys.platform.startswith('linux'): @@ -128,7 +130,7 @@ def GypBuild(): # after the build proper that could be used for cumulative failures), # use that instead of this. This isolates the final return value so # that it isn't misattributed to the last stage. - print '@@@BUILD_STEP failures@@@' + print('@@@BUILD_STEP failures@@@') sys.exit(retcode) diff --git a/pylib/gyp/MSVSSettings.py b/pylib/gyp/MSVSSettings.py index 8ae19180..1d2e25ab 100644 --- a/pylib/gyp/MSVSSettings.py +++ b/pylib/gyp/MSVSSettings.py @@ -14,9 +14,17 @@ MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild """ +from __future__ import print_function + import sys import re +try: + # basestring was removed in python3. + basestring +except NameError: + basestring = str + # Dictionaries of settings validators. The key is the tool name, the value is # a dictionary mapping setting names to validation functions. _msvs_validators = {} @@ -400,7 +408,7 @@ def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr): if unrecognized: # We don't know this setting. Give a warning. 
- print >> stderr, error_msg + print(error_msg, file=stderr) def FixVCMacroSlashes(s): @@ -433,7 +441,7 @@ def ConvertVCMacrosToMSBuild(s): '$(PlatformName)': '$(Platform)', '$(SafeInputName)': '%(Filename)', } - for old, new in replace_map.iteritems(): + for old, new in replace_map.items(): s = s.replace(old, new) s = FixVCMacroSlashes(s) return s @@ -453,17 +461,18 @@ def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr): dictionaries of settings and their values. """ msbuild_settings = {} - for msvs_tool_name, msvs_tool_settings in msvs_settings.iteritems(): + for msvs_tool_name, msvs_tool_settings in msvs_settings.items(): if msvs_tool_name in _msvs_to_msbuild_converters: msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name] - for msvs_setting, msvs_value in msvs_tool_settings.iteritems(): + for msvs_setting, msvs_value in msvs_tool_settings.items(): if msvs_setting in msvs_tool: # Invoke the translation function. try: msvs_tool[msvs_setting](msvs_value, msbuild_settings) - except ValueError, e: - print >> stderr, ('Warning: while converting %s/%s to MSBuild, ' - '%s' % (msvs_tool_name, msvs_setting, e)) + except ValueError as e: + print(('Warning: while converting %s/%s to MSBuild, ' + '%s' % (msvs_tool_name, msvs_setting, e)), + file=stderr) else: _ValidateExclusionSetting(msvs_setting, msvs_tool, @@ -472,8 +481,8 @@ def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr): (msvs_tool_name, msvs_setting)), stderr) else: - print >> stderr, ('Warning: unrecognized tool %s while converting to ' - 'MSBuild.' % msvs_tool_name) + print(('Warning: unrecognized tool %s while converting to ' + 'MSBuild.' % msvs_tool_name), file=stderr) return msbuild_settings @@ -513,13 +522,13 @@ def _ValidateSettings(validators, settings, stderr): for tool_name in settings: if tool_name in validators: tool_validators = validators[tool_name] - for setting, value in settings[tool_name].iteritems(): + for setting, value in settings[tool_name].items(): if setting in tool_validators: try: tool_validators[setting](value) - except ValueError, e: - print >> stderr, ('Warning: for %s/%s, %s' % - (tool_name, setting, e)) + except ValueError as e: + print(('Warning: for %s/%s, %s' % + (tool_name, setting, e)), file=stderr) else: _ValidateExclusionSetting(setting, tool_validators, @@ -528,7 +537,7 @@ def _ValidateSettings(validators, settings, stderr): stderr) else: - print >> stderr, ('Warning: unrecognized tool %s' % tool_name) + print(('Warning: unrecognized tool %s' % tool_name), file=stderr) # MSVS and MBuild names of the tools. 
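The MSVSSettings.py hunks above combine four of the recurring conversions: the basestring shim, iteritems() to items(), `except E, e` to `except E as e`, and `print >> stream` to `print(..., file=stream)`. Below is a short self-contained sketch of those idioms together; the settings dictionary and messages are invented for illustration and are not gyp code.

```python
# Sketch of the idioms used in the MSVSSettings.py hunks; not gyp code.
from __future__ import print_function

import sys

try:
  # basestring was removed in Python 3; fall back to str there.
  basestring
except NameError:
  basestring = str

settings = {'WarningLevel': '4', 'Optimization': 'Full'}  # invented values

# dict.items() replaces dict.iteritems(); it exists on both versions.
for name, value in sorted(settings.items()):
  if not isinstance(value, basestring):
    print('Warning: %s is not a string' % name, file=sys.stderr)
    continue
  try:
    int(value)  # pretend the validator wants a numeric string
  except ValueError as e:  # 'except E as e' replaces 'except E, e'
    # print(..., file=...) replaces 'print >> stream, ...'.
    print('Warning: for %s, %s' % (name, e), file=sys.stderr)
```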
diff --git a/pylib/gyp/MSVSSettings_test.py b/pylib/gyp/MSVSSettings_test.py index bf6ea6b8..73ed25e2 100755 --- a/pylib/gyp/MSVSSettings_test.py +++ b/pylib/gyp/MSVSSettings_test.py @@ -6,7 +6,10 @@ """Unit tests for the MSVSSettings.py file.""" -import StringIO +try: + from StringIO import StringIO +except ImportError: + from io import StringIO import unittest import gyp.MSVSSettings as MSVSSettings @@ -14,7 +17,7 @@ class TestSequenceFunctions(unittest.TestCase): def setUp(self): - self.stderr = StringIO.StringIO() + self.stderr = StringIO() def _ExpectedWarnings(self, expected): """Compares recorded lines to expected warnings.""" diff --git a/pylib/gyp/MSVSUserFile.py b/pylib/gyp/MSVSUserFile.py index 6c07e9a8..2264d640 100644 --- a/pylib/gyp/MSVSUserFile.py +++ b/pylib/gyp/MSVSUserFile.py @@ -91,7 +91,7 @@ def AddDebugSettings(self, config_name, command, environment = {}, if environment and isinstance(environment, dict): env_list = ['%s="%s"' % (key, val) - for (key,val) in environment.iteritems()] + for (key,val) in environment.items()] environment = ' '.join(env_list) else: environment = '' @@ -135,7 +135,7 @@ def AddDebugSettings(self, config_name, command, environment = {}, def WriteIfChanged(self): """Writes the user file.""" configs = ['Configurations'] - for config, spec in sorted(self.configurations.iteritems()): + for config, spec in sorted(self.configurations.items()): configs.append(spec) content = ['VisualStudioUserFile', diff --git a/pylib/gyp/MSVSUtil.py b/pylib/gyp/MSVSUtil.py index 96dea6c2..f24530b2 100644 --- a/pylib/gyp/MSVSUtil.py +++ b/pylib/gyp/MSVSUtil.py @@ -236,7 +236,7 @@ def InsertLargePdbShims(target_list, target_dicts, vars): # Set up the shim to output its PDB to the same location as the final linker # target. - for config_name, config in shim_dict.get('configurations').iteritems(): + for config_name, config in shim_dict.get('configurations').items(): pdb_path = _GetPdbPath(target_dict, config_name, vars) # A few keys that we don't want to propagate. diff --git a/pylib/gyp/MSVSVersion.py b/pylib/gyp/MSVSVersion.py index 44b958d5..5f316b67 100644 --- a/pylib/gyp/MSVSVersion.py +++ b/pylib/gyp/MSVSVersion.py @@ -189,7 +189,7 @@ def _RegistryQuery(key, value=None): text = None try: text = _RegistryQueryBase('Sysnative', key, value) - except OSError, e: + except OSError as e: if e.errno == errno.ENOENT: text = _RegistryQueryBase('System32', key, value) else: @@ -207,12 +207,15 @@ def _RegistryGetValueUsingWinReg(key, value): contents of the registry key's value, or None on failure. Throws ImportError if _winreg is unavailable. """ - import _winreg + try: + import _winreg as winreg + except ImportError: + import winreg try: root, subkey = key.split('\\', 1) assert root == 'HKLM' # Only need HKLM for now. - with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey: - return _winreg.QueryValueEx(hkey, value)[0] + with winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, subkey) as hkey: + return winreg.QueryValueEx(hkey, value)[0] except WindowsError: return None diff --git a/pylib/gyp/__init__.py b/pylib/gyp/__init__.py index 668f38b6..e038151b 100755 --- a/pylib/gyp/__init__.py +++ b/pylib/gyp/__init__.py @@ -4,6 +4,8 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +from __future__ import print_function + import copy import gyp.input import optparse @@ -14,6 +16,12 @@ import traceback from gyp.common import GypError +try: + # basestring was removed in python3. 
+ basestring +except NameError: + basestring = str + # Default debug modes for GYP debug = {} @@ -22,7 +30,6 @@ DEBUG_VARIABLES = 'variables' DEBUG_INCLUDES = 'includes' - def DebugOutput(mode, message, *args): if 'all' in gyp.debug or mode in gyp.debug: ctx = ('unknown', 0, 'unknown') @@ -34,8 +41,8 @@ def DebugOutput(mode, message, *args): pass if args: message %= args - print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]), - ctx[1], ctx[2], message) + print('%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]), + ctx[1], ctx[2], message)) def FindBuildFiles(): extension = '.gyp' @@ -207,7 +214,7 @@ def Noop(value): # We always want to ignore the environment when regenerating, to avoid # duplicate or changed flags in the environment at the time of regeneration. flags = ['--ignore-environment'] - for name, metadata in options._regeneration_metadata.iteritems(): + for name, metadata in options._regeneration_metadata.items(): opt = metadata['opt'] value = getattr(options, name) value_predicate = metadata['type'] == 'path' and FixPath or Noop @@ -226,12 +233,13 @@ def Noop(value): (action == 'store_false' and not value)): flags.append(opt) elif options.use_environment and env_name: - print >>sys.stderr, ('Warning: environment regeneration unimplemented ' + print(('Warning: environment regeneration unimplemented ' 'for %s flag %r env_name %r' % (action, opt, - env_name)) + env_name)), + file=sys.stderr) else: - print >>sys.stderr, ('Warning: regeneration unimplemented for action %r ' - 'flag %r' % (action, opt)) + print(('Warning: regeneration unimplemented for action %r ' + 'flag %r' % (action, opt)), file=sys.stderr) return flags @@ -431,12 +439,11 @@ def gyp_main(args): for build_file in build_files: build_file_dir = os.path.abspath(os.path.dirname(build_file)) build_file_dir_components = build_file_dir.split(os.path.sep) - components_len = len(build_file_dir_components) - for index in xrange(components_len - 1, -1, -1): - if build_file_dir_components[index] == 'src': + for component in reversed(build_file_dir_components): + if component == 'src': options.depth = os.path.sep.join(build_file_dir_components) break - del build_file_dir_components[index] + del build_file_dir_components[-1] # If the inner loop found something, break without advancing to another # build file. @@ -475,7 +482,7 @@ def gyp_main(args): if home_dot_gyp != None: default_include = os.path.join(home_dot_gyp, 'include.gypi') if os.path.exists(default_include): - print 'Using overrides found in ' + default_include + print('Using overrides found in ' + default_include) includes.append(default_include) # Command-line --include files come after the default include. 
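The __init__.py hunks around this point drop xrange() and stop calling .keys() for membership tests. A brief illustrative sketch of both idioms follows; the path components and debug dictionary are invented, not taken from gyp.

```python
# Sketch of two idioms from the surrounding hunks; data is invented.
from __future__ import print_function

components = ['home', 'build', 'src', 'pylib', 'gyp']

# range() replaces the removed xrange(); scanning from the end mirrors the
# rewritten depth-detection loop above.
depth = None
for index in reversed(range(len(components))):
  if components[index] == 'src':
    depth = '/'.join(components[:index + 1])
    break

debug = {'general': True}
# 'key in d' replaces 'd.has_key(key)' and 'key in d.keys()' on both versions.
if 'general' in debug and depth is not None:
  print('depth:', depth)
```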
@@ -490,7 +497,7 @@ def gyp_main(args): if options.generator_flags: gen_flags += options.generator_flags generator_flags = NameValueListToDict(gen_flags) - if DEBUG_GENERAL in gyp.debug.keys(): + if DEBUG_GENERAL in gyp.debug: DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags) # Generate all requested formats (use a set in case we got one format request @@ -523,7 +530,7 @@ def gyp_main(args): generator.GenerateOutput(flat_list, targets, data, params) if options.configs: - valid_configs = targets[flat_list[0]]['configurations'].keys() + valid_configs = targets[flat_list[0]]['configurations'] for conf in options.configs: if conf not in valid_configs: raise GypError('Invalid config specified via --build: %s' % conf) @@ -536,7 +543,7 @@ def gyp_main(args): def main(args): try: return gyp_main(args) - except GypError, e: + except GypError as e: sys.stderr.write("gyp: %s\n" % e) return 1 diff --git a/pylib/gyp/common.py b/pylib/gyp/common.py index 1b245ec7..1823de89 100644 --- a/pylib/gyp/common.py +++ b/pylib/gyp/common.py @@ -345,7 +345,7 @@ def __init__(self): prefix=os.path.split(filename)[1] + '.gyp.', dir=os.path.split(filename)[0]) try: - self.tmp_file = os.fdopen(tmp_fd, 'wb') + self.tmp_file = os.fdopen(tmp_fd, 'w') except Exception: # Don't leave turds behind. os.unlink(self.tmp_path) @@ -363,7 +363,7 @@ def close(self): same = False try: same = filecmp.cmp(self.tmp_path, filename, False) - except OSError, e: + except OSError as e: if e.errno != errno.ENOENT: raise @@ -382,9 +382,9 @@ def close(self): # # No way to get the umask without setting a new one? Set a safe one # and then set it back to the old value. - umask = os.umask(077) + umask = os.umask(0o77) os.umask(umask) - os.chmod(self.tmp_path, 0666 & ~umask) + os.chmod(self.tmp_path, 0o666 & ~umask) if sys.platform == 'win32' and os.path.exists(filename): # NOTE: on windows (but not cygwin) rename will not replace an # existing file, so it must be preceded with a remove. Sadly there @@ -471,7 +471,7 @@ def CopyTool(flavor, out_path, generator_flags={}): ''.join([source[0], header] + source[1:])) # Make file executable. - os.chmod(tool_path, 0755) + os.chmod(tool_path, 0o755) # From Alex Martelli, diff --git a/pylib/gyp/common_test.py b/pylib/gyp/common_test.py index ad6f9a14..0b8ada3d 100755 --- a/pylib/gyp/common_test.py +++ b/pylib/gyp/common_test.py @@ -63,6 +63,7 @@ def test_platform_default(self): self.assertFlavor('solaris', 'sunos' , {}); self.assertFlavor('linux' , 'linux2' , {}); self.assertFlavor('linux' , 'linux3' , {}); + self.assertFlavor('linux' , 'linux' , {}); def test_param(self): self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'}) diff --git a/pylib/gyp/easy_xml.py b/pylib/gyp/easy_xml.py index 2522efb2..15c66511 100644 --- a/pylib/gyp/easy_xml.py +++ b/pylib/gyp/easy_xml.py @@ -6,6 +6,11 @@ import os import locale +try: + # reduce moved to functools in python3. + reduce +except NameError: + from functools import reduce def XmlToString(content, encoding='utf-8', pretty=False): """ Writes the XML content to disk, touching the file only if it has changed. @@ -80,7 +85,7 @@ def _ConstructContentList(xml_parts, specification, pretty, level=0): # Optionally in second position is a dictionary of the attributes. 
rest = specification[1:] if rest and isinstance(rest[0], dict): - for at, val in sorted(rest[0].iteritems()): + for at, val in sorted(rest[0].items()): xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True))) rest = rest[1:] if rest: diff --git a/pylib/gyp/easy_xml_test.py b/pylib/gyp/easy_xml_test.py index df643549..a1fdb188 100755 --- a/pylib/gyp/easy_xml_test.py +++ b/pylib/gyp/easy_xml_test.py @@ -8,13 +8,16 @@ import gyp.easy_xml as easy_xml import unittest -import StringIO +try: + from StringIO import StringIO +except ImportError: + from io import StringIO class TestSequenceFunctions(unittest.TestCase): def setUp(self): - self.stderr = StringIO.StringIO() + self.stderr = StringIO() def test_EasyXml_simple(self): self.assertEqual( diff --git a/pylib/gyp/flock_tool.py b/pylib/gyp/flock_tool.py index b38d8660..81fb79d1 100755 --- a/pylib/gyp/flock_tool.py +++ b/pylib/gyp/flock_tool.py @@ -39,7 +39,7 @@ def ExecFlock(self, lockfile, *cmd_list): # where fcntl.flock(fd, LOCK_EX) always fails # with EBADF, that's why we use this F_SETLK # hack instead. - fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666) + fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0o666) if sys.platform.startswith('aix'): # Python on AIX is compiled with LARGEFILE support, which changes the # struct size. diff --git a/pylib/gyp/generator/analyzer.py b/pylib/gyp/generator/analyzer.py index 921c1a6b..b3484dcb 100644 --- a/pylib/gyp/generator/analyzer.py +++ b/pylib/gyp/generator/analyzer.py @@ -62,6 +62,8 @@ then the "all" target includes "b1" and "b2". """ +from __future__ import print_function + import gyp.common import gyp.ninja_syntax as ninja_syntax import json @@ -155,7 +157,7 @@ def _AddSources(sources, base_path, base_path_components, result): continue result.append(base_path + source) if debug: - print 'AddSource', org_source, result[len(result) - 1] + print('AddSource', org_source, result[len(result) - 1]) def _ExtractSourcesFromAction(action, base_path, base_path_components, @@ -185,7 +187,7 @@ def _ExtractSources(target, target_dict, toplevel_dir): base_path += '/' if debug: - print 'ExtractSources', target, base_path + print('ExtractSources', target, base_path) results = [] if 'sources' in target_dict: @@ -278,7 +280,7 @@ def _WasBuildFileModified(build_file, data, files, toplevel_dir): the root of the source tree.""" if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files: if debug: - print 'gyp file modified', build_file + print('gyp file modified', build_file) return True # First element of included_files is the file itself. @@ -291,8 +293,8 @@ def _WasBuildFileModified(build_file, data, files, toplevel_dir): _ToGypPath(gyp.common.UnrelativePath(include_file, build_file)) if _ToLocalPath(toplevel_dir, rel_include_file) in files: if debug: - print 'included gyp file modified, gyp_file=', build_file, \ - 'included file=', rel_include_file + print('included gyp file modified, gyp_file=', build_file, \ + 'included file=', rel_include_file) return True return False @@ -373,7 +375,7 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, # If a build file (or any of its included files) is modified we assume all # targets in the file are modified. 
if build_file_in_files[build_file]: - print 'matching target from modified build file', target_name + print('matching target from modified build file', target_name) target.match_status = MATCH_STATUS_MATCHES matching_targets.append(target) else: @@ -381,7 +383,7 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, toplevel_dir) for source in sources: if _ToGypPath(os.path.normpath(source)) in files: - print 'target', target_name, 'matches', source + print('target', target_name, 'matches', source) target.match_status = MATCH_STATUS_MATCHES matching_targets.append(target) break @@ -433,7 +435,7 @@ def _DoesTargetDependOnMatchingTargets(target): for dep in target.deps: if _DoesTargetDependOnMatchingTargets(dep): target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY - print '\t', target.name, 'matches by dep', dep.name + print('\t', target.name, 'matches by dep', dep.name) return True target.match_status = MATCH_STATUS_DOESNT_MATCH return False @@ -445,7 +447,7 @@ def _GetTargetsDependingOnMatchingTargets(possible_targets): supplied as input to analyzer. possible_targets: targets to search from.""" found = [] - print 'Targets that matched by dependency:' + print('Targets that matched by dependency:') for target in possible_targets: if _DoesTargetDependOnMatchingTargets(target): found.append(target) @@ -484,12 +486,13 @@ def _AddCompileTargets(target, roots, add_if_no_ancestor, result): (add_if_no_ancestor or target.requires_build)) or (target.is_static_library and add_if_no_ancestor and not target.is_or_has_linked_ancestor)): - print '\t\tadding to compile targets', target.name, 'executable', \ - target.is_executable, 'added_to_compile_targets', \ - target.added_to_compile_targets, 'add_if_no_ancestor', \ - add_if_no_ancestor, 'requires_build', target.requires_build, \ - 'is_static_library', target.is_static_library, \ + print('\t\tadding to compile targets', target.name, 'executable', + target.is_executable, 'added_to_compile_targets', + target.added_to_compile_targets, 'add_if_no_ancestor', + add_if_no_ancestor, 'requires_build', target.requires_build, + 'is_static_library', target.is_static_library, 'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor + ) result.add(target) target.added_to_compile_targets = True @@ -500,7 +503,7 @@ def _GetCompileTargets(matching_targets, supplied_targets): supplied_targets: set of targets supplied to analyzer to search from.""" result = set() for target in matching_targets: - print 'finding compile targets for match', target.name + print('finding compile targets for match', target.name) _AddCompileTargets(target, supplied_targets, True, result) return result @@ -508,46 +511,46 @@ def _GetCompileTargets(matching_targets, supplied_targets): def _WriteOutput(params, **values): """Writes the output, either to stdout or a file is specified.""" if 'error' in values: - print 'Error:', values['error'] + print('Error:', values['error']) if 'status' in values: - print values['status'] + print(values['status']) if 'targets' in values: values['targets'].sort() - print 'Supplied targets that depend on changed files:' + print('Supplied targets that depend on changed files:') for target in values['targets']: - print '\t', target + print('\t', target) if 'invalid_targets' in values: values['invalid_targets'].sort() - print 'The following targets were not found:' + print('The following targets were not found:') for target in values['invalid_targets']: - print '\t', target + print('\t', target) if 'build_targets' in values: 
values['build_targets'].sort() - print 'Targets that require a build:' + print('Targets that require a build:') for target in values['build_targets']: - print '\t', target + print('\t', target) if 'compile_targets' in values: values['compile_targets'].sort() - print 'Targets that need to be built:' + print('Targets that need to be built:') for target in values['compile_targets']: - print '\t', target + print('\t', target) if 'test_targets' in values: values['test_targets'].sort() - print 'Test targets:' + print('Test targets:') for target in values['test_targets']: - print '\t', target + print('\t', target) output_path = params.get('generator_flags', {}).get( 'analyzer_output_path', None) if not output_path: - print json.dumps(values) + print(json.dumps(values)) return try: f = open(output_path, 'w') f.write(json.dumps(values) + '\n') f.close() except IOError as e: - print 'Error writing to output file', output_path, str(e) + print('Error writing to output file', output_path, str(e)) def _WasGypIncludeFileModified(params, files): @@ -556,7 +559,7 @@ def _WasGypIncludeFileModified(params, files): if params['options'].includes: for include in params['options'].includes: if _ToGypPath(os.path.normpath(include)) in files: - print 'Include file modified, assuming all changed', include + print('Include file modified, assuming all changed', include) return True return False @@ -638,13 +641,13 @@ def find_matching_test_target_names(self): set(self._root_targets))] else: test_targets = [x for x in test_targets_no_all] - print 'supplied test_targets' + print('supplied test_targets') for target_name in self._test_target_names: - print '\t', target_name - print 'found test_targets' + print('\t', target_name) + print('found test_targets') for target in test_targets: - print '\t', target.name - print 'searching for matching test targets' + print('\t', target.name) + print('searching for matching test targets') matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets) matching_test_targets_contains_all = (test_target_names_contains_all and set(matching_test_targets) & @@ -654,14 +657,14 @@ def find_matching_test_target_names(self): # 'all' is subsequentely added to the matching names below. matching_test_targets = [x for x in (set(matching_test_targets) & set(test_targets_no_all))] - print 'matched test_targets' + print('matched test_targets') for target in matching_test_targets: - print '\t', target.name + print('\t', target.name) matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1] for target in matching_test_targets] if matching_test_targets_contains_all: matching_target_names.append('all') - print '\tall' + print('\tall') return matching_target_names def find_matching_compile_target_names(self): @@ -669,7 +672,7 @@ def find_matching_compile_target_names(self): assert self.is_build_impacted(); # Compile targets are found by searching up from changed targets. # Reset the visited status for _GetBuildTargets. 
- for target in self._name_to_target.itervalues(): + for target in self._name_to_target.values(): target.visited = False supplied_targets = _LookupTargets(self._supplied_target_names_no_all(), @@ -677,10 +680,10 @@ def find_matching_compile_target_names(self): if 'all' in self._supplied_target_names(): supplied_targets = [x for x in (set(supplied_targets) | set(self._root_targets))] - print 'Supplied test_targets & compile_targets' + print('Supplied test_targets & compile_targets') for target in supplied_targets: - print '\t', target.name - print 'Finding compile targets' + print('\t', target.name) + print('Finding compile targets') compile_targets = _GetCompileTargets(self._changed_targets, supplied_targets) return [gyp.common.ParseQualifiedTarget(target.name)[1] @@ -699,7 +702,7 @@ def GenerateOutput(target_list, target_dicts, data, params): toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir)) if debug: - print 'toplevel_dir', toplevel_dir + print('toplevel_dir', toplevel_dir) if _WasGypIncludeFileModified(params, config.files): result_dict = { 'status': all_changed_string, diff --git a/pylib/gyp/generator/cmake.py b/pylib/gyp/generator/cmake.py index a2b96291..4a2041cf 100644 --- a/pylib/gyp/generator/cmake.py +++ b/pylib/gyp/generator/cmake.py @@ -28,6 +28,8 @@ CMakeLists.txt file. """ +from __future__ import print_function + import multiprocessing import os import signal @@ -36,6 +38,12 @@ import gyp.common import gyp.xcode_emulation +try: + # maketrans moved to str in python3. + _maketrans = string.maketrans +except NameError: + _maketrans = str.maketrans + generator_default_variables = { 'EXECUTABLE_PREFIX': '', 'EXECUTABLE_SUFFIX': '', @@ -238,7 +246,7 @@ def StringToCMakeTargetName(a): Invalid for make: ':' Invalid for unknown reasons but cause failures: '.' """ - return a.translate(string.maketrans(' /():."', '_______')) + return a.translate(_maketrans(' /():."', '_______')) def WriteActions(target_name, actions, extra_sources, extra_deps, @@ -644,8 +652,8 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type) if cmake_target_type is None: - print ('Target %s has unknown target type %s, skipping.' % - ( target_name, target_type ) ) + print('Target %s has unknown target type %s, skipping.' 
% + ( target_name, target_type )) return SetVariable(output, 'TARGET', target_name) @@ -868,8 +876,8 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX'] elif target_type != 'executable': - print ('ERROR: What output file should be generated?', - 'type', target_type, 'target', target_name) + print(('ERROR: What output file should be generated?', + 'type', target_type, 'target', target_name)) product_prefix = spec.get('product_prefix', default_product_prefix) product_name = spec.get('product_name', default_product_name) @@ -1207,11 +1215,11 @@ def PerformBuild(data, configurations, params): output_dir, config_name)) arguments = ['cmake', '-G', 'Ninja'] - print 'Generating [%s]: %s' % (config_name, arguments) + print('Generating [%s]: %s' % (config_name, arguments)) subprocess.check_call(arguments, cwd=build_dir) arguments = ['ninja', '-C', build_dir] - print 'Building [%s]: %s' % (config_name, arguments) + print('Building [%s]: %s' % (config_name, arguments)) subprocess.check_call(arguments) @@ -1230,7 +1238,7 @@ def GenerateOutput(target_list, target_dicts, data, params): GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) else: - config_names = target_dicts[target_list[0]]['configurations'].keys() + config_names = target_dicts[target_list[0]]['configurations'] if params['parallel']: try: pool = multiprocessing.Pool(len(config_names)) @@ -1239,7 +1247,7 @@ def GenerateOutput(target_list, target_dicts, data, params): arglists.append((target_list, target_dicts, data, params, config_name)) pool.map(CallGenerateOutputForConfig, arglists) - except KeyboardInterrupt, e: + except KeyboardInterrupt as e: pool.terminate() raise e else: diff --git a/pylib/gyp/generator/dump_dependency_json.py b/pylib/gyp/generator/dump_dependency_json.py index 160eafe2..2bf3f397 100644 --- a/pylib/gyp/generator/dump_dependency_json.py +++ b/pylib/gyp/generator/dump_dependency_json.py @@ -2,6 +2,8 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +from __future__ import print_function + import collections import os import gyp @@ -96,4 +98,4 @@ def GenerateOutput(target_list, target_dicts, data, params): f = open(filename, 'w') json.dump(edges, f) f.close() - print 'Wrote json to %s.' % filename + print('Wrote json to %s.' % filename) diff --git a/pylib/gyp/generator/eclipse.py b/pylib/gyp/generator/eclipse.py index 3544347b..d039f03a 100644 --- a/pylib/gyp/generator/eclipse.py +++ b/pylib/gyp/generator/eclipse.py @@ -141,7 +141,7 @@ def GetAllIncludeDirectories(target_list, target_dicts, compiler_includes_list.append(include_dir) # Find standard gyp include dirs. 
- if config.has_key('include_dirs'): + if 'include_dirs' in config: include_dirs = config['include_dirs'] for shared_intermediate_dir in shared_intermediate_dirs: for include_dir in include_dirs: @@ -272,7 +272,7 @@ def WriteMacros(out, eclipse_langs, defines): out.write(' \n') for lang in eclipse_langs: out.write(' \n' % lang) - for key in sorted(defines.iterkeys()): + for key in sorted(defines.keys()): out.write(' %s%s\n' % (escape(key), escape(defines[key]))) out.write(' \n') @@ -418,7 +418,7 @@ def GenerateOutput(target_list, target_dicts, data, params): GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) else: - config_names = target_dicts[target_list[0]]['configurations'].keys() + config_names = target_dicts[target_list[0]]['configurations'] for config_name in config_names: GenerateOutputForConfig(target_list, target_dicts, data, params, config_name) diff --git a/pylib/gyp/generator/gypd.py b/pylib/gyp/generator/gypd.py index 3efdb996..78eeaa61 100644 --- a/pylib/gyp/generator/gypd.py +++ b/pylib/gyp/generator/gypd.py @@ -88,7 +88,7 @@ def GenerateOutput(target_list, target_dicts, data, params): if not output_file in output_files: output_files[output_file] = input_file - for output_file, input_file in output_files.iteritems(): + for output_file, input_file in output_files.items(): output = open(output_file, 'w') pprint.pprint(data[input_file], output) output.close() diff --git a/pylib/gyp/generator/make.py b/pylib/gyp/generator/make.py index fb4f9185..2057e3a9 100644 --- a/pylib/gyp/generator/make.py +++ b/pylib/gyp/generator/make.py @@ -21,6 +21,8 @@ # toplevel Makefile. It may make sense to generate some .mk files on # the side to keep the the files readable. +from __future__ import print_function + import os import re import sys @@ -668,7 +670,7 @@ def _ValidateSourcesForOSX(spec, all_sources): basenames.setdefault(basename, []).append(source) error = '' - for basename, files in basenames.iteritems(): + for basename, files in basenames.items(): if len(files) > 1: error += ' %s: %s\n' % (basename, ' '.join(files)) @@ -816,7 +818,7 @@ def Write(self, qualified_target, base_path, output_filename, spec, configs, gyp.xcode_emulation.MacPrefixHeader( self.xcode_settings, lambda p: Sourceify(self.Absolutify(p)), self.Pchify)) - sources = filter(Compilable, all_sources) + sources = [x for x in all_sources if Compilable(x)] if sources: self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1) extensions = set([os.path.splitext(s)[1] for s in sources]) @@ -945,7 +947,7 @@ def WriteActions(self, actions, extra_sources, extra_outputs, '%s%s' % (name, cd_action, command)) self.WriteLn() - outputs = map(self.Absolutify, outputs) + outputs = [self.Absolutify(o) for o in outputs] # The makefile rules are all relative to the top dir, but the gyp actions # are defined relative to their containing dir. 
This replaces the obj # variable for the action rule with an absolute version so that the output @@ -1035,7 +1037,7 @@ def WriteRules(self, rules, extra_sources, extra_outputs, outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] - outputs = map(self.Absolutify, outputs) + outputs = [self.Absolutify(o) for o in outputs] all_outputs += outputs # Only write the 'obj' and 'builddir' rules for the "primary" output # (:1); it's superfluous for the "extra outputs", and this avoids @@ -1233,11 +1235,11 @@ def WriteSources(self, configs, deps, sources, self.WriteList(cflags_objcc, 'CFLAGS_OBJCC_%s' % configname) includes = config.get('include_dirs') if includes: - includes = map(Sourceify, map(self.Absolutify, includes)) + includes = [Sourceify(self.Absolutify(include)) for include in includes] self.WriteList(includes, 'INCS_%s' % configname, prefix='-I') compilable = filter(Compilable, sources) - objs = map(self.Objectify, map(self.Absolutify, map(Target, compilable))) + objs = [self.Objectify(self.Absolutify(Target(x))) for x in compilable] self.WriteList(objs, 'OBJS') for obj in objs: @@ -1309,7 +1311,7 @@ def WriteSources(self, configs, deps, sources, # If there are any object files in our input file list, link them into our # output. - extra_link_deps += filter(Linkable, sources) + extra_link_deps += [source for source in sources if Linkable(source)] self.WriteLn() @@ -1377,8 +1379,8 @@ def ComputeOutputBasename(self, spec): elif self.type == 'none': target = '%s.stamp' % target elif self.type != 'executable': - print ("ERROR: What output file should be generated?", - "type", self.type, "target", target) + print(("ERROR: What output file should be generated?", + "type", self.type, "target", target)) target_prefix = spec.get('product_prefix', target_prefix) target = spec.get('product_name', target) @@ -1542,9 +1544,9 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, # Postbuilds expect to be run in the gyp file's directory, so insert an # implicit postbuild to cd to there. postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path])) - for i in xrange(len(postbuilds)): - if not postbuilds[i].startswith('$'): - postbuilds[i] = EscapeShellArgument(postbuilds[i]) + for i, postbuild in enumerate(postbuilds): + if not postbuild.startswith('$'): + postbuilds[i] = EscapeShellArgument(postbuild) self.WriteLn('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output)) self.WriteLn('%s: POSTBUILDS := %s' % ( QuoteSpaces(self.output), ' '.join(postbuilds))) @@ -1634,7 +1636,7 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all, postbuilds=postbuilds) else: - print "WARNING: no output for", self.type, target + print("WARNING: no output for", self.type, target) # Add an alias for each target (if there are any outputs). # Installable target aliases are created below. 
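The make.py hunks above and below replace map() and filter() calls with list comprehensions and an xrange(len(...)) loop with enumerate(). The sketch below shows why under Python 3 semantics; quote_spaces and the sample paths are hypothetical stand-ins, not gyp code.

```python
# Sketch (not gyp code) of why map()/filter() results are materialized and
# xrange(len(...)) loops become enumerate() in the make.py hunks.
from __future__ import print_function

def quote_spaces(path):          # hypothetical stand-in for QuoteSpaces
  return path.replace(' ', '\\ ')

outputs = ['out/My App', 'out/other.o']

# On Python 3, map() and filter() return one-shot iterators, so results that
# are indexed or iterated more than once are rewritten as list comprehensions.
quoted = [quote_spaces(o) for o in outputs]
print(quoted[0], len(quoted))

postbuilds = ['$(strip_cmd)', 'codesign app']
# enumerate() replaces 'for i in xrange(len(postbuilds))', which no longer
# exists on Python 3.
for i, postbuild in enumerate(postbuilds):
  if not postbuild.startswith('$'):
    postbuilds[i] = "'%s'" % postbuild
print(postbuilds)
```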
@@ -1741,7 +1743,7 @@ def WriteMakeRule(self, outputs, inputs, actions=None, comment=None, output is just a name to run the rule command: (optional) command name to generate unambiguous labels """ - outputs = map(QuoteSpaces, outputs) + outputs = [QuoteSpaces(o) for o in outputs] inputs = map(QuoteSpaces, inputs) if comment: @@ -1986,7 +1988,7 @@ def PerformBuild(data, configurations, params): if options.toplevel_dir and options.toplevel_dir != '.': arguments += '-C', options.toplevel_dir arguments.append('BUILDTYPE=' + config) - print 'Building [%s]: %s' % (config, arguments) + print('Building [%s]: %s' % (config, arguments)) subprocess.check_call(arguments) diff --git a/pylib/gyp/generator/msvs.py b/pylib/gyp/generator/msvs.py index 8fe9e5af..e8a2b366 100644 --- a/pylib/gyp/generator/msvs.py +++ b/pylib/gyp/generator/msvs.py @@ -2,6 +2,9 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +from __future__ import print_function + +import collections import copy import ntpath import os @@ -23,16 +26,6 @@ from gyp.common import GypError from gyp.common import OrderedSet -# TODO: Remove once bots are on 2.7, http://crbug.com/241769 -def _import_OrderedDict(): - import collections - try: - return collections.OrderedDict - except AttributeError: - import gyp.ordered_dict - return gyp.ordered_dict.OrderedDict -OrderedDict = _import_OrderedDict() - # Regular expression for validating Visual Studio GUIDs. If the GUID # contains lowercase hex letters, MSVS will be fine. However, @@ -202,7 +195,7 @@ def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None, if not prefix: prefix = [] result = [] excluded_result = [] - folders = OrderedDict() + folders = collections.OrderedDict() # Gather files into the final result, excluded, or folders. for s in sources: if len(s) == 1: @@ -469,7 +462,7 @@ def _AddCustomBuildToolForMSVS(p, spec, primary_input, 'CommandLine': cmd, }) # Add to the properties of primary input for each config. - for config_name, c_data in spec['configurations'].iteritems(): + for config_name, c_data in spec['configurations'].items(): p.AddFileConfig(_FixPath(primary_input), _ConfigFullName(config_name, c_data), tools=[tool]) @@ -775,8 +768,8 @@ def _Replace(match): # the VCProj but cause the same problem on the final command-line. Moving # the item to the end of the list does works, but that's only possible if # there's only one such item. Let's just warn the user. 
- print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' + - 'quotes in ' + s) + print(('Warning: MSVS may misinterpret the odd number of ' + + 'quotes in ' + s), file=sys.stderr) return s @@ -991,7 +984,7 @@ def _ValidateSourcesForMSVSProject(spec, version): basenames.setdefault(basename, []).append(source) error = '' - for basename, files in basenames.iteritems(): + for basename, files in basenames.items(): if len(files) > 1: error += ' %s: %s\n' % (basename, ' '.join(files)) @@ -1023,7 +1016,7 @@ def _GenerateMSVSProject(project, options, version, generator_flags): relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir) config_type = _GetMSVSConfigurationType(spec, project.build_file) - for config_name, config in spec['configurations'].iteritems(): + for config_name, config in spec['configurations'].items(): _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config) # MSVC08 and prior version cannot handle duplicate basenames in the same @@ -1392,10 +1385,10 @@ def _ConvertToolsToExpectedForm(tools): A list of Tool objects. """ tool_list = [] - for tool, settings in tools.iteritems(): + for tool, settings in tools.items(): # Collapse settings with lists. settings_fixed = {} - for setting, value in settings.iteritems(): + for setting, value in settings.items(): if type(value) == list: if ((tool == 'VCLinkerTool' and setting == 'AdditionalDependencies') or @@ -1570,7 +1563,7 @@ def _IdlFilesHandledNonNatively(spec, sources): def _GetPrecompileRelatedFiles(spec): # Gather a list of precompiled header related sources. precompiled_related = [] - for _, config in spec['configurations'].iteritems(): + for _, config in spec['configurations'].items(): for k in precomp_keys: f = config.get(k) if f: @@ -1581,7 +1574,7 @@ def _GetPrecompileRelatedFiles(spec): def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, list_excluded): exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl) - for file_name, excluded_configs in exclusions.iteritems(): + for file_name, excluded_configs in exclusions.items(): if (not list_excluded and len(excluded_configs) == len(spec['configurations'])): # If we're not listing excluded files, then they won't appear in the @@ -1598,7 +1591,7 @@ def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl): # Exclude excluded sources from being built. for f in excluded_sources: excluded_configs = [] - for config_name, config in spec['configurations'].iteritems(): + for config_name, config in spec['configurations'].items(): precomped = [_FixPath(config.get(i, '')) for i in precomp_keys] # Don't do this for ones that are precompiled header related. if f not in precomped: @@ -1608,7 +1601,7 @@ def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl): # Exclude them now. for f in excluded_idl: excluded_configs = [] - for config_name, config in spec['configurations'].iteritems(): + for config_name, config in spec['configurations'].items(): excluded_configs.append((config_name, config)) exclusions[f] = excluded_configs return exclusions @@ -1617,7 +1610,7 @@ def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl): def _AddToolFilesToMSVS(p, spec): # Add in tool files (rules). 
tool_files = OrderedSet() - for _, config in spec['configurations'].iteritems(): + for _, config in spec['configurations'].items(): for f in config.get('msvs_tool_files', []): tool_files.add(f) for f in tool_files: @@ -1630,7 +1623,7 @@ def _HandlePreCompiledHeaders(p, sources, spec): # kind (i.e. C vs. C++) as the precompiled header source stub needs # to have use of precompiled headers disabled. extensions_excluded_from_precompile = [] - for config_name, config in spec['configurations'].iteritems(): + for config_name, config in spec['configurations'].items(): source = config.get('msvs_precompiled_source') if source: source = _FixPath(source) @@ -1651,7 +1644,7 @@ def DisableForSourceTree(source_tree): else: basename, extension = os.path.splitext(source) if extension in extensions_excluded_from_precompile: - for config_name, config in spec['configurations'].iteritems(): + for config_name, config in spec['configurations'].items(): tool = MSVSProject.Tool('VCCLCompilerTool', {'UsePrecompiledHeader': '0', 'ForcedIncludeFiles': '$(NOINHERIT)'}) @@ -1702,7 +1695,7 @@ def _WriteMSVSUserFile(project_path, version, spec): return # Nothing to add # Write out the user file. user_file = _CreateMSVSUserFile(project_path, version, spec) - for config_name, c_data in spec['configurations'].iteritems(): + for config_name, c_data in spec['configurations'].items(): user_file.AddDebugSettings(_ConfigFullName(config_name, c_data), action, environment, working_directory) user_file.WriteIfChanged() @@ -1756,7 +1749,7 @@ def _GetPathDict(root, path): def _DictsToFolders(base_path, bucket, flat): # Convert to folders recursively. children = [] - for folder, contents in bucket.iteritems(): + for folder, contents in bucket.items(): if type(contents) == dict: folder_children = _DictsToFolders(os.path.join(base_path, folder), contents, flat) @@ -1778,8 +1771,8 @@ def _CollapseSingles(parent, node): # such projects up one level. if (type(node) == dict and len(node) == 1 and - node.keys()[0] == parent + '.vcproj'): - return node[node.keys()[0]] + next(iter(node)) == parent + '.vcproj'): + return node[next(iter(node))] if type(node) != dict: return node for child in node: @@ -1798,8 +1791,8 @@ def _GatherSolutionFolders(sln_projects, project_objects, flat): # Walk down from the top until we hit a folder that has more than one entry. # In practice, this strips the top-level "src/" dir from the hierarchy in # the solution. - while len(root) == 1 and type(root[root.keys()[0]]) == dict: - root = root[root.keys()[0]] + while len(root) == 1 and type(root[next(iter(root))]) == dict: + root = root[next(iter(root))] # Collapse singles. root = _CollapseSingles('', root) # Merge buckets until everything is a root entry. @@ -1828,7 +1821,7 @@ def _GetPlatformOverridesOfProject(spec): # Prepare a dict indicating which project configurations are used for which # solution configurations for this target. 
config_platform_overrides = {} - for config_name, c in spec['configurations'].iteritems(): + for config_name, c in spec['configurations'].items(): config_fullname = _ConfigFullName(config_name, c) platform = c.get('msvs_target_platform', _ConfigPlatform(c)) fixed_config_fullname = '%s|%s' % ( @@ -1967,7 +1960,7 @@ def PerformBuild(data, configurations, params): msvs_version = params['msvs_version'] devenv = os.path.join(msvs_version.path, 'Common7', 'IDE', 'devenv.com') - for build_file, build_file_dict in data.iteritems(): + for build_file, build_file_dict in data.items(): (build_file_root, build_file_ext) = os.path.splitext(build_file) if build_file_ext != '.gyp': continue @@ -1977,7 +1970,7 @@ def PerformBuild(data, configurations, params): for config in configurations: arguments = [devenv, sln_path, '/Build', config] - print 'Building [%s]: %s' % (config, arguments) + print('Building [%s]: %s' % (config, arguments)) rtn = subprocess.check_call(arguments) @@ -2029,7 +2022,7 @@ def GenerateOutput(target_list, target_dicts, data, params): configs = set() for qualified_target in target_list: spec = target_dicts[qualified_target] - for config_name, config in spec['configurations'].iteritems(): + for config_name, config in spec['configurations'].items(): configs.add(_ConfigFullName(config_name, config)) configs = list(configs) @@ -2072,7 +2065,7 @@ def GenerateOutput(target_list, target_dicts, data, params): if generator_flags.get('msvs_error_on_missing_sources', False): raise GypError(error_message) else: - print >> sys.stdout, "Warning: " + error_message + print("Warning: " + error_message, file=sys.stdout) def _GenerateMSBuildFiltersFile(filters_path, source_files, @@ -2669,7 +2662,7 @@ def _GetConfigurationCondition(name, settings): def _GetMSBuildProjectConfigurations(configurations): group = ['ItemGroup', {'Label': 'ProjectConfigurations'}] - for (name, settings) in sorted(configurations.iteritems()): + for (name, settings) in sorted(configurations.items()): configuration, platform = _GetConfigurationAndPlatform(name, settings) designation = '%s|%s' % (configuration, platform) group.append( @@ -2742,7 +2735,7 @@ def _GetMSBuildGlobalProperties(spec, version, guid, gyp_file_name): def _GetMSBuildConfigurationDetails(spec, build_file): properties = {} - for name, settings in spec['configurations'].iteritems(): + for name, settings in spec['configurations'].items(): msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file) condition = _GetConfigurationCondition(name, settings) character_set = msbuild_attributes.get('CharacterSet') @@ -2776,9 +2769,9 @@ def _GetMSBuildPropertySheets(configurations): user_props = r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props' additional_props = {} props_specified = False - for name, settings in sorted(configurations.iteritems()): + for name, settings in sorted(configurations.items()): configuration = _GetConfigurationCondition(name, settings) - if settings.has_key('msbuild_props'): + if 'msbuild_props' in settings: additional_props[configuration] = _FixPaths(settings['msbuild_props']) props_specified = True else: @@ -2798,7 +2791,7 @@ def _GetMSBuildPropertySheets(configurations): ] else: sheets = [] - for condition, props in additional_props.iteritems(): + for condition, props in additional_props.items(): import_group = [ 'ImportGroup', {'Label': 'PropertySheets', @@ -2831,7 +2824,7 @@ def _ConvertMSVSBuildAttributes(spec, config, build_file): elif a == 'ConfigurationType': msbuild_attributes[a] = 
_ConvertMSVSConfigurationType(msvs_attributes[a]) else: - print 'Warning: Do not know how to convert MSVS attribute ' + a + print('Warning: Do not know how to convert MSVS attribute ' + a) return msbuild_attributes @@ -2927,7 +2920,7 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file): new_paths = '$(ExecutablePath);' + ';'.join(new_paths) properties = {} - for (name, configuration) in sorted(configurations.iteritems()): + for (name, configuration) in sorted(configurations.items()): condition = _GetConfigurationCondition(name, configuration) attributes = _GetMSBuildAttributes(spec, configuration, build_file) msbuild_settings = configuration['finalized_msbuild_settings'] @@ -2952,7 +2945,7 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file): _AddConditionalProperty(properties, condition, 'ExecutablePath', new_paths) tool_settings = msbuild_settings.get('', {}) - for name, value in sorted(tool_settings.iteritems()): + for name, value in sorted(tool_settings.items()): formatted_value = _GetValueFormattedForMSBuild('', name, value) _AddConditionalProperty(properties, condition, name, formatted_value) return _GetMSBuildPropertyGroup(spec, None, properties) @@ -3021,7 +3014,7 @@ def GetEdges(node): # NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG)) for name in reversed(properties_ordered): values = properties[name] - for value, conditions in sorted(values.iteritems()): + for value, conditions in sorted(values.items()): if len(conditions) == num_configurations: # If the value is the same all configurations, # just add one unconditional entry. @@ -3034,18 +3027,18 @@ def GetEdges(node): def _GetMSBuildToolSettingsSections(spec, configurations): groups = [] - for (name, configuration) in sorted(configurations.iteritems()): + for (name, configuration) in sorted(configurations.items()): msbuild_settings = configuration['finalized_msbuild_settings'] group = ['ItemDefinitionGroup', {'Condition': _GetConfigurationCondition(name, configuration)} ] - for tool_name, tool_settings in sorted(msbuild_settings.iteritems()): + for tool_name, tool_settings in sorted(msbuild_settings.items()): # Skip the tool named '' which is a holder of global settings handled # by _GetMSBuildConfigurationGlobalProperties. if tool_name: if tool_settings: tool = [tool_name] - for name, value in sorted(tool_settings.iteritems()): + for name, value in sorted(tool_settings.items()): formatted_value = _GetValueFormattedForMSBuild(tool_name, name, value) tool.append([name, formatted_value]) @@ -3078,8 +3071,8 @@ def _FinalizeMSBuildSettings(spec, configuration): for ignored_setting in ignored_settings: value = configuration.get(ignored_setting) if value: - print ('Warning: The automatic conversion to MSBuild does not handle ' - '%s. Ignoring setting of %s' % (ignored_setting, str(value))) + print('Warning: The automatic conversion to MSBuild does not handle ' + '%s. 
Ignoring setting of %s' % (ignored_setting, str(value))) defines = [_EscapeCppDefineForMSBuild(d) for d in defines] disabled_warnings = _GetDisabledWarnings(configuration) @@ -3245,7 +3238,7 @@ def _AddSources2(spec, sources, exclusions, grouped_sources, {'Condition': condition}, 'true']) # Add precompile if needed - for config_name, configuration in spec['configurations'].iteritems(): + for config_name, configuration in spec['configurations'].items(): precompiled_source = configuration.get('msvs_precompiled_source', '') if precompiled_source != '': precompiled_source = _FixPath(precompiled_source) @@ -3291,7 +3284,7 @@ def _GetMSBuildProjectReferences(project): ['Project', guid], ['ReferenceOutputAssembly', 'false'] ] - for config in dependency.spec.get('configurations', {}).itervalues(): + for config in dependency.spec.get('configurations', {}).values(): if config.get('msvs_use_library_dependency_inputs', 0): project_ref.append(['UseLibraryDependencyInputs', 'true']) break @@ -3360,7 +3353,7 @@ def _GenerateMSBuildProject(project, options, version, generator_flags): extension_to_rule_name) missing_sources = _VerifySourcesExist(sources, project_dir) - for configuration in configurations.itervalues(): + for configuration in configurations.values(): _FinalizeMSBuildSettings(spec, configuration) # Add attributes to root element @@ -3486,7 +3479,7 @@ def _GenerateActionsForMSBuild(spec, actions_to_add): """ sources_handled_by_action = OrderedSet() actions_spec = [] - for primary_input, actions in actions_to_add.iteritems(): + for primary_input, actions in actions_to_add.items(): inputs = OrderedSet() outputs = OrderedSet() descriptions = [] diff --git a/pylib/gyp/generator/msvs_test.py b/pylib/gyp/generator/msvs_test.py index c0b021df..838d236a 100755 --- a/pylib/gyp/generator/msvs_test.py +++ b/pylib/gyp/generator/msvs_test.py @@ -7,13 +7,16 @@ import gyp.generator.msvs as msvs import unittest -import StringIO +try: + from StringIO import StringIO +except ImportError: + from io import StringIO class TestSequenceFunctions(unittest.TestCase): def setUp(self): - self.stderr = StringIO.StringIO() + self.stderr = StringIO() def test_GetLibraries(self): self.assertEqual( diff --git a/pylib/gyp/generator/ninja.py b/pylib/gyp/generator/ninja.py index 6de87b70..66faabca 100644 --- a/pylib/gyp/generator/ninja.py +++ b/pylib/gyp/generator/ninja.py @@ -2,6 +2,8 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
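Aside (illustrative only, not part of the patch): the hunk below adds the print_function future import so the module can use print as a function on Python 2 as well as 3; the same conversion recurs across the generators above. The old 'print >> stream, msg' form becomes the file= keyword argument. A small self-contained sketch:

    from __future__ import print_function   # no-op on Python 3, required on 2
    import sys

    config = 'Debug'                          # example values only
    arguments = ['ninja', '-C', 'out/Debug']
    print('Building [%s]: %s' % (config, arguments))
    print('Warning: example message', file=sys.stderr)  # was: print >>sys.stderr, ...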
+from __future__ import print_function + import collections import copy import hashlib @@ -18,7 +20,10 @@ import gyp.msvs_emulation import gyp.MSVSUtil as MSVSUtil import gyp.xcode_emulation -from cStringIO import StringIO +try: + from cStringIO import StringIO +except ImportError: + from io import StringIO from gyp.common import GetEnvironFallback import gyp.ninja_syntax as ninja_syntax @@ -350,7 +355,7 @@ def WriteCollapsedDependencies(self, name, targets, order_only=None): Uses a stamp file if necessary.""" - assert targets == filter(None, targets), targets + assert targets == [t for t in targets if t], targets if len(targets) == 0: assert not order_only return None @@ -427,8 +432,8 @@ def WriteSpec(self, spec, config_name, generator_flags): compile_depends.append(target.PreCompileInput()) if target.uses_cpp: self.target.uses_cpp = True - actions_depends = filter(None, actions_depends) - compile_depends = filter(None, compile_depends) + actions_depends = [d for d in actions_depends if d] + compile_depends = [d for d in compile_depends if d] actions_depends = self.WriteCollapsedDependencies('actions_depends', actions_depends) compile_depends = self.WriteCollapsedDependencies('compile_depends', @@ -455,8 +460,8 @@ def WriteSpec(self, spec, config_name, generator_flags): try: sources = extra_sources + spec.get('sources', []) except TypeError: - print 'extra_sources: ', str(extra_sources) - print 'spec.get("sources"): ', str(spec.get('sources')) + print('extra_sources: ', str(extra_sources)) + print('spec.get("sources"): ', str(spec.get('sources'))) raise if sources: if self.flavor == 'mac' and len(self.archs) > 1: @@ -485,8 +490,9 @@ def WriteSpec(self, spec, config_name, generator_flags): if self.flavor != 'mac' or len(self.archs) == 1: link_deps += [self.GypPathToNinja(o) for o in obj_outputs] else: - print "Warning: Actions/rules writing object files don't work with " \ - "multiarch targets, dropping. (target %s)" % spec['target_name'] + print("Warning: Actions/rules writing object files don't work with " \ + "multiarch targets, dropping. (target %s)" % + spec['target_name']) elif self.flavor == 'mac' and len(self.archs) > 1: link_deps = collections.defaultdict(list) @@ -838,7 +844,7 @@ def WriteMacXCassets(self, xcassets, bundle_depends): 'XCASSETS_LAUNCH_IMAGE': 'launch-image', } settings = self.xcode_settings.xcode_settings[self.config_name] - for settings_key, arg_name in settings_to_arg.iteritems(): + for settings_key, arg_name in settings_to_arg.items(): value = settings.get(settings_key) if value: extra_arguments[arg_name] = value @@ -1772,7 +1778,7 @@ class MEMORYSTATUSEX(ctypes.Structure): # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM # on a 64 GB machine. - mem_limit = max(1, stat.ullTotalPhys / (5 * (2 ** 30))) # total / 5GB + mem_limit = max(1, stat.ullTotalPhys // (5 * (2 ** 30))) # total / 5GB hard_cap = max(1, int(os.environ.get('GYP_LINK_CONCURRENCY_MAX', 2**32))) return min(mem_limit, hard_cap) elif sys.platform.startswith('linux'): @@ -1784,14 +1790,14 @@ class MEMORYSTATUSEX(ctypes.Structure): if not match: continue # Allow 8Gb per link on Linux because Gold is quite memory hungry - return max(1, int(match.group(1)) / (8 * (2 ** 20))) + return max(1, int(match.group(1)) // (8 * (2 ** 20))) return 1 elif sys.platform == 'darwin': try: avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize'])) # A static library debug build of Chromium's unit_tests takes ~2.7GB, so # 4GB per ld process allows for some more bloat. 
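Aside (illustrative only, not part of the patch): the link-concurrency calculations above and below switch from / to // because Python 3 makes / true division, which returns a float even for two ints, while the callers here need an integer job count. Floor division behaves the same on Python 2 and 3. Sketch with made-up sizes:

    avail_bytes = 32 * (2 ** 30)             # pretend the OS reported 32 GiB
    per_link = 4 * (2 ** 30)                 # assume ~4 GiB per link step

    jobs = max(1, avail_bytes // per_link)   # an int on both Python 2 and 3
    # avail_bytes / per_link would be 8.0 on Python 3, which is not a usable
    # job count without an explicit int() conversion.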
- return max(1, avail_bytes / (4 * (2 ** 30))) # total / 4GB + return max(1, avail_bytes // (4 * (2 ** 30))) # total / 4GB except: return 1 else: @@ -1946,7 +1952,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value) # Support wrappers from environment variables too. - for key, value in os.environ.iteritems(): + for key, value in os.environ.items(): if key.lower().endswith('_wrapper'): key_prefix = key[:-len('_wrapper')] key_prefix = re.sub(r'\.HOST$', '.host', key_prefix) @@ -1966,7 +1972,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, configs, generator_flags) cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles( toplevel_build, generator_flags, shared_system_includes, OpenOutput) - for arch, path in sorted(cl_paths.iteritems()): + for arch, path in sorted(cl_paths.items()): if clang_cl: # If we have selected clang-cl, use that instead. path = clang_cl @@ -2381,6 +2387,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, qualified_target_for_hash = gyp.common.QualifiedTarget(build_file, name, toolset) + qualified_target_for_hash = qualified_target_for_hash.encode('utf-8') hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest() base_path = os.path.dirname(build_file) @@ -2447,7 +2454,7 @@ def PerformBuild(data, configurations, params): for config in configurations: builddir = os.path.join(options.toplevel_dir, 'out', config) arguments = ['ninja', '-C', builddir] - print 'Building [%s]: %s' % (config, arguments) + print('Building [%s]: %s' % (config, arguments)) subprocess.check_call(arguments) @@ -2475,7 +2482,7 @@ def GenerateOutput(target_list, target_dicts, data, params): GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) else: - config_names = target_dicts[target_list[0]]['configurations'].keys() + config_names = target_dicts[target_list[0]]['configurations'] if params['parallel']: try: pool = multiprocessing.Pool(len(config_names)) @@ -2484,7 +2491,7 @@ def GenerateOutput(target_list, target_dicts, data, params): arglists.append( (target_list, target_dicts, data, params, config_name)) pool.map(CallGenerateOutputForConfig, arglists) - except KeyboardInterrupt, e: + except KeyboardInterrupt as e: pool.terminate() raise e else: diff --git a/pylib/gyp/generator/ninja_test.py b/pylib/gyp/generator/ninja_test.py index 1767b2f4..1ad68e4f 100644 --- a/pylib/gyp/generator/ninja_test.py +++ b/pylib/gyp/generator/ninja_test.py @@ -8,7 +8,6 @@ import gyp.generator.ninja as ninja import unittest -import StringIO import sys import TestCommon diff --git a/pylib/gyp/generator/xcode.py b/pylib/gyp/generator/xcode.py index b35372a1..8bc22bed 100644 --- a/pylib/gyp/generator/xcode.py +++ b/pylib/gyp/generator/xcode.py @@ -2,6 +2,8 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +from __future__ import print_function + import filecmp import gyp.common import gyp.xcodeproj_file @@ -129,7 +131,7 @@ def __init__(self, gyp_path, path, build_file_dict): try: os.makedirs(self.path) self.created_dir = True - except OSError, e: + except OSError as e: if e.errno != errno.EEXIST: raise @@ -183,7 +185,7 @@ def Finalize1(self, xcode_targets, serialize_all_tests): # the tree tree view for UI display. # Any values set globally are applied to all configurations, then any # per-configuration values are applied. 
- for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems(): + for xck, xcv in self.build_file_dict.get('xcode_settings', {}).items(): xccl.SetBuildSetting(xck, xcv) if 'xcode_config_file' in self.build_file_dict: config_ref = self.project.AddOrGetFileInRootGroup( @@ -197,7 +199,7 @@ def Finalize1(self, xcode_targets, serialize_all_tests): if build_file_configuration_named: xcc = xccl.ConfigurationNamed(config_name) for xck, xcv in build_file_configuration_named.get('xcode_settings', - {}).iteritems(): + {}).items(): xcc.SetBuildSetting(xck, xcv) if 'xcode_config_file' in build_file_configuration_named: config_ref = self.project.AddOrGetFileInRootGroup( @@ -273,7 +275,7 @@ def Finalize1(self, xcode_targets, serialize_all_tests): script = script + "\n".join( ['export %s="%s"' % (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val)) - for (key, val) in command.get('environment').iteritems()]) + "\n" + for (key, val) in command.get('environment').items()]) + "\n" # Some test end up using sockets, files on disk, etc. and can get # confused if more then one test runs at a time. The generator @@ -444,7 +446,7 @@ def Write(self): dir=self.path) try: - output_file = os.fdopen(output_fd, 'wb') + output_file = os.fdopen(output_fd, 'w') self.project_file.Print(output_file) output_file.close() @@ -454,7 +456,7 @@ def Write(self): same = False try: same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False) - except OSError, e: + except OSError as e: if e.errno != errno.ENOENT: raise @@ -473,10 +475,10 @@ def Write(self): # # No way to get the umask without setting a new one? Set a safe one # and then set it back to the old value. - umask = os.umask(077) + umask = os.umask(0o77) os.umask(umask) - os.chmod(new_pbxproj_path, 0666 & ~umask) + os.chmod(new_pbxproj_path, 0o666 & ~umask) os.rename(new_pbxproj_path, pbxproj_path) except Exception: @@ -566,7 +568,7 @@ def EscapeXcodeDefine(s): def PerformBuild(data, configurations, params): options = params['options'] - for build_file, build_file_dict in data.iteritems(): + for build_file, build_file_dict in data.items(): (build_file_root, build_file_ext) = os.path.splitext(build_file) if build_file_ext != '.gyp': continue @@ -577,7 +579,7 @@ def PerformBuild(data, configurations, params): for config in configurations: arguments = ['xcodebuild', '-project', xcodeproj_path] arguments += ['-configuration', config] - print "Building [%s]: %s" % (config, arguments) + print("Building [%s]: %s" % (config, arguments)) subprocess.check_call(arguments) @@ -625,7 +627,7 @@ def GenerateOutput(target_list, target_dicts, data, params): skip_excluded_files = \ not generator_flags.get('xcode_list_excluded_files', True) xcode_projects = {} - for build_file, build_file_dict in data.iteritems(): + for build_file, build_file_dict in data.items(): (build_file_root, build_file_ext) = os.path.splitext(build_file) if build_file_ext != '.gyp': continue @@ -744,7 +746,7 @@ def GenerateOutput(target_list, target_dicts, data, params): xctarget_type = gyp.xcodeproj_file.PBXNativeTarget try: target_properties['productType'] = _types[type_bundle_key] - except KeyError, e: + except KeyError as e: gyp.common.ExceptionAppend(e, "-- unknown product type while " "writing target %s" % target_name) raise @@ -1016,22 +1018,21 @@ def GenerateOutput(target_list, target_dicts, data, params): makefile_name) # TODO(mark): try/close? Write to a temporary file and swap it only # if it's got changes? 
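Aside (illustrative only, not part of the patch): the umask/chmod hunk above rewrites 077 and 0666 as 0o77 and 0o666. Python 3 rejects the bare leading-zero octal form outright (SyntaxError), while the 0o prefix is also understood by Python 2.6+, so it can be used unconditionally. Tiny sketch against a throwaway file:

    import os
    import tempfile

    fd, path = tempfile.mkstemp()    # throwaway file just for the example
    os.close(fd)
    umask = os.umask(0o77)           # 077 would not even parse on Python 3
    os.umask(umask)                  # restore the previous umask immediately
    os.chmod(path, 0o666 & ~umask)
    os.remove(path)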
- makefile = open(makefile_path, 'wb') + makefile = open(makefile_path, 'w') # make will build the first target in the makefile by default. By # convention, it's called "all". List all (or at least one) # concrete output for each rule source as a prerequisite of the "all" # target. makefile.write('all: \\\n') - for concrete_output_index in \ - xrange(0, len(concrete_outputs_by_rule_source)): + for concrete_output_index, concrete_output_by_rule_source in \ + enumerate(concrete_outputs_by_rule_source): # Only list the first (index [0]) concrete output of each input # in the "all" target. Otherwise, a parallel make (-j > 1) would # attempt to process each input multiple times simultaneously. # Otherwise, "all" could just contain the entire list of # concrete_outputs_all. - concrete_output = \ - concrete_outputs_by_rule_source[concrete_output_index][0] + concrete_output = concrete_output_by_rule_source[0] if concrete_output_index == len(concrete_outputs_by_rule_source) - 1: eol = '' else: @@ -1047,8 +1048,8 @@ def GenerateOutput(target_list, target_dicts, data, params): # rule source. Collect the names of the directories that are # required. concrete_output_dirs = [] - for concrete_output_index in xrange(0, len(concrete_outputs)): - concrete_output = concrete_outputs[concrete_output_index] + for concrete_output_index, concrete_output in \ + enumerate(concrete_outputs): if concrete_output_index == 0: bol = '' else: @@ -1066,8 +1067,7 @@ def GenerateOutput(target_list, target_dicts, data, params): # the set of additional rule inputs, if any. prerequisites = [rule_source] prerequisites.extend(rule.get('inputs', [])) - for prerequisite_index in xrange(0, len(prerequisites)): - prerequisite = prerequisites[prerequisite_index] + for prerequisite_index, prerequisite in enumerate(prerequisites): if prerequisite_index == len(prerequisites) - 1: eol = '' else: @@ -1279,7 +1279,7 @@ def GenerateOutput(target_list, target_dicts, data, params): set_define = EscapeXcodeDefine(define) xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define) if 'xcode_settings' in configuration: - for xck, xcv in configuration['xcode_settings'].iteritems(): + for xck, xcv in configuration['xcode_settings'].items(): xcbc.SetBuildSetting(xck, xcv) if 'xcode_config_file' in configuration: config_ref = pbxp.AddOrGetFileInRootGroup( @@ -1287,7 +1287,7 @@ def GenerateOutput(target_list, target_dicts, data, params): xcbc.SetBaseConfiguration(config_ref) build_files = [] - for build_file, build_file_dict in data.iteritems(): + for build_file, build_file_dict in data.items(): if build_file.endswith('.gyp'): build_files.append(build_file) diff --git a/pylib/gyp/input.py b/pylib/gyp/input.py index 21b4606f..8ac47cb9 100644 --- a/pylib/gyp/input.py +++ b/pylib/gyp/input.py @@ -2,8 +2,9 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
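Aside (illustrative only, not part of the patch): the makefile-writing loops above drop xrange(), which does not exist on Python 3, in favour of enumerate(), which works on both versions and avoids the explicit indexing. A reduced sketch of the pattern (the input names are invented):

    prerequisites = ['rule_source.in', 'extra_input.txt']   # invented inputs
    lines = []
    for index, prerequisite in enumerate(prerequisites):
        # No continuation backslash on the last prerequisite line.
        eol = '' if index == len(prerequisites) - 1 else ' \\'
        lines.append('    %s%s' % (prerequisite, eol))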
-import ast +from __future__ import print_function +import ast import gyp.common import gyp.simple_copy import multiprocessing @@ -231,10 +232,10 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes, else: build_file_data = eval(build_file_contents, {'__builtins__': None}, None) - except SyntaxError, e: + except SyntaxError as e: e.filename = build_file_path raise - except Exception, e: + except Exception as e: gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path) raise @@ -254,7 +255,7 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes, else: LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, aux_data, None, check) - except Exception, e: + except Exception as e: gyp.common.ExceptionAppend(e, 'while reading includes of ' + build_file_path) raise @@ -291,7 +292,7 @@ def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data, subdict_path, include) # Recurse into subdictionaries. - for k, v in subdict.iteritems(): + for k, v in subdict.items(): if type(v) is dict: LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, None, check) @@ -456,7 +457,7 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, try: LoadTargetBuildFile(dependency, data, aux_data, variables, includes, depth, check, load_dependencies) - except Exception, e: + except Exception as e: gyp.common.ExceptionAppend( e, 'while loading dependencies of %s' % build_file_path) raise @@ -477,7 +478,7 @@ def CallLoadTargetBuildFile(global_flags, signal.signal(signal.SIGINT, signal.SIG_IGN) # Apply globals so that the worker process behaves the same. - for key, value in global_flags.iteritems(): + for key, value in global_flags.items(): globals()[key] = value SetGeneratorGlobals(generator_input_info) @@ -499,12 +500,12 @@ def CallLoadTargetBuildFile(global_flags, return (build_file_path, build_file_data, dependencies) - except GypError, e: + except GypError as e: sys.stderr.write("gyp: %s\n" % e) return None - except Exception, e: - print >>sys.stderr, 'Exception:', e - print >>sys.stderr, traceback.format_exc() + except Exception as e: + print('Exception:', e, file=sys.stderr) + print(traceback.format_exc(), file=sys.stderr) return None @@ -594,7 +595,7 @@ def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth, args = (global_flags, dependency, variables, includes, depth, check, generator_input_info), callback = parallel_state.LoadTargetBuildFileCallback) - except KeyboardInterrupt, e: + except KeyboardInterrupt as e: parallel_state.pool.terminate() raise e @@ -894,7 +895,7 @@ def ExpandVariables(input, phase, variables, build_file): stderr=subprocess.PIPE, stdin=subprocess.PIPE, cwd=build_file_dir) - except Exception, e: + except Exception as e: raise GypError("%s while executing command '%s' in %s" % (e, contents, build_file)) @@ -1008,9 +1009,9 @@ def ExpandVariables(input, phase, variables, build_file): # Convert all strings that are canonically-represented integers into integers. 
if type(output) is list: - for index in xrange(0, len(output)): - if IsStrCanonicalInt(output[index]): - output[index] = int(output[index]) + for index, outstr in enumerate(output): + if IsStrCanonicalInt(outstr): + output[index] = int(outstr) elif IsStrCanonicalInt(output): output = int(output) @@ -1079,13 +1080,13 @@ def EvalSingleCondition( if eval(ast_code, {'__builtins__': None}, variables): return true_dict return false_dict - except SyntaxError, e: + except SyntaxError as e: syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s ' 'at character %d.' % (str(e.args[0]), e.text, build_file, e.offset), e.filename, e.lineno, e.offset, e.text) raise syntax_error - except NameError, e: + except NameError as e: gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' % (cond_expr_expanded, build_file)) raise GypError(e) @@ -1140,7 +1141,7 @@ def ProcessConditionsInDict(the_dict, phase, variables, build_file): def LoadAutomaticVariablesFromDict(variables, the_dict): # Any keys with plain string values in the_dict become automatic variables. # The variable name is the key name with a "_" character prepended. - for key, value in the_dict.iteritems(): + for key, value in the_dict.items(): if type(value) in (str, int, list): variables['_' + key] = value @@ -1153,7 +1154,7 @@ def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key): # the_dict in the_dict's parent dict. If the_dict's parent is not a dict # (it could be a list or it could be parentless because it is a root dict), # the_dict_key will be None. - for key, value in the_dict.get('variables', {}).iteritems(): + for key, value in the_dict.get('variables', {}).items(): if type(value) not in (str, int, list): continue @@ -1192,7 +1193,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, # list before we process them so that you can reference one # variable from another. They will be fully expanded by recursion # in ExpandVariables. - for key, value in the_dict['variables'].iteritems(): + for key, value in the_dict['variables'].items(): variables[key] = value # Handle the associated variables dict first, so that any variable @@ -1205,7 +1206,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - for key, value in the_dict.iteritems(): + for key, value in the_dict.items(): # Skip "variables", which was already processed if present. if key != 'variables' and type(value) is str: expanded = ExpandVariables(value, phase, variables, build_file) @@ -1263,7 +1264,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, # Recurse into child dicts, or process child lists which may result in # further recursion into descendant dicts. - for key, value in the_dict.iteritems(): + for key, value in the_dict.items(): # Skip "variables" and string values, which were already processed if # present. 
if key == 'variables' or type(value) is str: @@ -1360,14 +1361,14 @@ def QualifyDependencies(targets): for dep in dependency_sections for op in ('', '!', '/')] - for target, target_dict in targets.iteritems(): + for target, target_dict in targets.items(): target_build_file = gyp.common.BuildFile(target) toolset = target_dict['toolset'] for dependency_key in all_dependency_sections: dependencies = target_dict.get(dependency_key, []) - for index in xrange(0, len(dependencies)): + for index, dep in enumerate(dependencies): dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget( - target_build_file, dependencies[index], toolset) + target_build_file, dep, toolset) if not multiple_toolsets: # Ignore toolset specification in the dependency if it is specified. dep_toolset = toolset @@ -1400,7 +1401,7 @@ def ExpandWildcardDependencies(targets, data): dependency list, must be qualified when this function is called. """ - for target, target_dict in targets.iteritems(): + for target, target_dict in targets.items(): toolset = target_dict['toolset'] target_build_file = gyp.common.BuildFile(target) for dependency_key in dependency_sections: @@ -1462,7 +1463,7 @@ def Unify(l): def RemoveDuplicateDependencies(targets): """Makes sure every dependency appears only once in all targets's dependency lists.""" - for target_name, target_dict in targets.iteritems(): + for target_name, target_dict in targets.items(): for dependency_key in dependency_sections: dependencies = target_dict.get(dependency_key, []) if dependencies: @@ -1478,7 +1479,7 @@ def Filter(l, item): def RemoveSelfDependencies(targets): """Remove self dependencies from targets that have the prune_self_dependency variable set.""" - for target_name, target_dict in targets.iteritems(): + for target_name, target_dict in targets.items(): for dependency_key in dependency_sections: dependencies = target_dict.get(dependency_key, []) if dependencies: @@ -1491,7 +1492,7 @@ def RemoveSelfDependencies(targets): def RemoveLinkDependenciesFromNoneTargets(targets): """Remove dependencies having the 'link_dependency' attribute from the 'none' targets.""" - for target_name, target_dict in targets.iteritems(): + for target_name, target_dict in targets.items(): for dependency_key in dependency_sections: dependencies = target_dict.get(dependency_key, []) if dependencies: @@ -1783,14 +1784,14 @@ def BuildDependencyList(targets): # Create a DependencyGraphNode for each target. Put it into a dict for easy # access. dependency_nodes = {} - for target, spec in targets.iteritems(): + for target, spec in targets.items(): if target not in dependency_nodes: dependency_nodes[target] = DependencyGraphNode(target) # Set up the dependency links. Targets that have no dependencies are treated # as dependent on root_node. root_node = DependencyGraphNode(None) - for target, spec in targets.iteritems(): + for target, spec in targets.items(): target_node = dependency_nodes[target] target_build_file = gyp.common.BuildFile(target) dependencies = spec.get('dependencies') @@ -1814,7 +1815,7 @@ def BuildDependencyList(targets): if not root_node.dependents: # If all targets have dependencies, add the first target as a dependent # of root_node so that the cycle can be discovered from root_node. 
- target = targets.keys()[0] + target = next(iter(targets)) target_node = dependency_nodes[target] target_node.dependencies.append(root_node) root_node.dependents.append(target_node) @@ -1833,20 +1834,20 @@ def VerifyNoGYPFileCircularDependencies(targets): # Create a DependencyGraphNode for each gyp file containing a target. Put # it into a dict for easy access. dependency_nodes = {} - for target in targets.iterkeys(): + for target in targets.keys(): build_file = gyp.common.BuildFile(target) if not build_file in dependency_nodes: dependency_nodes[build_file] = DependencyGraphNode(build_file) # Set up the dependency links. - for target, spec in targets.iteritems(): + for target, spec in targets.items(): build_file = gyp.common.BuildFile(target) build_file_node = dependency_nodes[build_file] target_dependencies = spec.get('dependencies', []) for dependency in target_dependencies: try: dependency_build_file = gyp.common.BuildFile(dependency) - except GypError, e: + except GypError as e: gyp.common.ExceptionAppend( e, 'while computing dependencies of .gyp file %s' % build_file) raise @@ -1864,7 +1865,7 @@ def VerifyNoGYPFileCircularDependencies(targets): # Files that have no dependencies are treated as dependent on root_node. root_node = DependencyGraphNode(None) - for build_file_node in dependency_nodes.itervalues(): + for build_file_node in dependency_nodes.values(): if len(build_file_node.dependencies) == 0: build_file_node.dependencies.append(root_node) root_node.dependents.append(build_file_node) @@ -1877,7 +1878,7 @@ def VerifyNoGYPFileCircularDependencies(targets): if not root_node.dependents: # If all files have dependencies, add the first file as a dependent # of root_node so that the cycle can be discovered from root_node. - file_node = dependency_nodes.values()[0] + file_node = next(iter(dependency_nodes.values())) file_node.dependencies.append(root_node) root_node.dependents.append(file_node) cycles = [] @@ -2104,7 +2105,7 @@ def is_in_set_or_list(x, s, l): def MergeDicts(to, fro, to_file, fro_file): # I wanted to name the parameter "from" but it's a Python keyword... - for k, v in fro.iteritems(): + for k, v in fro.items(): # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give # copy semantics. Something else may want to merge from the |fro| dict # later, and having the same dict ref pointed to twice in the tree isn't @@ -2239,13 +2240,13 @@ def SetUpConfigurations(target, target_dict): if not 'configurations' in target_dict: target_dict['configurations'] = {'Default': {}} if not 'default_configuration' in target_dict: - concrete = [i for (i, config) in target_dict['configurations'].iteritems() + concrete = [i for (i, config) in target_dict['configurations'].items() if not config.get('abstract')] target_dict['default_configuration'] = sorted(concrete)[0] merged_configurations = {} configs = target_dict['configurations'] - for (configuration, old_configuration_dict) in configs.iteritems(): + for (configuration, old_configuration_dict) in configs.items(): # Skip abstract configurations (saves work only). if old_configuration_dict.get('abstract'): continue @@ -2253,7 +2254,7 @@ def SetUpConfigurations(target, target_dict): # Get the inheritance relationship right by making a copy of the target # dict. 
new_configuration_dict = {} - for (key, target_val) in target_dict.iteritems(): + for (key, target_val) in target_dict.items(): key_ext = key[-1:] if key_ext in key_suffixes: key_base = key[:-1] @@ -2274,10 +2275,9 @@ def SetUpConfigurations(target, target_dict): merged_configurations[configuration]) # Now drop all the abstract ones. - for configuration in target_dict['configurations'].keys(): - old_configuration_dict = target_dict['configurations'][configuration] - if old_configuration_dict.get('abstract'): - del target_dict['configurations'][configuration] + configs = target_dict['configurations'] + target_dict['configurations'] = \ + {k: v for k, v in configs.items() if not v.get('abstract')} # Now that all of the target's configurations have been built, go through # the target dict's keys and remove everything that's been moved into a @@ -2337,7 +2337,7 @@ def ProcessListFiltersInDict(name, the_dict): lists = [] del_lists = [] - for key, value in the_dict.iteritems(): + for key, value in the_dict.items(): operation = key[-1] if operation != '!' and operation != '/': continue @@ -2385,8 +2385,8 @@ def ProcessListFiltersInDict(name, the_dict): exclude_key = list_key + '!' if exclude_key in the_dict: for exclude_item in the_dict[exclude_key]: - for index in xrange(0, len(the_list)): - if exclude_item == the_list[index]: + for index, list_item in enumerate(the_list): + if exclude_item == list_item: # This item matches the exclude_item, so set its action to 0 # (exclude). list_actions[index] = 0 @@ -2411,8 +2411,7 @@ def ProcessListFiltersInDict(name, the_dict): raise ValueError('Unrecognized action ' + action + ' in ' + name + \ ' key ' + regex_key) - for index in xrange(0, len(the_list)): - list_item = the_list[index] + for index, list_item in enumerate(the_list): if list_actions[index] == action_value: # Even if the regex matches, nothing will change so continue (regex # searches are expensive). @@ -2442,7 +2441,7 @@ def ProcessListFiltersInDict(name, the_dict): # the indices of items that haven't been seen yet don't shift. That means # that things need to be prepended to excluded_list to maintain them in the # same order that they existed in the_list. - for index in xrange(len(list_actions) - 1, -1, -1): + for index in range(len(list_actions) - 1, -1, -1): if list_actions[index] == 0: # Dump anything with action 0 (exclude). Keep anything with action 1 # (include) or -1 (no include or exclude seen for the item). @@ -2455,7 +2454,7 @@ def ProcessListFiltersInDict(name, the_dict): the_dict[excluded_key] = excluded_list # Now recurse into subdicts and lists that may contain dicts. - for key, value in the_dict.iteritems(): + for key, value in the_dict.items(): if type(value) is dict: ProcessListFiltersInDict(key, value) elif type(value) is list: @@ -2512,7 +2511,7 @@ def ValidateSourcesInTarget(target, target_dict, build_file, basenames.setdefault(basename, []).append(source) error = '' - for basename, files in basenames.iteritems(): + for basename, files in basenames.items(): if len(files) > 1: error += ' %s: %s\n' % (basename, ' '.join(files)) @@ -2651,8 +2650,7 @@ def TurnIntIntoStrInDict(the_dict): def TurnIntIntoStrInList(the_list): """Given list the_list, recursively converts all integers into strings. 
""" - for index in xrange(0, len(the_list)): - item = the_list[index] + for index, item in enumerate(the_list): if type(item) is int: the_list[index] = str(item) elif type(item) is dict: @@ -2769,7 +2767,7 @@ def Load(build_files, variables, includes, depth, generator_input_info, check, try: LoadTargetBuildFile(build_file, data, aux_data, variables, includes, depth, check, True) - except Exception, e: + except Exception as e: gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) raise @@ -2791,7 +2789,7 @@ def Load(build_files, variables, includes, depth, generator_input_info, check, RemoveLinkDependenciesFromNoneTargets(targets) # Apply exclude (!) and regex (/) list filters only for dependency_sections. - for target_name, target_dict in targets.iteritems(): + for target_name, target_dict in targets.items(): tmp_dict = {} for key_base in dependency_sections: for op in ('', '!', '/'): diff --git a/pylib/gyp/input_test.py b/pylib/gyp/input_test.py index 4234fbb8..6c4b1cc5 100755 --- a/pylib/gyp/input_test.py +++ b/pylib/gyp/input_test.py @@ -22,7 +22,7 @@ def _create_dependency(self, dependent, dependency): dependency.dependents.append(dependent) def test_no_cycle_empty_graph(self): - for label, node in self.nodes.iteritems(): + for label, node in self.nodes.items(): self.assertEquals([], node.FindCycles()) def test_no_cycle_line(self): @@ -30,7 +30,7 @@ def test_no_cycle_line(self): self._create_dependency(self.nodes['b'], self.nodes['c']) self._create_dependency(self.nodes['c'], self.nodes['d']) - for label, node in self.nodes.iteritems(): + for label, node in self.nodes.items(): self.assertEquals([], node.FindCycles()) def test_no_cycle_dag(self): @@ -38,7 +38,7 @@ def test_no_cycle_dag(self): self._create_dependency(self.nodes['a'], self.nodes['c']) self._create_dependency(self.nodes['b'], self.nodes['c']) - for label, node in self.nodes.iteritems(): + for label, node in self.nodes.items(): self.assertEquals([], node.FindCycles()) def test_cycle_self_reference(self): diff --git a/pylib/gyp/mac_tool.py b/pylib/gyp/mac_tool.py index 0ad7e7a1..7d3a8c27 100755 --- a/pylib/gyp/mac_tool.py +++ b/pylib/gyp/mac_tool.py @@ -8,6 +8,8 @@ These functions are executed via gyp-mac-tool when using the Makefile generator. """ +from __future__ import print_function + import fcntl import fnmatch import glob @@ -16,7 +18,6 @@ import plistlib import re import shutil -import string import struct import subprocess import sys @@ -155,11 +156,11 @@ def _DetectInputEncoding(self, file_name): fp.close() return None fp.close() - if header.startswith("\xFE\xFF"): + if header.startswith(b"\xFE\xFF"): return "UTF-16" - elif header.startswith("\xFF\xFE"): + elif header.startswith(b"\xFF\xFE"): return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): + elif header.startswith(b"\xEF\xBB\xBF"): return "UTF-8" else: return None @@ -174,7 +175,7 @@ def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). 
plist = plistlib.readPlistFromString(lines) if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) + plist.update(json.loads(keys[0])) lines = plistlib.writePlistToString(plist) # Go through all the environment variables and replace them as variables in @@ -185,7 +186,7 @@ def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): continue evar = '${%s}' % key evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) + lines = lines.replace(evar, evalue) # Xcode supports various suffices on environment variables, which are # all undocumented. :rfc1034identifier is used in the standard project @@ -195,11 +196,11 @@ def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): # in a URL either -- oops, hence :rfc1034identifier was born. evar = '${%s:identifier}' % key evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) + lines = lines.replace(evar, evalue) evar = '${%s:rfc1034identifier}' % key evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) + lines = lines.replace(evar, evalue) # Remove any keys with values that haven't been replaced. lines = lines.split('\n') @@ -270,7 +271,7 @@ def ExecFilterLibtool(self, *cmd_list): _, err = libtoolout.communicate() for line in err.splitlines(): if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line + print(line, file=sys.stderr) # Unconditionally touch the output .a file on the command line if present # and the command succeeded. A bit hacky. if not libtoolout.returncode: @@ -385,7 +386,7 @@ def ExecCompileXcassets(self, keys, *inputs): ]) if keys: keys = json.loads(keys) - for key, value in keys.iteritems(): + for key, value in keys.items(): arg_name = '--' + key if isinstance(value, bool): if value: @@ -480,8 +481,9 @@ def _FindProvisioningProfile(self, profile, bundle_identifier): profiles_dir = os.path.join( os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) + print(( + 'cannot find mobile provisioning for %s' % bundle_identifier), + file=sys.stderr) sys.exit(1) provisioning_profiles = None if profile: @@ -502,8 +504,9 @@ def _FindProvisioningProfile(self, profile, bundle_identifier): valid_provisioning_profiles[app_id_pattern] = ( profile_path, profile_data, team_identifier) if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) + print(( + 'cannot find mobile provisioning for %s' % bundle_identifier), + file=sys.stderr) sys.exit(1) # If the user has multiple provisioning profiles installed that can be # used for ${bundle_identifier}, pick the most specific one (ie. the @@ -527,7 +530,7 @@ def _LoadProvisioningProfile(self, profile_path): def _MergePlist(self, merged_plist, plist): """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): + for key, value in plist.items(): if isinstance(value, dict): merged_value = merged_plist.get(key, {}) if isinstance(merged_value, dict): @@ -637,7 +640,7 @@ def _ExpandVariables(self, data, substitutions): the key was not found. 
""" if isinstance(data, str): - for key, value in substitutions.iteritems(): + for key, value in substitutions.items(): data = data.replace('$(%s)' % key, value) return data if isinstance(data, list): diff --git a/pylib/gyp/msvs_emulation.py b/pylib/gyp/msvs_emulation.py index 6d5b5bd2..63d40e63 100644 --- a/pylib/gyp/msvs_emulation.py +++ b/pylib/gyp/msvs_emulation.py @@ -7,6 +7,7 @@ build systems, primarily ninja. """ +import collections import os import re import subprocess @@ -16,6 +17,12 @@ import gyp.MSVSUtil import gyp.MSVSVersion +try: + # basestring was removed in python3. + basestring +except NameError: + basestring = str + windows_quoter_regex = re.compile(r'(\\*)"') @@ -84,8 +91,8 @@ def _AddPrefix(element, prefix): """Add |prefix| to |element| or each subelement if element is iterable.""" if element is None: return element - # Note, not Iterable because we don't want to handle strings like that. - if isinstance(element, list) or isinstance(element, tuple): + if (isinstance(element, collections.Iterable) and + not isinstance(element, basestring)): return [prefix + e for e in element] else: return prefix + element @@ -97,7 +104,8 @@ def _DoRemapping(element, map): if map is not None and element is not None: if not callable(map): map = map.get # Assume it's a dict, otherwise a callable to do the remap. - if isinstance(element, list) or isinstance(element, tuple): + if (isinstance(element, collections.Iterable) and + not isinstance(element, basestring)): element = filter(None, [map(elem) for elem in element]) else: element = map(element) @@ -109,7 +117,8 @@ def _AppendOrReturn(append, element): then add |element| to it, adding each item in |element| if it's a list or tuple.""" if append is not None and element is not None: - if isinstance(element, list) or isinstance(element, tuple): + if (isinstance(element, collections.Iterable) and + not isinstance(element, basestring)): append.extend(element) else: append.append(element) @@ -209,7 +218,7 @@ def __init__(self, spec, generator_flags): configs = spec['configurations'] for field, default in supported_fields: setattr(self, field, {}) - for configname, config in configs.iteritems(): + for configname, config in configs.items(): getattr(self, field)[configname] = config.get(field, default()) self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.']) @@ -482,7 +491,7 @@ def GetCflags(self, config): # https://msdn.microsoft.com/en-us/library/dn502518.aspx cflags.append('/FS') # ninja handles parallelism by itself, don't have the compiler do it too. - cflags = filter(lambda x: not x.startswith('/MP'), cflags) + cflags = [x for x in cflags if not x.startswith('/MP')] return cflags def _GetPchFlags(self, config, extension): @@ -649,19 +658,17 @@ def GetLdflags(self, config, gyp_to_build_path, expand_special, # If the base address is not specifically controlled, DYNAMICBASE should # be on by default. - base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED', - ldflags) - if not base_flags: + if not any('DYNAMICBASE' in flag or flag == '/FIXED' for flag in ldflags): ldflags.append('/DYNAMICBASE') # If the NXCOMPAT flag has not been specified, default to on. Despite the # documentation that says this only defaults to on when the subsystem is # Vista or greater (which applies to the linker), the IDE defaults it on # unless it's explicitly off. 
- if not filter(lambda x: 'NXCOMPAT' in x, ldflags): + if not any('NXCOMPAT' in flag for flag in ldflags): ldflags.append('/NXCOMPAT') - have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags) + have_def_file = any(flag.startswith('/DEF:') for flag in ldflags) manifest_flags, intermediate_manifest, manifest_files = \ self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path, is_executable and not have_def_file, build_dir) @@ -953,7 +960,7 @@ def ExpandMacros(string, expansions): """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv for the canonical way to retrieve a suitable dict.""" if '$' in string: - for old, new in expansions.iteritems(): + for old, new in expansions.items(): assert '$(' not in new, new string = string.replace(old, new) return string @@ -1001,7 +1008,7 @@ def _FormatAsEnvironmentBlock(envvar_dict): CreateProcess documentation for more details.""" block = '' nul = '\0' - for key, value in envvar_dict.iteritems(): + for key, value in envvar_dict.items(): block += key + '=' + value + nul block += nul return block @@ -1056,7 +1063,7 @@ def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, env['INCLUDE'] = ';'.join(system_includes) env_block = _FormatAsEnvironmentBlock(env) - f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb') + f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'w') f.write(env_block) f.close() @@ -1078,7 +1085,7 @@ def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja): if int(generator_flags.get('msvs_error_on_missing_sources', 0)): no_specials = filter(lambda x: '$' not in x, sources) relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials] - missing = filter(lambda x: not os.path.exists(x), relative) + missing = [x for x in relative if not os.path.exists(x)] if missing: # They'll look like out\Release\..\..\stuff\things.cc, so normalize the # path for a slightly less crazy looking output. diff --git a/pylib/gyp/ordered_dict.py b/pylib/gyp/ordered_dict.py deleted file mode 100644 index a1e89f91..00000000 --- a/pylib/gyp/ordered_dict.py +++ /dev/null @@ -1,289 +0,0 @@ -# Unmodified from http://code.activestate.com/recipes/576693/ -# other than to add MIT license header (as specified on page, but not in code). -# Linked from Python documentation here: -# http://docs.python.org/2/library/collections.html#collections.OrderedDict -# -# This should be deleted once Py2.7 is available on all bots, see -# http://crbug.com/241769. -# -# Copyright (c) 2009 Raymond Hettinger. -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. - -# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. -# Passes Python2.7's test suite and incorporates all the latest updates. - -try: - from thread import get_ident as _get_ident -except ImportError: - from dummy_thread import get_ident as _get_ident - -try: - from _abcoll import KeysView, ValuesView, ItemsView -except ImportError: - pass - - -class OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as for regular dictionaries. - - # The internal self.__map dictionary maps keys to links in a doubly linked list. - # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # Each link is stored as a list of length three: [PREV, NEXT, KEY]. - - def __init__(self, *args, **kwds): - '''Initialize an ordered dictionary. Signature is the same as for - regular dictionaries, but keyword arguments are not recommended - because their insertion order is arbitrary. - - ''' - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__root - except AttributeError: - self.__root = root = [] # sentinel node - root[:] = [root, root, None] - self.__map = {} - self.__update(*args, **kwds) - - def __setitem__(self, key, value, dict_setitem=dict.__setitem__): - 'od.__setitem__(i, y) <==> od[i]=y' - # Setting a new item creates a new link which goes at the end of the linked - # list, and the inherited dictionary is updated with the new key/value pair. - if key not in self: - root = self.__root - last = root[0] - last[1] = root[0] = self.__map[key] = [last, root, key] - dict_setitem(self, key, value) - - def __delitem__(self, key, dict_delitem=dict.__delitem__): - 'od.__delitem__(y) <==> del od[y]' - # Deleting an existing item uses self.__map to find the link which is - # then removed by updating the links in the predecessor and successor nodes. - dict_delitem(self, key) - link_prev, link_next, key = self.__map.pop(key) - link_prev[1] = link_next - link_next[0] = link_prev - - def __iter__(self): - 'od.__iter__() <==> iter(od)' - root = self.__root - curr = root[1] - while curr is not root: - yield curr[2] - curr = curr[1] - - def __reversed__(self): - 'od.__reversed__() <==> reversed(od)' - root = self.__root - curr = root[0] - while curr is not root: - yield curr[2] - curr = curr[0] - - def clear(self): - 'od.clear() -> None. Remove all items from od.' - try: - for node in self.__map.itervalues(): - del node[:] - root = self.__root - root[:] = [root, root, None] - self.__map.clear() - except AttributeError: - pass - dict.clear(self) - - def popitem(self, last=True): - '''od.popitem() -> (k, v), return and remove a (key, value) pair. - Pairs are returned in LIFO order if last is true or FIFO order if false. 
- - ''' - if not self: - raise KeyError('dictionary is empty') - root = self.__root - if last: - link = root[0] - link_prev = link[0] - link_prev[1] = root - root[0] = link_prev - else: - link = root[1] - link_next = link[1] - root[1] = link_next - link_next[0] = root - key = link[2] - del self.__map[key] - value = dict.pop(self, key) - return key, value - - # -- the following methods do not depend on the internal structure -- - - def keys(self): - 'od.keys() -> list of keys in od' - return list(self) - - def values(self): - 'od.values() -> list of values in od' - return [self[key] for key in self] - - def items(self): - 'od.items() -> list of (key, value) pairs in od' - return [(key, self[key]) for key in self] - - def iterkeys(self): - 'od.iterkeys() -> an iterator over the keys in od' - return iter(self) - - def itervalues(self): - 'od.itervalues -> an iterator over the values in od' - for k in self: - yield self[k] - - def iteritems(self): - 'od.iteritems -> an iterator over the (key, value) items in od' - for k in self: - yield (k, self[k]) - - # Suppress 'OrderedDict.update: Method has no argument': - # pylint: disable=E0211 - def update(*args, **kwds): - '''od.update(E, **F) -> None. Update od from dict/iterable E and F. - - If E is a dict instance, does: for k in E: od[k] = E[k] - If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] - Or if E is an iterable of items, does: for k, v in E: od[k] = v - In either case, this is followed by: for k, v in F.items(): od[k] = v - - ''' - if len(args) > 2: - raise TypeError('update() takes at most 2 positional ' - 'arguments (%d given)' % (len(args),)) - elif not args: - raise TypeError('update() takes at least 1 argument (0 given)') - self = args[0] - # Make progressively weaker assumptions about "other" - other = () - if len(args) == 2: - other = args[1] - if isinstance(other, dict): - for key in other: - self[key] = other[key] - elif hasattr(other, 'keys'): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value - for key, value in kwds.items(): - self[key] = value - - __update = update # let subclasses override update without breaking __init__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. - - ''' - if key in self: - result = self[key] - del self[key] - return result - if default is self.__marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' - if key in self: - return self[key] - self[key] = default - return default - - def __repr__(self, _repr_running={}): - 'od.__repr__() <==> repr(od)' - call_key = id(self), _get_ident() - if call_key in _repr_running: - return '...' 
- _repr_running[call_key] = 1 - try: - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, self.items()) - finally: - del _repr_running[call_key] - - def __reduce__(self): - 'Return state information for pickling' - items = [[k, self[k]] for k in self] - inst_dict = vars(self).copy() - for k in vars(OrderedDict()): - inst_dict.pop(k, None) - if inst_dict: - return (self.__class__, (items,), inst_dict) - return self.__class__, (items,) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S - and values equal to v (which defaults to None). - - ''' - d = cls() - for key in iterable: - d[key] = value - return d - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. - - ''' - if isinstance(other, OrderedDict): - return len(self)==len(other) and self.items() == other.items() - return dict.__eq__(self, other) - - def __ne__(self, other): - return not self == other - - # -- the following methods are only used in Python 2.7 -- - - def viewkeys(self): - "od.viewkeys() -> a set-like object providing a view on od's keys" - return KeysView(self) - - def viewvalues(self): - "od.viewvalues() -> an object providing a view on od's values" - return ValuesView(self) - - def viewitems(self): - "od.viewitems() -> a set-like object providing a view on od's items" - return ItemsView(self) - diff --git a/pylib/gyp/simple_copy.py b/pylib/gyp/simple_copy.py index eaf5f8be..58a61c34 100644 --- a/pylib/gyp/simple_copy.py +++ b/pylib/gyp/simple_copy.py @@ -49,7 +49,7 @@ def _deepcopy_list(x): def _deepcopy_dict(x): y = {} - for key, value in x.iteritems(): + for key, value in x.items(): y[deepcopy(key)] = deepcopy(value) return y d[dict] = _deepcopy_dict diff --git a/pylib/gyp/win_tool.py b/pylib/gyp/win_tool.py index 1c843a0b..89734846 100755 --- a/pylib/gyp/win_tool.py +++ b/pylib/gyp/win_tool.py @@ -9,6 +9,8 @@ These functions are executed via gyp-win-tool when using the ninja generator. """ +from __future__ import print_function + import os import re import shutil @@ -134,7 +136,7 @@ def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args): if (not line.startswith(' Creating library ') and not line.startswith('Generating code') and not line.startswith('Finished generating code')): - print line + print(line) return link.returncode def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname, @@ -193,16 +195,18 @@ def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname, our_manifest = '%(out)s.manifest' % variables # Load and normalize the manifests. mt.exe sometimes removes whitespace, # and sometimes doesn't unfortunately. 
- with open(our_manifest, 'rb') as our_f: - with open(assert_manifest, 'rb') as assert_f: + with open(our_manifest, 'r') as our_f: + with open(assert_manifest, 'r') as assert_f: our_data = our_f.read().translate(None, string.whitespace) assert_data = assert_f.read().translate(None, string.whitespace) if our_data != assert_data: os.unlink(out) def dump(filename): - sys.stderr.write('%s\n-----\n' % filename) - with open(filename, 'rb') as f: - sys.stderr.write(f.read() + '\n-----\n') + print(filename, file=sys.stderr) + print('-----', file=sys.stderr) + with open(filename, 'r') as f: + print(f.read(), file=sys.stderr) + print('-----', file=sys.stderr) dump(intermediate_manifest) dump(our_manifest) dump(assert_manifest) @@ -223,7 +227,7 @@ def ExecManifestWrapper(self, arch, *args): out, _ = popen.communicate() for line in out.splitlines(): if line and 'manifest authoring warning 81010002' not in line: - print line + print(line) return popen.returncode def ExecManifestToRc(self, arch, *args): @@ -231,7 +235,7 @@ def ExecManifestToRc(self, arch, *args): |args| is tuple containing path to resource file, path to manifest file and resource name which can be "1" (for executables) or "2" (for DLLs).""" manifest_path, resource_path, resource_name = args - with open(resource_path, 'wb') as output: + with open(resource_path, 'w') as output: output.write('#include \n%s RT_MANIFEST "%s"' % ( resource_name, os.path.abspath(manifest_path).replace('\\', '/'))) @@ -263,7 +267,7 @@ def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl, for x in lines if x.startswith(prefixes)) for line in lines: if not line.startswith(prefixes) and line not in processing: - print line + print(line) return popen.returncode def ExecAsmWrapper(self, arch, *args): @@ -277,7 +281,7 @@ def ExecAsmWrapper(self, arch, *args): not line.startswith('Microsoft (R) Macro Assembler') and not line.startswith(' Assembling: ') and line): - print line + print(line) return popen.returncode def ExecRcWrapper(self, arch, *args): @@ -291,7 +295,7 @@ def ExecRcWrapper(self, arch, *args): if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and not line.startswith('Copyright (C) Microsoft Corporation') and line): - print line + print(line) return popen.returncode def ExecActionWrapper(self, arch, rspfile, *dir): @@ -300,7 +304,7 @@ def ExecActionWrapper(self, arch, rspfile, *dir): env = self._GetEnv(arch) # TODO(scottmg): This is a temporary hack to get some specific variables # through to actions that are set after gyp-time. http://crbug.com/333738. - for k, v in os.environ.iteritems(): + for k, v in os.environ.items(): if k not in env: env[k] = v args = open(rspfile).read() diff --git a/pylib/gyp/xcode_emulation.py b/pylib/gyp/xcode_emulation.py index dba8e769..4c875de3 100644 --- a/pylib/gyp/xcode_emulation.py +++ b/pylib/gyp/xcode_emulation.py @@ -7,6 +7,8 @@ other build systems, such as make and ninja. """ +from __future__ import print_function + import copy import gyp.common import os @@ -73,7 +75,7 @@ def _ExpandArchs(self, archs, sdkroot): if arch not in expanded_archs: expanded_archs.append(arch) except KeyError as e: - print 'Warning: Ignoring unsupported variable "%s".' % variable + print('Warning: Ignoring unsupported variable "%s".' % variable) elif arch not in expanded_archs: expanded_archs.append(arch) return expanded_archs @@ -171,7 +173,7 @@ def __init__(self, spec): # the same for all configs are implicitly per-target settings. 
self.xcode_settings = {} configs = spec['configurations'] - for configname, config in configs.iteritems(): + for configname, config in configs.items(): self.xcode_settings[configname] = config.get('xcode_settings', {}) self._ConvertConditionalKeys(configname) if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET', @@ -197,8 +199,8 @@ def _ConvertConditionalKeys(self, configname): new_key = key.split("[")[0] settings[new_key] = settings[key] else: - print 'Warning: Conditional keys not implemented, ignoring:', \ - ' '.join(conditional_keys) + print('Warning: Conditional keys not implemented, ignoring:', \ + ' '.join(conditional_keys)) del settings[key] def _Settings(self): @@ -216,7 +218,7 @@ def _Appendf(self, lst, test_key, format_str, default=None): def _WarnUnimplemented(self, test_key): if test_key in self._Settings(): - print 'Warning: Ignoring not yet implemented key "%s".' % test_key + print('Warning: Ignoring not yet implemented key "%s".' % test_key) def IsBinaryOutputFormat(self, configname): default = "binary" if self.isIOS else "xml" @@ -963,7 +965,7 @@ def GetPerTargetSettings(self): result = dict(self.xcode_settings[configname]) first_pass = False else: - for key, value in self.xcode_settings[configname].iteritems(): + for key, value in self.xcode_settings[configname].items(): if key not in result: continue elif result[key] != value: @@ -1084,8 +1086,8 @@ def _GetIOSPostbuilds(self, configname, output_binary): unimpl = ['OTHER_CODE_SIGN_FLAGS'] unimpl = set(unimpl) & set(self.xcode_settings[configname].keys()) if unimpl: - print 'Warning: Some codesign keys not implemented, ignoring: %s' % ( - ', '.join(sorted(unimpl))) + print('Warning: Some codesign keys not implemented, ignoring: %s' % ( + ', '.join(sorted(unimpl)))) if self._IsXCTest(): # For device xctests, Xcode copies two extra frameworks into $TEST_HOST. @@ -1737,7 +1739,7 @@ def GetEdges(node): order = gyp.common.TopologicallySorted(env.keys(), GetEdges) order.reverse() return order - except gyp.common.CycleError, e: + except gyp.common.CycleError as e: raise GypError( 'Xcode environment variables are cyclically dependent: ' + str(e.nodes)) @@ -1774,10 +1776,11 @@ def _HasIOSTarget(targets): def _AddIOSDeviceConfigurations(targets): """Clone all targets and append -iphoneos to the name. 
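Note (reviewer aside): the `except gyp.common.CycleError, e` to `except ... as e` edits in this file use the one spelling both interpreters accept; the comma form is a SyntaxError on Python 3, while `as` has been valid since Python 2.6. A minimal illustration:

    try:
        raise ValueError('cyclic dependency')
    except ValueError as e:   # 'except ValueError, e:' no longer parses on Python 3
        print('caught: %s' % e)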
Configure these targets to build for iOS devices and use correct architectures for those builds.""" - for target_dict in targets.itervalues(): + for target_dict in targets.values(): toolset = target_dict['toolset'] configs = target_dict['configurations'] - for config_name, simulator_config_dict in dict(configs).iteritems(): + + for config_name, simulator_config_dict in dict(configs).items(): iphoneos_config_dict = copy.deepcopy(simulator_config_dict) configs[config_name + '-iphoneos'] = iphoneos_config_dict configs[config_name + '-iphonesimulator'] = simulator_config_dict diff --git a/pylib/gyp/xcode_ninja.py b/pylib/gyp/xcode_ninja.py index bc76ffff..1d71b8c5 100644 --- a/pylib/gyp/xcode_ninja.py +++ b/pylib/gyp/xcode_ninja.py @@ -28,7 +28,7 @@ def _WriteWorkspace(main_gyp, sources_gyp, params): workspace_path = os.path.join(options.generator_output, workspace_path) try: os.makedirs(workspace_path) - except OSError, e: + except OSError as e: if e.errno != errno.EEXIST: raise output_string = '\n' + \ @@ -85,7 +85,7 @@ def _TargetFromSpec(old_spec, params): "%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel if 'configurations' in old_spec: - for config in old_spec['configurations'].iterkeys(): + for config in old_spec['configurations'].keys(): old_xcode_settings = \ old_spec['configurations'][config].get('xcode_settings', {}) if 'IPHONEOS_DEPLOYMENT_TARGET' in old_xcode_settings: @@ -167,7 +167,7 @@ def CreateWrapper(target_list, target_dicts, data, params): params: Dict of global options for gyp. """ orig_gyp = params['build_files'][0] - for gyp_name, gyp_dict in data.iteritems(): + for gyp_name, gyp_dict in data.items(): if gyp_name == orig_gyp: depth = gyp_dict['_DEPTH'] @@ -238,7 +238,7 @@ def CreateWrapper(target_list, target_dicts, data, params): not generator_flags.get('xcode_ninja_list_excluded_files', True) sources = [] - for target, target_dict in target_dicts.iteritems(): + for target, target_dict in target_dicts.items(): base = os.path.dirname(target) files = target_dict.get('sources', []) + \ target_dict.get('mac_bundle_resources', []) diff --git a/pylib/gyp/xcodeproj_file.py b/pylib/gyp/xcodeproj_file.py index e69235f7..bd238f65 100644 --- a/pylib/gyp/xcodeproj_file.py +++ b/pylib/gyp/xcodeproj_file.py @@ -154,6 +154,11 @@ import sha _new_sha1 = sha.new +try: + # basestring was removed in python3. + basestring +except NameError: + basestring = str # See XCObject._EncodeString. This pattern is used to determine when a string # can be printed unquoted. Strings that match this pattern may be printed @@ -314,7 +319,7 @@ def Copy(self): """ that = self.__class__(id=self.id, parent=self.parent) - for key, value in self._properties.iteritems(): + for key, value in self._properties.items(): is_strong = self._schema[key][2] if isinstance(value, XCObject): @@ -324,8 +329,7 @@ def Copy(self): that._properties[key] = new_value else: that._properties[key] = value - elif isinstance(value, str) or isinstance(value, unicode) or \ - isinstance(value, int): + elif isinstance(value, basestring) or isinstance(value, int): that._properties[key] = value elif isinstance(value, list): if is_strong: @@ -449,10 +453,10 @@ def _HashUpdate(hash, data): # is 160 bits. Instead of throwing out 64 bits of the digest, xor them # into the portion that gets used. 
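Note (reviewer aside): the try/except NameError shim added to xcodeproj_file.py is the usual way to get one name meaning "any text type" on both interpreters. The same pattern in a self-contained sketch:

    try:
        basestring               # defined on Python 2 (base of str and unicode)
    except NameError:
        basestring = str         # Python 3: str is the only text type

    def is_text(value):
        return isinstance(value, basestring)

    print([is_text('ascii'), is_text(u'text'), is_text(42)])   # [True, True, False]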
assert hash.digest_size % 4 == 0 - digest_int_count = hash.digest_size / 4 + digest_int_count = hash.digest_size // 4 digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest()) id_ints = [0, 0, 0] - for index in xrange(0, digest_int_count): + for index in range(0, digest_int_count): id_ints[index % 3] ^= digest_ints[index] self.id = '%08X%08X%08X' % tuple(id_ints) @@ -475,7 +479,7 @@ def Children(self): """Returns a list of all of this object's owned (strong) children.""" children = [] - for property, attributes in self._schema.iteritems(): + for property, attributes in self._schema.items(): (is_list, property_type, is_strong) = attributes[0:3] if is_strong and property in self._properties: if not is_list: @@ -603,7 +607,12 @@ def _XCPrintableValue(self, tabs, value, flatten_list=False): comment = value.Comment() elif isinstance(value, str): printable += self._EncodeString(value) - elif isinstance(value, unicode): + # A python3 compatible way of saying isinstance(value, unicode). + # basestring is str in python3 so this is equivalent to the above + # isinstance. Thus if it failed above it will fail here. + # In python2 we test against str and unicode at this point. str has already + # failed in the above isinstance so we test against unicode. + elif isinstance(value, basestring): printable += self._EncodeString(value.encode('utf-8')) elif isinstance(value, int): printable += str(value) @@ -622,7 +631,7 @@ def _XCPrintableValue(self, tabs, value, flatten_list=False): printable += end_tabs + ')' elif isinstance(value, dict): printable = '{' + sep - for item_key, item_value in sorted(value.iteritems()): + for item_key, item_value in sorted(value.items()): printable += element_tabs + \ self._XCPrintableValue(tabs + 1, item_key, flatten_list) + ' = ' + \ self._XCPrintableValue(tabs + 1, item_value, flatten_list) + ';' + \ @@ -691,7 +700,7 @@ def _XCKVPrint(self, file, tabs, key, value): printable_value[0] == '"' and printable_value[-1] == '"': printable_value = printable_value[1:-1] printable += printable_key + ' = ' + printable_value + ';' + after_kv - except TypeError, e: + except TypeError as e: gyp.common.ExceptionAppend(e, 'while printing key "%s"' % key) raise @@ -730,7 +739,7 @@ def Print(self, file=sys.stdout): self._XCKVPrint(file, 3, 'isa', self.__class__.__name__) # The remaining elements of an object dictionary are sorted alphabetically. - for property, value in sorted(self._properties.iteritems()): + for property, value in sorted(self._properties.items()): self._XCKVPrint(file, 3, property, value) # End the object. @@ -752,7 +761,7 @@ def UpdateProperties(self, properties, do_copy=False): if properties is None: return - for property, value in properties.iteritems(): + for property, value in properties.items(): # Make sure the property is in the schema. if not property in self._schema: raise KeyError(property + ' not in ' + self.__class__.__name__) @@ -766,7 +775,7 @@ def UpdateProperties(self, properties, do_copy=False): ' must be list, not ' + value.__class__.__name__) for item in value: if not isinstance(item, property_type) and \ - not (item.__class__ == unicode and property_type == str): + not (isinstance(item, basestring) and property_type == str): # Accept unicode where str is specified. str is treated as # UTF-8-encoded. 
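Note (reviewer aside): the `/` to `//` edit above matters because Python 3's `/` always produces a float, and `'I' * count` then fails with a non-integer count; `//` keeps the integer arithmetic the struct format string needs on both versions. Tiny illustration:

    digest_size = 20                          # SHA-1 digests are 20 bytes
    print(digest_size // 4)                   # 5 (an int) on Python 2 and 3
    print('>' + 'I' * (digest_size // 4))     # '>IIIII', a valid struct format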
raise TypeError( @@ -774,7 +783,7 @@ def UpdateProperties(self, properties, do_copy=False): ' must be ' + property_type.__name__ + ', not ' + \ item.__class__.__name__) elif not isinstance(value, property_type) and \ - not (value.__class__ == unicode and property_type == str): + not (isinstance(value, basestring) and property_type == str): # Accept unicode where str is specified. str is treated as # UTF-8-encoded. raise TypeError( @@ -788,8 +797,7 @@ def UpdateProperties(self, properties, do_copy=False): self._properties[property] = value.Copy() else: self._properties[property] = value - elif isinstance(value, str) or isinstance(value, unicode) or \ - isinstance(value, int): + elif isinstance(value, basestring) or isinstance(value, int): self._properties[property] = value elif isinstance(value, list): if is_strong: @@ -865,7 +873,7 @@ def VerifyHasRequiredProperties(self): # TODO(mark): A stronger verification mechanism is needed. Some # subclasses need to perform validation beyond what the schema can enforce. - for property, attributes in self._schema.iteritems(): + for property, attributes in self._schema.items(): (is_list, property_type, is_strong, is_required) = attributes[0:4] if is_required and not property in self._properties: raise KeyError(self.__class__.__name__ + ' requires ' + property) @@ -875,7 +883,7 @@ def _SetDefaultsFromSchema(self): overwrite properties that have already been set.""" defaults = {} - for property, attributes in self._schema.iteritems(): + for property, attributes in self._schema.items(): (is_list, property_type, is_strong, is_required) = attributes[0:4] if is_required and len(attributes) >= 5 and \ not property in self._properties: @@ -1426,8 +1434,8 @@ def PathHashables(self): xche = self while xche != None and isinstance(xche, XCHierarchicalElement): xche_hashables = xche.Hashables() - for index in xrange(0, len(xche_hashables)): - hashables.insert(index, xche_hashables[index]) + for index, xche_hashable in enumerate(xche_hashables): + hashables.insert(index, xche_hashable) xche = xche.parent return hashables @@ -2468,8 +2476,7 @@ def HeadersPhase(self): # The headers phase should come before the resources, sources, and # frameworks phases, if any. insert_at = len(self._properties['buildPhases']) - for index in xrange(0, len(self._properties['buildPhases'])): - phase = self._properties['buildPhases'][index] + for index, phase in enumerate(self._properties['buildPhases']): if isinstance(phase, PBXResourcesBuildPhase) or \ isinstance(phase, PBXSourcesBuildPhase) or \ isinstance(phase, PBXFrameworksBuildPhase): @@ -2489,8 +2496,7 @@ def ResourcesPhase(self): # The resources phase should come before the sources and frameworks # phases, if any. insert_at = len(self._properties['buildPhases']) - for index in xrange(0, len(self._properties['buildPhases'])): - phase = self._properties['buildPhases'][index] + for index, phase in enumerate(self._properties['buildPhases']): if isinstance(phase, PBXSourcesBuildPhase) or \ isinstance(phase, PBXFrameworksBuildPhase): insert_at = index @@ -2911,7 +2917,7 @@ def CompareProducts(x, y, remote_products): # determine the sort order. return cmp(x_index, y_index) - for other_pbxproject, ref_dict in self._other_pbxprojects.iteritems(): + for other_pbxproject, ref_dict in self._other_pbxprojects.items(): # Build up a list of products in the remote project file, ordered the # same as the targets that produce them. 
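Note (reviewer aside, outside this mechanical pass): the unchanged `return cmp(x_index, y_index)` context line above still relies on the built-in cmp(), which Python 3 removed. A hedged sketch of the usual replacements, assuming the comparator is only used for ordering:

    def cmp_compat(a, b):
        # Standard stand-in for the removed built-in cmp().
        return (a > b) - (a < b)

    print([cmp_compat(1, 2), cmp_compat(2, 2), cmp_compat(3, 2)])   # [-1, 0, 1]

    # Sorting by key avoids comparators entirely and works on both versions.
    order = {'libfoo.dylib': 0, 'libbar.dylib': 1}                  # made-up names
    print(sorted(order, key=order.get))            # ['libfoo.dylib', 'libbar.dylib']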
remote_products = [] diff --git a/pylib/gyp/xml_fix.py b/pylib/gyp/xml_fix.py index 5de84815..4308d99b 100644 --- a/pylib/gyp/xml_fix.py +++ b/pylib/gyp/xml_fix.py @@ -32,8 +32,7 @@ def _Replacement_writexml(self, writer, indent="", addindent="", newl=""): writer.write(indent+"<" + self.tagName) attrs = self._get_attributes() - a_names = attrs.keys() - a_names.sort() + a_names = sorted(attrs.keys()) for a_name in a_names: writer.write(" %s=\"" % a_name) diff --git a/test/actions-bare/src/bare.py b/test/actions-bare/src/bare.py index 12307500..e153b774 100755 --- a/test/actions-bare/src/bare.py +++ b/test/actions-bare/src/bare.py @@ -6,6 +6,6 @@ import sys -f = open(sys.argv[1], 'wb') +f = open(sys.argv[1], 'w') f.write('Hello from bare.py\n') f.close() diff --git a/test/actions-multiple-outputs-with-dependencies/gyptest-action.py b/test/actions-multiple-outputs-with-dependencies/gyptest-action.py index ebc7f4fd..a9d21828 100755 --- a/test/actions-multiple-outputs-with-dependencies/gyptest-action.py +++ b/test/actions-multiple-outputs-with-dependencies/gyptest-action.py @@ -10,13 +10,15 @@ This is a regression test for crrev.com/1177163002. """ +from __future__ import print_function + import TestGyp import os import sys import time if sys.platform in ('darwin', 'win32'): - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp() diff --git a/test/actions-multiple-outputs/gyptest-multiple-outputs.py b/test/actions-multiple-outputs/gyptest-multiple-outputs.py index 72a7040a..5e2682d0 100755 --- a/test/actions-multiple-outputs/gyptest-multiple-outputs.py +++ b/test/actions-multiple-outputs/gyptest-multiple-outputs.py @@ -8,12 +8,14 @@ Verifies actions with multiple outputs will correctly rebuild. """ +from __future__ import print_function + import TestGyp import os import sys if sys.platform == 'win32': - print "This test is currently disabled: https://crbug.com/483696." 
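Note (reviewer aside): the xml_fix.py hunk above replaces the two-step `a_names = attrs.keys(); a_names.sort()` because dict.keys() returns a view with no .sort() method on Python 3; sorted() returns a fresh list on both versions. For example:

    attrs = {'version': '1.0', 'encoding': 'UTF-8', 'standalone': 'yes'}
    a_names = sorted(attrs.keys())
    print(a_names)   # ['encoding', 'standalone', 'version']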
+ print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp() diff --git a/test/actions-multiple/src/actions.gyp b/test/actions-multiple/src/actions.gyp index c70a58f7..d7423b58 100644 --- a/test/actions-multiple/src/actions.gyp +++ b/test/actions-multiple/src/actions.gyp @@ -27,7 +27,7 @@ { 'action_name': 'action1', 'inputs': [ - 'copy.py', + 'copyfile.py', 'input.txt', ], 'outputs': [ @@ -42,7 +42,7 @@ { 'action_name': 'action2', 'inputs': [ - 'copy.py', + 'copyfile.py', 'input.txt', ], 'outputs': [ @@ -57,7 +57,7 @@ { 'action_name': 'action3', 'inputs': [ - 'copy.py', + 'copyfile.py', 'input.txt', ], 'outputs': [ @@ -72,7 +72,7 @@ { 'action_name': 'action4', 'inputs': [ - 'copy.py', + 'copyfile.py', 'input.txt', ], 'outputs': [ @@ -168,7 +168,7 @@ { 'action_name': 'action1', 'inputs': [ - 'copy.py', + 'copyfile.py', 'input.txt', ], 'outputs': [ @@ -183,7 +183,7 @@ { 'action_name': 'action2', 'inputs': [ - 'copy.py', + 'copyfile.py', 'input.txt', ], 'outputs': [ @@ -207,7 +207,7 @@ { 'action_name': 'multi_dep', 'inputs': [ - 'copy.py', + 'copyfile.py', 'input.txt', ], 'outputs': [ diff --git a/test/actions-multiple/src/copy.py b/test/actions-multiple/src/copyfile.py similarity index 100% rename from test/actions-multiple/src/copy.py rename to test/actions-multiple/src/copyfile.py diff --git a/test/actions-none/src/fake_cross.py b/test/actions-none/src/fake_cross.py index 2913f66a..a03ea87f 100644 --- a/test/actions-none/src/fake_cross.py +++ b/test/actions-none/src/fake_cross.py @@ -6,7 +6,7 @@ import sys -fh = open(sys.argv[-1], 'wb') +fh = open(sys.argv[-1], 'w') for filename in sys.argv[1:-1]: fh.write(open(filename).read()) fh.close() diff --git a/test/actions-subdir/src/make-file.py b/test/actions-subdir/src/make-file.py index 74e55811..6055ab9b 100755 --- a/test/actions-subdir/src/make-file.py +++ b/test/actions-subdir/src/make-file.py @@ -8,4 +8,4 @@ contents = 'Hello from make-file.py\n' -open(sys.argv[1], 'wb').write(contents) +open(sys.argv[1], 'w').write(contents) diff --git a/test/actions-subdir/src/subdir/make-subdir-file.py b/test/actions-subdir/src/subdir/make-subdir-file.py index 80ce19ae..02c090a0 100755 --- a/test/actions-subdir/src/subdir/make-subdir-file.py +++ b/test/actions-subdir/src/subdir/make-subdir-file.py @@ -8,4 +8,4 @@ contents = 'Hello from make-subdir-file.py\n' -open(sys.argv[1], 'wb').write(contents) +open(sys.argv[1], 'w').write(contents) diff --git a/test/actions/src/subdir2/make-file.py b/test/actions/src/subdir2/make-file.py index fff06531..088a05e0 100755 --- a/test/actions/src/subdir2/make-file.py +++ b/test/actions/src/subdir2/make-file.py @@ -8,4 +8,4 @@ contents = "Hello from make-file.py\n" -open(sys.argv[1], 'wb').write(contents) +open(sys.argv[1], 'w').write(contents) diff --git a/test/additional-targets/src/dir1/emit.py b/test/additional-targets/src/dir1/emit.py index fd313873..96db7a57 100755 --- a/test/additional-targets/src/dir1/emit.py +++ b/test/additional-targets/src/dir1/emit.py @@ -6,6 +6,6 @@ import sys -f = open(sys.argv[1], 'wb') +f = open(sys.argv[1], 'w') f.write('Hello from emit.py\n') f.close() diff --git a/test/analyzer/gyptest-analyzer.py b/test/analyzer/gyptest-analyzer.py index 72de2181..58a1ce6f 100644 --- a/test/analyzer/gyptest-analyzer.py +++ b/test/analyzer/gyptest-analyzer.py @@ -6,6 +6,8 @@ """Tests for analyzer """ +from __future__ import print_function + import json import TestGyp @@ -75,56 +77,56 @@ def EnsureContains(matched=False, compile_targets=set(), 
test_targets=set()): """Verifies output contains |compile_targets|.""" result = _ReadOutputFileContents() if 'error' in result: - print 'unexpected error', result.get('error') + print('unexpected error', result.get('error')) test.fail_test() if 'invalid_targets' in result: - print 'unexpected invalid_targets', result.get('invalid_targets') + print('unexpected invalid_targets', result.get('invalid_targets')) test.fail_test() actual_compile_targets = set(result['compile_targets']) if actual_compile_targets != compile_targets: - print 'actual compile_targets:', actual_compile_targets, \ - '\nexpected compile_targets:', compile_targets + print('actual compile_targets:', actual_compile_targets, + '\nexpected compile_targets:', compile_targets) test.fail_test() actual_test_targets = set(result['test_targets']) if actual_test_targets != test_targets: - print 'actual test_targets:', actual_test_targets, \ - '\nexpected test_targets:', test_targets + print('actual test_targets:', actual_test_targets, + '\nexpected test_targets:', test_targets) test.fail_test() if matched and result['status'] != found: - print 'expected', found, 'got', result['status'] + print('expected', found, 'got', result['status']) test.fail_test() elif not matched and result['status'] != not_found: - print 'expected', not_found, 'got', result['status'] + print('expected', not_found, 'got', result['status']) test.fail_test() def EnsureMatchedAll(compile_targets, test_targets=set()): result = _ReadOutputFileContents() if 'error' in result: - print 'unexpected error', result.get('error') + print('unexpected error', result.get('error')) test.fail_test() if 'invalid_targets' in result: - print 'unexpected invalid_targets', result.get('invalid_targets') + print('unexpected invalid_targets', result.get('invalid_targets')) test.fail_test() if result['status'] != found_all: - print 'expected', found_all, 'got', result['status'] + print('expected', found_all, 'got', result['status']) test.fail_test() actual_compile_targets = set(result['compile_targets']) if actual_compile_targets != compile_targets: - print ('actual compile_targets:', actual_compile_targets, + print('actual compile_targets:', actual_compile_targets, '\nexpected compile_targets:', compile_targets) test.fail_test() actual_test_targets = set(result['test_targets']) if actual_test_targets != test_targets: - print ('actual test_targets:', actual_test_targets, + print('actual test_targets:', actual_test_targets, '\nexpected test_targets:', test_targets) test.fail_test() @@ -133,15 +135,15 @@ def EnsureError(expected_error_string): """Verifies output contains the error string.""" result = _ReadOutputFileContents() if result.get('error', '').find(expected_error_string) == -1: - print 'actual error:', result.get('error', ''), '\nexpected error:', \ - expected_error_string + print('actual error:', result.get('error', ''), '\nexpected error:', + expected_error_string) test.fail_test() def EnsureStdoutContains(expected_error_string): if test.stdout().find(expected_error_string) == -1: - print 'actual stdout:', test.stdout(), '\nexpected stdout:', \ - expected_error_string + print('actual stdout:', test.stdout(), '\nexpected stdout:', + expected_error_string) test.fail_test() @@ -150,8 +152,8 @@ def EnsureInvalidTargets(expected_invalid_targets): result = _ReadOutputFileContents() actual_invalid_targets = set(result['invalid_targets']) if actual_invalid_targets != expected_invalid_targets: - print 'actual invalid_targets:', actual_invalid_targets, \ - '\nexpected :', 
expected_invalid_targets + print('actual invalid_targets:', actual_invalid_targets, + '\nexpected :', expected_invalid_targets) test.fail_test() diff --git a/test/arflags/gyptest-arflags.py b/test/arflags/gyptest-arflags.py index a5cbcac7..870a2d89 100644 --- a/test/arflags/gyptest-arflags.py +++ b/test/arflags/gyptest-arflags.py @@ -8,12 +8,14 @@ Verifies that building a target with invalid arflags fails. """ +from __future__ import print_function + import os import sys import TestGyp if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) diff --git a/test/compiler-override/gyptest-compiler-global-settings.py b/test/compiler-override/gyptest-compiler-global-settings.py index c0fe321c..9f062a4f 100755 --- a/test/compiler-override/gyptest-compiler-global-settings.py +++ b/test/compiler-override/gyptest-compiler-global-settings.py @@ -6,6 +6,7 @@ Verifies that make_global_settings can be used to override the compiler settings. """ +from __future__ import print_function import TestGyp import os @@ -19,7 +20,7 @@ # and make not supported on windows at all. sys.exit(0) -print "This test is currently disabled: https://crbug.com/483696." +print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp(formats=['ninja', 'make']) diff --git a/test/compiler-override/my_cc.py b/test/compiler-override/my_cc.py index e2f0bdd5..09e1d3c5 100755 --- a/test/compiler-override/my_cc.py +++ b/test/compiler-override/my_cc.py @@ -2,5 +2,6 @@ # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +from __future__ import print_function import sys -print sys.argv +print(sys.argv) diff --git a/test/compiler-override/my_cxx.py b/test/compiler-override/my_cxx.py index e2f0bdd5..09e1d3c5 100755 --- a/test/compiler-override/my_cxx.py +++ b/test/compiler-override/my_cxx.py @@ -2,5 +2,6 @@ # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +from __future__ import print_function import sys -print sys.argv +print(sys.argv) diff --git a/test/compiler-override/my_ld.py b/test/compiler-override/my_ld.py index e2f0bdd5..09e1d3c5 100755 --- a/test/compiler-override/my_ld.py +++ b/test/compiler-override/my_ld.py @@ -2,5 +2,6 @@ # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +from __future__ import print_function import sys -print sys.argv +print(sys.argv) diff --git a/test/compiler-override/my_nm.py b/test/compiler-override/my_nm.py index f0f1efcf..2c4e6781 100755 --- a/test/compiler-override/my_nm.py +++ b/test/compiler-override/my_nm.py @@ -2,7 +2,8 @@ # Copyright (c) 2014 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +from __future__ import print_function import sys -print sys.argv +print(sys.argv) with open('RAN_MY_NM', 'w') as f: f.write('RAN_MY_NM') diff --git a/test/compiler-override/my_readelf.py b/test/compiler-override/my_readelf.py index 40e303cd..62666543 100755 --- a/test/compiler-override/my_readelf.py +++ b/test/compiler-override/my_readelf.py @@ -2,7 +2,8 @@ # Copyright (c) 2014 Google Inc. All rights reserved. 
# Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +from __future__ import print_function import sys -print sys.argv +print(sys.argv) with open('RAN_MY_READELF', 'w') as f: f.write('RAN_MY_READELF') diff --git a/test/configurations/inheritance/gyptest-duplicates.py b/test/configurations/inheritance/gyptest-duplicates.py index 46687b45..f015638b 100755 --- a/test/configurations/inheritance/gyptest-duplicates.py +++ b/test/configurations/inheritance/gyptest-duplicates.py @@ -8,6 +8,8 @@ Verifies that configurations do not duplicate other settings. """ +from __future__ import print_function + import TestGyp test = TestGyp.TestGyp(format='gypd') @@ -27,7 +29,7 @@ '\r', '').replace('\\\\', '/') expect = test.read('duplicates.gypd.golden').replace('\r', '') if not test.match(contents, expect): - print "Unexpected contents of `duplicates.gypd'" + print("Unexpected contents of `duplicates.gypd'") test.diff(expect, contents, 'duplicates.gypd ') test.fail_test() diff --git a/test/configurations/target_platform/gyptest-target_platform.py b/test/configurations/target_platform/gyptest-target_platform.py index ae4e9e5a..1645d6ec 100755 --- a/test/configurations/target_platform/gyptest-target_platform.py +++ b/test/configurations/target_platform/gyptest-target_platform.py @@ -15,7 +15,7 @@ def RunX64(exe, stdout): try: test.run_built_executable(exe, stdout=stdout) - except WindowsError, e: + except WindowsError as e: # Assume the exe is 64-bit if it can't load on 32-bit systems. # Both versions of the error are required because different versions # of python seem to return different errors for invalid exe type. diff --git a/test/copies/gyptest-updir.py b/test/copies/gyptest-updir.py index a34ae707..47a2ca2e 100755 --- a/test/copies/gyptest-updir.py +++ b/test/copies/gyptest-updir.py @@ -9,12 +9,14 @@ yields a make variable. """ +from __future__ import print_function + import sys import TestGyp if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp() diff --git a/test/custom-generator/mygenerator.py b/test/custom-generator/mygenerator.py index 8eb4c2de..5fcac3d7 100644 --- a/test/custom-generator/mygenerator.py +++ b/test/custom-generator/mygenerator.py @@ -9,6 +9,6 @@ generator_default_variables = {} def GenerateOutput(target_list, target_dicts, data, params): - f = open("MyBuildFile", "wb") + f = open("MyBuildFile", "w") f.write("Testing...\n") f.close() diff --git a/test/dependencies/adso/write_args.py b/test/dependencies/adso/write_args.py index cc87cf51..5e388b8f 100755 --- a/test/dependencies/adso/write_args.py +++ b/test/dependencies/adso/write_args.py @@ -6,6 +6,6 @@ import sys -f = open(sys.argv[1], 'wb') +f = open(sys.argv[1], 'w') f.write(' '.join(sys.argv[2:])) f.close() diff --git a/test/determinism/gyptest-solibs.py b/test/determinism/gyptest-solibs.py index de9588db..a9c31257 100644 --- a/test/determinism/gyptest-solibs.py +++ b/test/determinism/gyptest-solibs.py @@ -9,6 +9,8 @@ Tests all_targets, implicit_deps and solibs. 
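Note (reviewer aside): the 'wb'-to-'w' edits in these helper scripts are what let plain str writes keep working on Python 3, where a binary-mode file only accepts bytes. A minimal, self-contained illustration (the temporary-file handling is mine; the real scripts write to sys.argv[1]):

    import os
    import tempfile

    fd, path = tempfile.mkstemp()
    os.close(fd)
    with open(path, 'w') as f:            # text mode takes str on Python 2 and 3;
        f.write('Hello from bare.py\n')   # 'wb' on Python 3 would require bytes
    with open(path, 'r') as f:
        print(f.read().rstrip())
    os.remove(path)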
""" +from __future__ import print_function + import os import sys import TestGyp @@ -28,7 +30,7 @@ if base1 != contents1: test.fail_test() if base2 != contents2: - print base2 + print(base2) test.fail_test() del os.environ["PYTHONHASHSEED"] diff --git a/test/determinism/rule.py b/test/determinism/rule.py index 310a9819..e18c3145 100644 --- a/test/determinism/rule.py +++ b/test/determinism/rule.py @@ -1,3 +1,8 @@ #!/usr/bin/env python +# Copyright (c) 2017 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. -print 'Hello World' +from __future__ import print_function + +print('Hello World') diff --git a/test/escaping/gyptest-colon.py b/test/escaping/gyptest-colon.py index 61a0e245..f62f8dc6 100644 --- a/test/escaping/gyptest-colon.py +++ b/test/escaping/gyptest-colon.py @@ -9,11 +9,13 @@ (This is important for absolute paths on Windows.) """ +from __future__ import print_function + import os import sys if sys.platform == 'win32': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) diff --git a/test/generator-output/actions/subdir2/make-file.py b/test/generator-output/actions/subdir2/make-file.py index fff06531..088a05e0 100755 --- a/test/generator-output/actions/subdir2/make-file.py +++ b/test/generator-output/actions/subdir2/make-file.py @@ -8,4 +8,4 @@ contents = "Hello from make-file.py\n" -open(sys.argv[1], 'wb').write(contents) +open(sys.argv[1], 'w').write(contents) diff --git a/test/generator-output/gyptest-mac-bundle.py b/test/generator-output/gyptest-mac-bundle.py index 8d19eedb..14597d8d 100644 --- a/test/generator-output/gyptest-mac-bundle.py +++ b/test/generator-output/gyptest-mac-bundle.py @@ -8,12 +8,14 @@ Verifies mac bundles work with --generator-output. """ +from __future__ import print_function + import TestGyp import sys if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." 
+ print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp(formats=[]) diff --git a/test/generator-output/rules/copy-file.py b/test/generator-output/rules/copy-file.py index 938c336a..80c6749f 100755 --- a/test/generator-output/rules/copy-file.py +++ b/test/generator-output/rules/copy-file.py @@ -7,6 +7,6 @@ import sys contents = open(sys.argv[1], 'r').read() -open(sys.argv[2], 'wb').write(contents) +open(sys.argv[2], 'w').write(contents) sys.exit(0) diff --git a/test/hard_dependency/src/emit.py b/test/hard_dependency/src/emit.py index 2df74b79..8ed12f73 100755 --- a/test/hard_dependency/src/emit.py +++ b/test/hard_dependency/src/emit.py @@ -6,6 +6,6 @@ import sys -f = open(sys.argv[1], 'wb') +f = open(sys.argv[1], 'w') f.write('/* Hello World */\n') f.close() diff --git a/test/ios/gyptest-app-ios.py b/test/ios/gyptest-app-ios.py index 853744f9..99f9e865 100755 --- a/test/ios/gyptest-app-ios.py +++ b/test/ios/gyptest-app-ios.py @@ -16,13 +16,13 @@ def CheckFileXMLPropertyList(file): output = subprocess.check_output(['file', file]) if not 'XML 1.0 document text' in output: - print 'File: Expected XML 1.0 document text, got %s' % output + print('File: Expected XML 1.0 document text, got %s' % output) test.fail_test() def CheckFileBinaryPropertyList(file): output = subprocess.check_output(['file', file]) if not 'Apple binary property list' in output: - print 'File: Expected Apple binary property list, got %s' % output + print('File: Expected Apple binary property list, got %s' % output) test.fail_test() if sys.platform == 'darwin': diff --git a/test/ios/gyptest-extension.py b/test/ios/gyptest-extension.py index 95689463..bb239ae5 100755 --- a/test/ios/gyptest-extension.py +++ b/test/ios/gyptest-extension.py @@ -8,6 +8,8 @@ Verifies that ios app extensions are built correctly. """ +from __future__ import print_function + import TestGyp import TestMac import subprocess @@ -15,12 +17,12 @@ def CheckStrip(p, expected): if expected not in subprocess.check_output(['nm','-gU', p]): - print expected + " shouldn't get stripped out." + print(expected + " shouldn't get stripped out.") test.fail_test() def CheckEntrypoint(p, expected): if expected not in subprocess.check_output(['nm', p]): - print expected + "not found." + print(expected + "not found.") test.fail_test() if sys.platform == 'darwin' and TestMac.Xcode.Version()>="0600": diff --git a/test/ios/gyptest-per-config-settings.py b/test/ios/gyptest-per-config-settings.py index f0573dde..c3a22e05 100644 --- a/test/ios/gyptest-per-config-settings.py +++ b/test/ios/gyptest-per-config-settings.py @@ -8,6 +8,8 @@ Verifies that device and simulator bundles are built correctly. """ +from __future__ import print_function + import plistlib import TestGyp import os @@ -17,7 +19,7 @@ import tempfile import TestMac -print "This test is currently disabled: https://crbug.com/483696." +print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) def CheckFileType(file, expected): @@ -25,7 +27,7 @@ def CheckFileType(file, expected): o = proc.communicate()[0].strip() assert not proc.returncode if not expected in o: - print 'File: Expected %s, got %s' % (expected, o) + print('File: Expected %s, got %s' % (expected, o)) test.fail_test() def HasCerts(): @@ -40,7 +42,7 @@ def CheckSignature(file): o = proc.communicate()[0].strip() assert not proc.returncode if "code object is not signed at all" in o: - print 'File %s not properly signed.' % (file) + print('File %s not properly signed.' 
% (file)) test.fail_test() def CheckEntitlements(file, expected_entitlements): @@ -52,10 +54,10 @@ def CheckEntitlements(file, expected_entitlements): data = temp.read() entitlements = ParseEntitlements(data) if not entitlements: - print 'No valid entitlements found in %s.' % (file) + print('No valid entitlements found in %s.' % (file)) test.fail_test() if entitlements != expected_entitlements: - print 'Unexpected entitlements found in %s.' % (file) + print('Unexpected entitlements found in %s.' % (file)) test.fail_test() def ParseEntitlements(data): @@ -78,17 +80,17 @@ def GetMachineBuild(): def CheckPlistvalue(plist, key, expected): if key not in plist: - print '%s not set in plist' % key + print('%s not set in plist' % key) test.fail_test() return actual = plist[key] if actual != expected: - print 'File: Expected %s, got %s for %s' % (expected, actual, key) + print('File: Expected %s, got %s for %s' % (expected, actual, key)) test.fail_test() def CheckPlistNotSet(plist, key): if key in plist: - print '%s should not be set in plist' % key + print('%s should not be set in plist' % key) test.fail_test() return @@ -114,7 +116,7 @@ def ConvertBinaryPlistToXML(path): xcode_version = TestMac.Xcode.Version() if xcode_version >= '0720': if len(plist) != 23: - print 'plist should have 23 entries, but it has %s' % len(plist) + print('plist should have 23 entries, but it has %s' % len(plist)) test.fail_test() # Values that will hopefully never change. diff --git a/test/ios/gyptest-watch.py b/test/ios/gyptest-watch.py index f5c4601d..39bab49b 100755 --- a/test/ios/gyptest-watch.py +++ b/test/ios/gyptest-watch.py @@ -8,13 +8,15 @@ Verifies that ios watch extensions and apps are built correctly. """ +from __future__ import print_function + import TestGyp import TestMac import sys if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) diff --git a/test/lib/TestCmd.py b/test/lib/TestCmd.py index 25c01ec9..5f519c69 100644 --- a/test/lib/TestCmd.py +++ b/test/lib/TestCmd.py @@ -218,6 +218,8 @@ # AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. +from __future__ import print_function + __author__ = "Steven Knight " __revision__ = "TestCmd.py 0.37.D001 2010/01/11 16:55:50 knight" __version__ = "0.37" @@ -228,13 +230,14 @@ import re import shutil import stat -import string import sys import tempfile import time import traceback -import types -import UserList +try: + from UserList import UserList +except ImportError: + from collections import UserList __all__ = [ 'diff_re', @@ -254,23 +257,25 @@ __all__.append('simple_diff') def is_List(e): - return type(e) is types.ListType \ - or isinstance(e, UserList.UserList) + return (type(e) is list) or isinstance(e, UserList) try: from UserString import UserString except ImportError: - class UserString: - pass + try: + from collections import UserString + except ImportError: + class UserString: + pass -if hasattr(types, 'UnicodeType'): - def is_String(e): - return type(e) is types.StringType \ - or type(e) is types.UnicodeType \ - or isinstance(e, UserString) -else: - def is_String(e): - return type(e) is types.StringType or isinstance(e, UserString) +try: + # basestring was removed in python3. + basestring +except NameError: + basestring = str + +def is_String(e): + return isinstance(e, basestring) or isinstance(e, UserString) tempfile.template = 'testcmd.' 
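Note (reviewer aside): UserList and UserString moved from their own Python 2 modules into collections on Python 3, hence the try/except ImportError shims above. The same pattern in isolation:

    try:
        from UserList import UserList        # Python 2 location
    except ImportError:
        from collections import UserList     # Python 3 location

    def is_list(value):
        return isinstance(value, (list, UserList))

    print([is_list([1, 2]), is_list(UserList([1, 2])), is_list('no')])   # [True, True, False]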
if os.name in ('posix', 'nt'): @@ -286,11 +291,10 @@ def is_String(e): def _clean(): global _Cleanup - cleanlist = filter(None, _Cleanup) + for test in reversed(_Cleanup): + if test: + test.cleanup() del _Cleanup[:] - cleanlist.reverse() - for test in cleanlist: - test.cleanup() if _chain_to_exitfunc: _chain_to_exitfunc() @@ -311,7 +315,7 @@ def _clean(): except NameError: def zip(*lists): result = [] - for i in xrange(min(map(len, lists))): + for i in range(min(map(len, lists))): result.append(tuple(map(lambda l, i=i: l[i], lists))) return result @@ -414,9 +418,9 @@ def match_exact(lines = None, matches = None): """ """ if not is_List(lines): - lines = string.split(lines, "\n") + lines = lines.split("\n") if not is_List(matches): - matches = string.split(matches, "\n") + matches = matches.split("\n") if len(lines) != len(matches): return for i in range(len(lines)): @@ -428,18 +432,18 @@ def match_re(lines = None, res = None): """ """ if not is_List(lines): - lines = string.split(lines, "\n") + lines = lines.split("\n") if not is_List(res): - res = string.split(res, "\n") + res = res.split("\n") if len(lines) != len(res): return for i in range(len(lines)): s = "^" + res[i] + "$" try: expr = re.compile(s) - except re.error, e: + except re.error as e: msg = "Regular expression error in %s: %s" - raise re.error, msg % (repr(s), e[0]) + raise re.error(msg % (repr(s), e[0])) if not expr.search(lines[i]): return return 1 @@ -448,15 +452,15 @@ def match_re_dotall(lines = None, res = None): """ """ if not type(lines) is type(""): - lines = string.join(lines, "\n") + lines = "\n".join(lines) if not type(res) is type(""): - res = string.join(res, "\n") + res = "\n".join(res) s = "^" + res + "$" try: expr = re.compile(s, re.DOTALL) - except re.error, e: + except re.error as e: msg = "Regular expression error in %s: %s" - raise re.error, msg % (repr(s), e[0]) + raise re.error(msg % (repr(s), e[0])) if expr.match(lines): return 1 @@ -510,9 +514,9 @@ def diff_re(a, b, fromfile='', tofile='', s = "^" + aline + "$" try: expr = re.compile(s) - except re.error, e: + except re.error as e: msg = "Regular expression error in %s: %s" - raise re.error, msg % (repr(s), e[0]) + raise re.error(msg % (repr(s), e[0])) if not expr.search(bline): result.append("%sc%s" % (i+1, i+1)) result.append('< ' + repr(a[i])) @@ -537,13 +541,13 @@ def where_is(file, path=None, pathext=None): if path is None: path = os.environ['PATH'] if is_String(path): - path = string.split(path, os.pathsep) + path = path.split(os.pathsep) if pathext is None: pathext = os.environ['PATHEXT'] if is_String(pathext): - pathext = string.split(pathext, os.pathsep) + pathext = pathext.split(os.pathsep) for ext in pathext: - if string.lower(ext) == string.lower(file[-len(ext):]): + if ext.lower() == file[-len(ext):].lower(): pathext = [''] break for dir in path: @@ -560,7 +564,7 @@ def where_is(file, path=None, pathext=None): if path is None: path = os.environ['PATH'] if is_String(path): - path = string.split(path, os.pathsep) + path = path.split(os.pathsep) for dir in path: f = os.path.join(dir, file) if os.path.isfile(f): @@ -568,7 +572,7 @@ def where_is(file, path=None, pathext=None): st = os.stat(f) except OSError: continue - if stat.S_IMODE(st[stat.ST_MODE]) & 0111: + if stat.S_IMODE(st[stat.ST_MODE]) & 0o111: return f return None @@ -656,14 +660,14 @@ class Popen3(popen2.Popen3, popen2.Popen4): universal_newlines = 1 def __init__(self, command, **kw): if kw.get('stderr') == 'STDOUT': - apply(popen2.Popen4.__init__, (self, command, 1)) + 
popen2.Popen4.__init__(self, command, 1) else: - apply(popen2.Popen3.__init__, (self, command, 1)) + popen2.Popen3.__init__(self, command, 1) self.stdin = self.tochild self.stdout = self.fromchild self.stderr = self.childerr def wait(self, *args, **kw): - resultcode = apply(popen2.Popen3.wait, (self,)+args, kw) + resultcode = popen2.Popen3.wait(self, *args, **kw) if os.WIFEXITED(resultcode): return os.WEXITSTATUS(resultcode) elif os.WIFSIGNALED(resultcode): @@ -681,7 +685,7 @@ def wait(self, *args, **kw): PIPE = subprocess.PIPE -if subprocess.mswindows: +if sys.platform == 'win32': from win32file import ReadFile, WriteFile from win32pipe import PeekNamedPipe import msvcrt @@ -716,7 +720,7 @@ def _close(self, which): getattr(self, which).close() setattr(self, which, None) - if subprocess.mswindows: + if sys.platform == 'win32': def send(self, input): if not self.stdin: return None @@ -726,7 +730,7 @@ def send(self, input): (errCode, written) = WriteFile(x, input) except ValueError: return self._close('stdin') - except (subprocess.pywintypes.error, Exception), why: + except (subprocess.pywintypes.error, Exception) as why: if why[0] in (109, errno.ESHUTDOWN): return self._close('stdin') raise @@ -747,7 +751,7 @@ def _recv(self, which, maxsize): (errCode, read) = ReadFile(x, nAvail, None) except ValueError: return self._close(which) - except (subprocess.pywintypes.error, Exception), why: + except (subprocess.pywintypes.error, Exception) as why: if why[0] in (109, errno.ESHUTDOWN): return self._close(which) raise @@ -766,7 +770,7 @@ def send(self, input): try: written = os.write(self.stdin.fileno(), input) - except OSError, why: + except OSError as why: if why[0] == errno.EPIPE: #broken pipe return self._close('stdin') raise @@ -835,14 +839,6 @@ def send_all(p, data): -try: - object -except NameError: - class object: - pass - - - class TestCmd(object): """Class TestCmd """ @@ -886,7 +882,7 @@ def __init__(self, description = None, #self.diff_function = difflib.unified_diff self._dirlist = [] self._preserve = {'pass_test': 0, 'fail_test': 0, 'no_result': 0} - if os.environ.has_key('PRESERVE') and not os.environ['PRESERVE'] is '': + if 'PRESERVE' in os.environ and os.environ['PRESERVE'] is not '': self._preserve['pass_test'] = os.environ['PRESERVE'] self._preserve['fail_test'] = os.environ['PRESERVE'] self._preserve['no_result'] = os.environ['PRESERVE'] @@ -931,9 +927,9 @@ def escape(self, arg): slash = '\\' special = '"$' - arg = string.replace(arg, slash, slash+slash) + arg = arg.replace(slash, slash+slash) for c in special: - arg = string.replace(arg, c, slash+c) + arg = arg.replace(c, slash+c) if re_space.search(arg): arg = '"' + arg + '"' @@ -951,7 +947,7 @@ def escape(self, arg): def canonicalize(self, path): if is_List(path): - path = apply(os.path.join, tuple(path)) + path = os.path.join(*path) if not os.path.isabs(path): path = os.path.join(self.workdir, path) return path @@ -985,7 +981,7 @@ def cleanup(self, condition = None): condition = self.condition if self._preserve[condition]: for dir in self._dirlist: - print "Preserved directory", dir + print("Preserved directory", dir) else: list = self._dirlist[:] list.reverse() @@ -1019,7 +1015,7 @@ def command_args(self, program = None, cmd = list(interpreter) + cmd if arguments: if type(arguments) == type(''): - arguments = string.split(arguments) + arguments = arguments.split() cmd.extend(arguments) return cmd @@ -1032,17 +1028,17 @@ def description_set(self, description): difflib except NameError: def diff(self, a, b, name, *args, **kw): 
- print self.banner('Expected %s' % name) - print a - print self.banner('Actual %s' % name) - print b + print(self.banner('Expected %s' % name)) + print(a) + print(self.banner('Actual %s' % name)) + print(b) else: def diff(self, a, b, name, *args, **kw): - print self.banner(name) + print(self.banner(name)) args = (a.splitlines(), b.splitlines()) + args - lines = apply(self.diff_function, args, kw) + lines = self.diff_function(*args, **kw) for l in lines: - print l + print(l) def fail_test(self, condition = 1, function = None, skip = 0): """Cause the test to fail. @@ -1119,18 +1115,18 @@ def program_set(self, program): program = os.path.join(self._cwd, program) self.program = program - def read(self, file, mode = 'rb'): + def read(self, file, mode = 'r'): """Reads and returns the contents of the specified file name. The file name may be a list, in which case the elements are concatenated with the os.path.join() method. The file is assumed to be under the temporary working directory unless it is an absolute path name. The I/O mode for the file may be specified; it must begin with an 'r'. The default is - 'rb' (binary read). + 'r' (string read). """ file = self.canonicalize(file) if mode[0] != 'r': - raise ValueError, "mode must begin with 'r'" + raise ValueError("mode must begin with 'r'") with open(file, mode) as f: result = f.read() return result @@ -1158,13 +1154,13 @@ def start(self, program = None, prepended unless it is enclosed in a [list]. """ cmd = self.command_args(program, interpreter, arguments) - cmd_string = string.join(map(self.escape, cmd), ' ') + cmd_string = ' '.join(map(self.escape, cmd)) if self.verbose: sys.stderr.write(cmd_string + "\n") if universal_newlines is None: universal_newlines = self.universal_newlines - # On Windows, if we make stdin a pipe when we plan to send + # On Windows, if we make stdin a pipe when we plan to send # no input, and the test program exits before # Popen calls msvcrt.open_osfhandle, that call will fail. # So don't use a pipe for stdin if we don't need one. @@ -1323,7 +1319,7 @@ def subdir(self, *subdirs): if sub is None: continue if is_List(sub): - sub = apply(os.path.join, tuple(sub)) + sub = os.path.join(*sub) new = os.path.join(self.workdir, sub) try: os.mkdir(new) @@ -1371,7 +1367,7 @@ def tempdir(self, path=None): # letters is pretty much random on win32: drive,rest = os.path.splitdrive(path) if drive: - path = string.upper(drive) + rest + path = drive.upper() + rest # self._dirlist.append(path) @@ -1413,7 +1409,7 @@ def where_is(self, file, path=None, pathext=None): """Find an executable file. """ if is_List(file): - file = apply(os.path.join, tuple(file)) + file = os.path.join(*file) if not os.path.isabs(file): file = where_is(file, path, pathext) return file @@ -1435,7 +1431,7 @@ def workpath(self, *args): the temporary working directory name with the specified arguments using the os.path.join() method. """ - return apply(os.path.join, (self.workdir,) + tuple(args)) + return os.path.join(self.workdir, *args) def readable(self, top, read=1): """Make the specified directory tree readable (read == 1) @@ -1466,27 +1462,27 @@ def do_chmod(fname): # It's a directory and we're trying to turn on read # permission, so it's also pretty easy, just chmod the # directory and then chmod every entry on our walk down the - # tree. Because os.path.walk() is top-down, we'll enable + # tree. Because os.walk() is top-down, we'll enable # read permission on any directories that have it disabled - # before os.path.walk() tries to list their contents. 
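Note (reviewer aside): the 0111 -> 0o111 and 0200 -> 0o200 edits in this file are required syntax, not style: a bare leading-zero octal literal no longer parses on Python 3, while the 0o prefix is accepted from Python 2.6 on. The values are unchanged:

    import stat

    EXEC_BITS = 0o111    # was 0111; a SyntaxError on Python 3
    WRITE_BIT = 0o200    # was 0200

    print(EXEC_BITS == (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH))   # True
    print(WRITE_BIT == stat.S_IWUSR)                                   # True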
+ # before os.walk() tries to list their contents. do_chmod(top) def chmod_entries(arg, dirname, names, do_chmod=do_chmod): for n in names: do_chmod(os.path.join(dirname, n)) - os.path.walk(top, chmod_entries, None) + os.walk(top, chmod_entries, None) else: # It's a directory and we're trying to turn off read # permission, which means we have to chmod the directoreis # in the tree bottom-up, lest disabling read permission from # the top down get in the way of being able to get at lower - # parts of the tree. But os.path.walk() visits things top + # parts of the tree. But os.walk() visits things top # down, so we just use an object to collect a list of all # of the entries in the tree, reverse the list, and then # chmod the reversed (bottom-up) list. col = Collector(top) - os.path.walk(top, col, None) + os.walk(top, col, None) col.entries.reverse() for d in col.entries: do_chmod(d) @@ -1512,18 +1508,18 @@ def do_chmod(fname): def do_chmod(fname): try: st = os.stat(fname) except OSError: pass - else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|0200)) + else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|0o200)) else: def do_chmod(fname): try: st = os.stat(fname) except OSError: pass - else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~0200)) + else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~0o200)) if os.path.isfile(top): do_chmod(top) else: col = Collector(top) - os.path.walk(top, col, None) + os.walk(top, col, None) for d in col.entries: do_chmod(d) def executable(self, top, execute=1): @@ -1555,42 +1551,42 @@ def do_chmod(fname): # It's a directory and we're trying to turn on execute # permission, so it's also pretty easy, just chmod the # directory and then chmod every entry on our walk down the - # tree. Because os.path.walk() is top-down, we'll enable + # tree. Because os.walk() is top-down, we'll enable # execute permission on any directories that have it disabled - # before os.path.walk() tries to list their contents. + # before os.walk() tries to list their contents. do_chmod(top) def chmod_entries(arg, dirname, names, do_chmod=do_chmod): for n in names: do_chmod(os.path.join(dirname, n)) - os.path.walk(top, chmod_entries, None) + os.walk(top, chmod_entries, None) else: # It's a directory and we're trying to turn off execute # permission, which means we have to chmod the directories # in the tree bottom-up, lest disabling execute permission from # the top down get in the way of being able to get at lower - # parts of the tree. But os.path.walk() visits things top + # parts of the tree. But os.walk() visits things top # down, so we just use an object to collect a list of all # of the entries in the tree, reverse the list, and then # chmod the reversed (bottom-up) list. col = Collector(top) - os.path.walk(top, col, None) + os.walk(top, col, None) col.entries.reverse() for d in col.entries: do_chmod(d) - def write(self, file, content, mode = 'wb'): + def write(self, file, content, mode = 'w'): """Writes the specified content text (second argument) to the specified file name (first argument). The file name may be a list, in which case the elements are concatenated with the os.path.join() method. The file is created under the temporary working directory. Any subdirectories in the path must already exist. The I/O mode for the file may be specified; it must - begin with a 'w'. The default is 'wb' (binary write). + begin with a 'w'. The default is 'w' (binary write). 
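Note (reviewer aside): unlike the other edits here, swapping os.path.walk() for os.walk() is not a drop-in rename. os.path.walk() took a callback, while os.walk() is a generator with a (top, topdown, onerror, followlinks) signature, so calls like `os.walk(top, chmod_entries, None)` above appear to build a generator that is never iterated, leaving the chmod callback unexecuted. A sketch of the usual generator-based port (names are illustrative, not from the patch):

    import os

    def chmod_tree(top, do_chmod):
        # Visit every entry below top, the way the old callback-based walk did.
        for dirpath, dirnames, filenames in os.walk(top):
            for name in dirnames + filenames:
                do_chmod(os.path.join(dirpath, name))

    # chmod_tree(some_dir, do_chmod)   # hypothetical usage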
""" file = self.canonicalize(file) if mode[0] != 'w': - raise ValueError, "mode must begin with 'w'" + raise ValueError("mode must begin with 'w'") with open(file, mode) as f: f.write(content) diff --git a/test/lib/TestCommon.py b/test/lib/TestCommon.py index 2f526a6e..6850ce9a 100644 --- a/test/lib/TestCommon.py +++ b/test/lib/TestCommon.py @@ -1,3 +1,7 @@ +# Copyright (c) 2017 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + """ TestCommon.py: a testing framework for commands and scripts with commonly useful error handling @@ -88,6 +92,8 @@ # AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. +from __future__ import print_function + __author__ = "Steven Knight " __revision__ = "TestCommon.py 0.37.D001 2010/01/11 16:55:50 knight" __version__ = "0.37" @@ -96,10 +102,11 @@ import os import os.path import stat -import string import sys -import types -import UserList +try: + from UserList import UserList +except ImportError: + from collections import UserList from TestCmd import * from TestCmd import __all__ @@ -138,7 +145,7 @@ dll_suffix = '.dll' module_prefix = '' module_suffix = '.dll' -elif string.find(sys.platform, 'irix') != -1: +elif sys.platform.find('irix') != -1: exe_suffix = '' obj_suffix = '.o' shobj_suffix = '.o' @@ -149,7 +156,7 @@ dll_suffix = '.so' module_prefix = 'lib' module_prefix = '.so' -elif string.find(sys.platform, 'darwin') != -1: +elif sys.platform.find('darwin') != -1: exe_suffix = '' obj_suffix = '.o' shobj_suffix = '.os' @@ -160,7 +167,7 @@ dll_suffix = '.dylib' module_prefix = '' module_suffix = '.so' -elif string.find(sys.platform, 'sunos') != -1: +elif sys.platform.find('sunos') != -1: exe_suffix = '' obj_suffix = '.o' shobj_suffix = '.os' @@ -184,8 +191,8 @@ module_suffix = '.so' def is_List(e): - return type(e) is types.ListType \ - or isinstance(e, UserList.UserList) + return type(e) is list \ + or isinstance(e, UserList) def is_writable(f): mode = os.stat(f)[stat.ST_MODE] @@ -227,7 +234,7 @@ def __init__(self, **kw): calling the base class initialization, and then changing directory to the workdir. """ - apply(TestCmd.__init__, [self], kw) + TestCmd.__init__(self, **kw) os.chdir(self.workdir) def must_be_writable(self, *files): @@ -237,26 +244,26 @@ def must_be_writable(self, *files): them. Exits FAILED if any of the files does not exist or is not writable. """ - files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files) + files = map((lambda x: os.path.join(*x) if is_List(x) else x), files) existing, missing = separate_files(files) - unwritable = filter(lambda x, iw=is_writable: not iw(x), existing) + unwritable = [x for x in existing if not is_writable(x)] if missing: - print "Missing files: `%s'" % string.join(missing, "', `") + print("Missing files: `%s'" % "', `".join(missing)) if unwritable: - print "Unwritable files: `%s'" % string.join(unwritable, "', `") + print("Unwritable files: `%s'" % "', `".join(unwritable)) self.fail_test(missing + unwritable) - def must_contain(self, file, required, mode = 'rb'): + def must_contain(self, file, required, mode = 'r'): """Ensures that the specified file contains the required text. """ file_contents = self.read(file, mode) - contains = (string.find(file_contents, required) != -1) + contains = (file_contents.find(required) != -1) if not contains: - print "File `%s' does not contain required string." 
% file - print self.banner('Required string ') - print required - print self.banner('%s contents ' % file) - print file_contents + print("File `%s' does not contain required string." % file) + print(self.banner('Required string ')) + print(required) + print(self.banner('%s contents ' % file)) + print(file_contents) self.fail_test(not contains) def must_contain_all_lines(self, output, lines, title=None, find=None): @@ -271,7 +278,7 @@ def must_contain_all_lines(self, output, lines, title=None, find=None): for lines in the output. """ if find is None: - find = lambda o, l: string.find(o, l) != -1 + find = lambda o, l: o.find(l) != -1 missing = [] for line in lines: if not find(output, line): @@ -299,7 +306,7 @@ def must_contain_any_line(self, output, lines, title=None, find=None): for lines in the output. """ if find is None: - find = lambda o, l: string.find(o, l) != -1 + find = lambda o, l: o.find(l) != -1 for line in lines: if find(output, line): return @@ -323,13 +330,13 @@ def must_exist(self, *files): pathname will be constructed by concatenating them. Exits FAILED if any of the files does not exist. """ - files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files) - missing = filter(lambda x: not os.path.exists(x), files) + files = map((lambda x: os.path.join(*x) if is_List(x) else x), files) + missing = [f for f in files if not os.path.exists(f)] if missing: - print "Missing files: `%s'" % string.join(missing, "', `") + print("Missing files: `%s'" % "', `".join(missing)) self.fail_test(missing) - def must_match(self, file, expect, mode = 'rb'): + def must_match(self, file, expect, mode = 'r'): """Matches the contents of the specified file (first argument) against the expected contents (second argument). The expected contents are a list of lines or a string which will be split @@ -341,21 +348,21 @@ def must_match(self, file, expect, mode = 'rb'): except KeyboardInterrupt: raise except: - print "Unexpected contents of `%s'" % file + print("Unexpected contents of `%s'" % file) self.diff(expect, file_contents, 'contents ') raise - def must_not_contain(self, file, banned, mode = 'rb'): + def must_not_contain(self, file, banned, mode = 'r'): """Ensures that the specified file doesn't contain the banned text. """ file_contents = self.read(file, mode) - contains = (string.find(file_contents, banned) != -1) + contains = (file_contents.find(banned) != -1) if contains: - print "File `%s' contains banned string." % file - print self.banner('Banned string ') - print banned - print self.banner('%s contents ' % file) - print file_contents + print("File `%s' contains banned string." % file) + print(self.banner('Banned string ')) + print(banned) + print(self.banner('%s contents ' % file)) + print(file_contents) self.fail_test(contains) def must_not_contain_any_line(self, output, lines, title=None, find=None): @@ -370,7 +377,7 @@ def must_not_contain_any_line(self, output, lines, title=None, find=None): for lines in the output. """ if find is None: - find = lambda o, l: string.find(o, l) != -1 + find = lambda o, l: o.find(l) != -1 unexpected = [] for line in lines: if find(output, line): @@ -395,10 +402,10 @@ def must_not_exist(self, *files): which case the pathname will be constructed by concatenating them. Exits FAILED if any of the files exists. 
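Note (reviewer aside): the filter()/map() calls in TestCommon.py become list comprehensions because both builtins return lazy iterators on Python 3, and the results here are concatenated, truth-tested, or reused; string.join() likewise becomes the str.join() method. A small, self-contained version of the must_*-style check (the paths are made up):

    import os

    files = ['/nonexistent/alpha', '/nonexistent/beta']
    missing = [f for f in files if not os.path.exists(f)]   # a real list on 2 and 3
    if missing:
        print("Missing files: `%s'" % "', `".join(missing))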
""" - files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files) - existing = filter(os.path.exists, files) + files = map((lambda x: os.path.join(*x) if is_List(x) else x), files) + existing = [f for f in files if os.path.exists(f)] if existing: - print "Unexpected files exist: `%s'" % string.join(existing, "', `") + print("Unexpected files exist: `%s'" % "', `".join(existing)) self.fail_test(existing) def must_not_be_writable(self, *files): @@ -408,13 +415,13 @@ def must_not_be_writable(self, *files): them. Exits FAILED if any of the files does not exist or is writable. """ - files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files) + files = map((lambda x: os.path.join(*x) if is_List(x) else x), files) existing, missing = separate_files(files) - writable = filter(is_writable, existing) + writable = [x for x in existing if is_writable(x)] if missing: - print "Missing files: `%s'" % string.join(missing, "', `") + print("Missing files: `%s'" % "', `".join(missing)) if writable: - print "Writable files: `%s'" % string.join(writable, "', `") + print("Writable files: `%s'" % "', `".join(writable)) self.fail_test(missing + writable) def _complete(self, actual_stdout, expected_stdout, @@ -427,21 +434,24 @@ def _complete(self, actual_stdout, expected_stdout, expect = '' if status != 0: expect = " (expected %s)" % str(status) - print "%s returned %s%s" % (self.program, str(_status(self)), expect) - print self.banner('STDOUT ') - print actual_stdout - print self.banner('STDERR ') - print actual_stderr + print("%s returned %s%s" % (self.program, str(_status(self)), + expect)) + print(self.banner('STDOUT ')) + print(actual_stdout) + print(self.banner('STDERR ')) + print(actual_stderr) self.fail_test() - if not expected_stdout is None and not match(actual_stdout, expected_stdout): + if not expected_stdout is None and not match(actual_stdout, + expected_stdout): self.diff(expected_stdout, actual_stdout, 'STDOUT ') if actual_stderr: - print self.banner('STDERR ') - print actual_stderr + print(self.banner('STDERR ')) + print(actual_stderr) self.fail_test() - if not expected_stderr is None and not match(actual_stderr, expected_stderr): - print self.banner('STDOUT ') - print actual_stdout + if not expected_stderr is None and not match(actual_stderr, + expected_stderr): + print(self.banner('STDOUT ')) + print(actual_stdout) self.diff(expected_stderr, actual_stderr, 'STDERR ') self.fail_test() @@ -463,20 +473,19 @@ def start(self, program = None, arguments = options + " " + arguments try: - return apply(TestCmd.start, - (self, program, interpreter, arguments, universal_newlines), - kw) + return TestCmd.start(self, program, interpreter, arguments, + universal_newlines, **kw) except KeyboardInterrupt: raise - except Exception, e: - print self.banner('STDOUT ') + except Exception as e: + print(self.banner('STDOUT ')) try: - print self.stdout() + print(self.stdout()) except IndexError: pass - print self.banner('STDERR ') + print(self.banner('STDERR ')) try: - print self.stderr() + print(self.stderr()) except IndexError: pass cmd_args = self.command_args(program, interpreter, arguments) @@ -501,7 +510,7 @@ def finish(self, popen, stdout = None, stderr = '', status = 0, **kw): command. A value of None means don't test exit status. 
""" - apply(TestCmd.finish, (self, popen,), kw) + TestCmd.finish(self, popen, **kw) match = kw.get('match', self.match) self._complete(self.stdout(), stdout, self.stderr(), stderr, status, match) @@ -539,7 +548,7 @@ def run(self, options = None, arguments = None, arguments = options + " " + arguments kw['arguments'] = arguments match = kw.pop('match', self.match) - apply(TestCmd.run, [self], kw) + TestCmd.run(self, **kw) self._complete(self.stdout(), stdout, self.stderr(), stderr, status, match) diff --git a/test/lib/TestGyp.py b/test/lib/TestGyp.py index 2b88f7bc..bbeca6aa 100644 --- a/test/lib/TestGyp.py +++ b/test/lib/TestGyp.py @@ -5,10 +5,10 @@ """ TestGyp.py: a testing framework for GYP integration tests. """ +from __future__ import print_function -import errno import collections -from contextlib import contextmanager +import errno import itertools import os import re @@ -17,6 +17,8 @@ import sys import tempfile +from contextlib import contextmanager + import TestCmd import TestCommon from TestCommon import __all__ @@ -283,13 +285,13 @@ def report_not_up_to_date(self): that expect exact output from the command (make) can just set stdout= when they call the run_build() method. """ - print "Build is not up-to-date:" - print self.banner('STDOUT ') - print self.stdout() + print("Build is not up-to-date:") + print(self.banner('STDOUT ')) + print(self.stdout()) stderr = self.stderr() if stderr: - print self.banner('STDERR ') - print stderr + print(self.banner('STDERR ')) + print(stderr) def run_gyp(self, gyp_file, *args, **kw): """ @@ -328,7 +330,7 @@ def run(self, *args, **kw): the tool-specific subclasses or clutter the tests themselves with platform-specific code. """ - if kw.has_key('SYMROOT'): + if 'SYMROOT' in kw: del kw['SYMROOT'] super(TestGypBase, self).run(*args, **kw) @@ -558,7 +560,7 @@ def build(self, gyp_file, target=None, **kw): # Makefile.gyp_filename), so use that if there is no Makefile. chdir = kw.get('chdir', '') if not os.path.exists(os.path.join(chdir, 'Makefile')): - print "NO Makefile in " + os.path.join(chdir, 'Makefile') + print("NO Makefile in " + os.path.join(chdir, 'Makefile')) arguments.insert(0, '-f') arguments.insert(1, os.path.splitext(gyp_file)[0] + '.Makefile') kw['arguments'] = arguments @@ -665,7 +667,7 @@ def FindMSBuildInstallation(msvs_version = 'auto'): msbuild_basekey = r'HKLM\SOFTWARE\Microsoft\MSBuild\ToolsVersions' if not registry.KeyExists(msbuild_basekey): - print 'Error: could not find MSBuild base registry entry' + print('Error: could not find MSBuild base registry entry') return None msbuild_version = None @@ -674,9 +676,9 @@ def FindMSBuildInstallation(msvs_version = 'auto'): if registry.KeyExists(msbuild_basekey + '\\' + msbuild_test_version): msbuild_version = msbuild_test_version else: - print ('Warning: Environment variable GYP_MSVS_VERSION specifies "%s" ' - 'but corresponding MSBuild "%s" was not found.' % - (msvs_version, msbuild_version)) + print('Warning: Environment variable GYP_MSVS_VERSION specifies "%s" ' + 'but corresponding MSBuild "%s" was not found.' 
% + (msvs_version, msbuild_version)) if not msbuild_version: for msvs_version in sorted(msvs_to_msbuild, reverse=True): msbuild_test_version = msvs_to_msbuild[msvs_version] @@ -684,13 +686,13 @@ def FindMSBuildInstallation(msvs_version = 'auto'): msbuild_version = msbuild_test_version break if not msbuild_version: - print 'Error: could not find MSBuild registry entry' + print('Error: could not find MSBuild registry entry') return None msbuild_path = registry.GetValue(msbuild_basekey + '\\' + msbuild_version, 'MSBuildToolsPath') if not msbuild_path: - print 'Error: could not get MSBuild registry entry value' + print('Error: could not get MSBuild registry entry value') return None return os.path.join(msbuild_path, 'MSBuild.exe') @@ -767,8 +769,8 @@ def FindVisualStudioInstallation(): msbuild_path = FindMSBuildInstallation(msvs_version) return build_tool, uses_msbuild, msbuild_path else: - print ('Warning: Environment variable GYP_MSVS_VERSION specifies "%s" ' - 'but corresponding "%s" was not found.' % (msvs_version, path)) + print('Warning: Environment variable GYP_MSVS_VERSION specifies "%s" ' + 'but corresponding "%s" was not found.' % (msvs_version, path)) # Neither GYP_MSVS_VERSION nor the path help us out. Iterate through # the choices looking for a match. for version in sorted(possible_paths, reverse=True): @@ -779,7 +781,7 @@ def FindVisualStudioInstallation(): uses_msbuild = msvs_version >= '2010' msbuild_path = FindMSBuildInstallation(msvs_version) return build_tool, uses_msbuild, msbuild_path - print 'Error: could not find devenv' + print('Error: could not find devenv') sys.exit(1) class TestGypOnMSToolchain(TestGypBase): @@ -944,7 +946,7 @@ def build(self, gyp_file, target=None, rebuild=False, clean=False, **kw): kw['arguments'] = arguments return self.run(program=self.build_tool, **kw) def up_to_date(self, gyp_file, target=None, **kw): - """ + r""" Verifies that a build of the specified Visual Studio target is up to date. Beware that VS2010 will behave strangely if you build under @@ -1251,4 +1253,4 @@ def TestGyp(*args, **kw): for format_class in format_class_list: if format == format_class.format: return format_class(*args, **kw) - raise Exception, "unknown format %r" % format + raise Exception("unknown format %r" % format) diff --git a/test/lib/TestMac.py b/test/lib/TestMac.py index 68605d77..0af49b0e 100644 --- a/test/lib/TestMac.py +++ b/test/lib/TestMac.py @@ -6,6 +6,8 @@ TestMac.py: a collection of helper function shared between test on Mac OS X. 
""" +from __future__ import print_function + import re import subprocess @@ -15,7 +17,7 @@ def CheckFileType(test, file, archs): """Check that |file| contains exactly |archs| or fails |test|.""" proc = subprocess.Popen(['lipo', '-info', file], stdout=subprocess.PIPE) - o = proc.communicate()[0].strip() + o = proc.communicate()[0].decode('utf-8').strip() assert not proc.returncode if len(archs) == 1: pattern = re.compile('^Non-fat file: (.*) is architecture: (.*)$') @@ -23,13 +25,13 @@ def CheckFileType(test, file, archs): pattern = re.compile('^Architectures in the fat file: (.*) are: (.*)$') match = pattern.match(o) if match is None: - print 'Ouput does not match expected pattern: %s' % (pattern.pattern) + print('Ouput does not match expected pattern: %s' % (pattern.pattern)) test.fail_test() else: found_file, found_archs = match.groups() if found_file != file or set(found_archs.split()) != set(archs): - print 'Expected file %s with arch %s, got %s with arch %s' % ( - file, ' '.join(archs), found_file, found_archs) + print('Expected file %s with arch %s, got %s with arch %s' % ( + file, ' '.join(archs), found_file, found_archs)) test.fail_test() @@ -59,6 +61,7 @@ def SDKBuild(self): if 'SDKBuild' not in self._cache: self._cache['SDKBuild'] = subprocess.check_output( ['xcodebuild', '-version', '-sdk', '', 'ProductBuildVersion']) + self._cache['SDKBuild'] = self._cache['SDKBuild'].decode('utf-8') self._cache['SDKBuild'] = self._cache['SDKBuild'].rstrip('\n') return self._cache['SDKBuild'] diff --git a/test/lib/TestWin.py b/test/lib/TestWin.py index 7627197b..2234253a 100644 --- a/test/lib/TestWin.py +++ b/test/lib/TestWin.py @@ -63,7 +63,7 @@ def Query(self, key, value=None): text = None try: text = self._QueryBase('Sysnative', key, value) - except OSError, e: + except OSError as e: if e.errno == errno.ENOENT: text = self._QueryBase('System32', key, value) else: diff --git a/test/linux/gyptest-implicit-rpath.py b/test/linux/gyptest-implicit-rpath.py index dd7718c7..8e17a3f1 100644 --- a/test/linux/gyptest-implicit-rpath.py +++ b/test/linux/gyptest-implicit-rpath.py @@ -25,7 +25,7 @@ def GetRpaths(p): p = test.built_file_path(p, chdir=CHDIR) r = re.compile(r'Library rpath: \[([^\]]+)\]') proc = subprocess.Popen(['readelf', '-d', p], stdout=subprocess.PIPE) - o = proc.communicate()[0] + o = proc.communicate()[0].decode('utf-8') assert not proc.returncode return r.findall(o) diff --git a/test/linux/gyptest-ldflags-from-environment.py b/test/linux/gyptest-ldflags-from-environment.py index b1a7752a..4aea193e 100644 --- a/test/linux/gyptest-ldflags-from-environment.py +++ b/test/linux/gyptest-ldflags-from-environment.py @@ -32,7 +32,7 @@ def GetDynamicLinker(p): p = test.built_file_path(p, chdir=CHDIR) r = re.compile(r'\[Requesting program interpreter: ([^\]]+)\]') proc = subprocess.Popen(['readelf', '-l', p], stdout=subprocess.PIPE) - o = proc.communicate()[0] + o = proc.communicate()[0].decode('utf-8') assert not proc.returncode return r.search(o).group(1) diff --git a/test/linux/gyptest-target-rpath.py b/test/linux/gyptest-target-rpath.py index 2950a202..f275caae 100644 --- a/test/linux/gyptest-target-rpath.py +++ b/test/linux/gyptest-target-rpath.py @@ -25,7 +25,7 @@ def GetRpaths(p): p = test.built_file_path(p, chdir=CHDIR) r = re.compile(r'Library rpath: \[([^\]]+)\]') proc = subprocess.Popen(['readelf', '-d', p], stdout=subprocess.PIPE) - o = proc.communicate()[0] + o = proc.communicate()[0].decode('utf-8') assert not proc.returncode return r.findall(o) diff --git 
a/test/linux/ldflags-duplicates/check-ldflags.py b/test/linux/ldflags-duplicates/check-ldflags.py index 0515da95..ef102952 100755 --- a/test/linux/ldflags-duplicates/check-ldflags.py +++ b/test/linux/ldflags-duplicates/check-ldflags.py @@ -8,17 +8,20 @@ Verifies duplicate ldflags are not removed. """ +from __future__ import print_function + import sys def CheckContainsFlags(args, substring): if args.find(substring) is -1: - print 'ERROR: Linker arguments "%s" are missing in "%s"' % (substring, args) + print('ERROR: Linker arguments "%s" are missing in "%s"' % (substring, + args)) return False; return True; if __name__ == '__main__': args = " ".join(sys.argv) - print "args = " +args + print("args = " +args) if not CheckContainsFlags(args, 'lib1.a -Wl,--no-whole-archive') \ or not CheckContainsFlags(args, 'lib2.a -Wl,--no-whole-archive'): sys.exit(1); diff --git a/test/mac/gyptest-app-assets-catalog.py b/test/mac/gyptest-app-assets-catalog.py index ca76b513..7b1c0f67 100755 --- a/test/mac/gyptest-app-assets-catalog.py +++ b/test/mac/gyptest-app-assets-catalog.py @@ -8,6 +8,8 @@ Verifies that app bundles are built correctly. """ +from __future__ import print_function + import TestGyp import TestMac @@ -17,12 +19,12 @@ import sys if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) def ExpectEq(expected, actual): if expected != actual: - print >>sys.stderr, 'Expected "%s", got "%s"' % (expected, actual) + print('Expected "%s", got "%s"' % (expected, actual), file=sys.stderr) test.fail_test() def ls(path): diff --git a/test/mac/gyptest-app-error.py b/test/mac/gyptest-app-error.py index c6fe33f1..df0781d4 100755 --- a/test/mac/gyptest-app-error.py +++ b/test/mac/gyptest-app-error.py @@ -8,13 +8,15 @@ Verifies that invalid strings files cause the build to fail. """ +from __future__ import print_function + import TestCmd import TestGyp import sys if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) expected_error = 'Old-style plist parser: missing semicolon in dictionary' diff --git a/test/mac/gyptest-app.py b/test/mac/gyptest-app.py index be92d01a..16c96403 100755 --- a/test/mac/gyptest-app.py +++ b/test/mac/gyptest-app.py @@ -8,6 +8,8 @@ Verifies that app bundles are built correctly. """ +from __future__ import print_function + import TestGyp import TestMac @@ -18,7 +20,7 @@ if sys.platform in ('darwin', 'win32'): - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) @@ -26,12 +28,12 @@ def CheckFileXMLPropertyList(file): output = subprocess.check_output(['file', file]) # The double space after XML is intentional. 
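The readelf/otool/nm hunks above all append .decode('utf-8') because subprocess pipes return bytes on Python 3 while the tests compare against str. A minimal sketch of that pattern, outside the patch (the helper name and example command are only illustrative):

    # Not part of the patch: decode subprocess output before string matching.
    import subprocess

    def run_and_capture(argv):
        proc = subprocess.Popen(argv, stdout=subprocess.PIPE)
        out = proc.communicate()[0].decode('utf-8')  # bytes on Python 3 -> str
        assert not proc.returncode
        return out

    # e.g.: 'Library rpath' in run_and_capture(['readelf', '-d', 'some_binary'])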
if not 'XML document text' in output: - print 'File: Expected XML document text, got %s' % output + print('File: Expected XML document text, got %s' % output) test.fail_test() def ExpectEq(expected, actual): if expected != actual: - print >>sys.stderr, 'Expected "%s", got "%s"' % (expected, actual) + print('Expected "%s", got "%s"' % (expected, actual), file=sys.stderr) test.fail_test() def ls(path): diff --git a/test/mac/gyptest-archs.py b/test/mac/gyptest-archs.py index 106a928e..c56f20c4 100644 --- a/test/mac/gyptest-archs.py +++ b/test/mac/gyptest-archs.py @@ -79,7 +79,8 @@ TestMac.CheckFileType(test, result_file, ['i386', 'x86_64']) # Check that symbol "_x" made it into both versions of the binary: if not all(['D _x' in subprocess.check_output( - ['nm', '-arch', arch, result_file]) for arch in ['i386', 'x86_64']]): + ['nm', '-arch', arch, result_file]).decode('utf-8') + for arch in ['i386', 'x86_64']]): # This can only flakily fail, due to process ordering issues. If this # does fail flakily, then something's broken, it's not the test at fault. test.fail_test() diff --git a/test/mac/gyptest-bundle-resources.py b/test/mac/gyptest-bundle-resources.py index e9eddb70..275cdbaa 100644 --- a/test/mac/gyptest-bundle-resources.py +++ b/test/mac/gyptest-bundle-resources.py @@ -8,6 +8,8 @@ Verifies things related to bundle resources. """ +from __future__ import print_function + import TestGyp import os @@ -15,7 +17,7 @@ import sys if sys.platform in ('darwin'): - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) diff --git a/test/mac/gyptest-copies.py b/test/mac/gyptest-copies.py index 41464418..838c62dc 100755 --- a/test/mac/gyptest-copies.py +++ b/test/mac/gyptest-copies.py @@ -8,6 +8,8 @@ Verifies that 'copies' with app bundles are handled correctly. """ +from __future__ import print_function + import TestGyp import os @@ -15,7 +17,7 @@ import time if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode']) diff --git a/test/mac/gyptest-depend-on-bundle.py b/test/mac/gyptest-depend-on-bundle.py index 486fbfe2..b8b06d4c 100644 --- a/test/mac/gyptest-depend-on-bundle.py +++ b/test/mac/gyptest-depend-on-bundle.py @@ -8,12 +8,14 @@ Verifies that a dependency on a bundle causes the whole bundle to be built. """ +from __future__ import print_function + import TestGyp import sys if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode']) diff --git a/test/mac/gyptest-framework.py b/test/mac/gyptest-framework.py index 553cc9f6..faf05cf3 100755 --- a/test/mac/gyptest-framework.py +++ b/test/mac/gyptest-framework.py @@ -8,13 +8,15 @@ Verifies that app bundles are built correctly. """ +from __future__ import print_function + import TestGyp import os import sys if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." 
+ print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) diff --git a/test/mac/gyptest-infoplist-process.py b/test/mac/gyptest-infoplist-process.py index 2e516847..24260e1c 100755 --- a/test/mac/gyptest-infoplist-process.py +++ b/test/mac/gyptest-infoplist-process.py @@ -8,12 +8,14 @@ Verifies the Info.plist preprocessor functionality. """ +from __future__ import print_function + import TestGyp import sys if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode']) diff --git a/test/mac/gyptest-installname.py b/test/mac/gyptest-installname.py index 3fc71522..17831aea 100644 --- a/test/mac/gyptest-installname.py +++ b/test/mac/gyptest-installname.py @@ -9,6 +9,8 @@ correctly. """ +from __future__ import print_function + import TestGyp import re @@ -16,7 +18,7 @@ import sys if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode']) diff --git a/test/mac/gyptest-ldflags-passed-to-libtool.py b/test/mac/gyptest-ldflags-passed-to-libtool.py index 48afcd46..e24e305d 100644 --- a/test/mac/gyptest-ldflags-passed-to-libtool.py +++ b/test/mac/gyptest-ldflags-passed-to-libtool.py @@ -8,12 +8,14 @@ Verifies that OTHER_LDFLAGS is passed to libtool. """ +from __future__ import print_function + import TestGyp import sys if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'], diff --git a/test/mac/gyptest-loadable-module.py b/test/mac/gyptest-loadable-module.py index 158a9305..77dde1d6 100755 --- a/test/mac/gyptest-loadable-module.py +++ b/test/mac/gyptest-loadable-module.py @@ -8,6 +8,8 @@ Tests that a loadable_module target is built correctly. """ +from __future__ import print_function + import TestGyp import os @@ -15,7 +17,7 @@ import sys if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode']) diff --git a/test/mac/gyptest-lto.py b/test/mac/gyptest-lto.py index 1bd32e16..d37068f3 100644 --- a/test/mac/gyptest-lto.py +++ b/test/mac/gyptest-lto.py @@ -38,7 +38,7 @@ def ObjPath(srcpath, target): def ObjType(p, t_expected): r = re.compile(r'nsyms\s+(\d+)') - o = subprocess.check_output(['file', p]) + o = subprocess.check_output(['file', p]).decode('utf-8') objtype = 'unknown' if ': Mach-O ' in o: objtype = 'mach-o' diff --git a/test/mac/gyptest-missing-cfbundlesignature.py b/test/mac/gyptest-missing-cfbundlesignature.py index 43cab77b..be664924 100644 --- a/test/mac/gyptest-missing-cfbundlesignature.py +++ b/test/mac/gyptest-missing-cfbundlesignature.py @@ -8,12 +8,14 @@ Verifies that an Info.plist with CFBundleSignature works. """ +from __future__ import print_function + import TestGyp import sys if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." 
+ print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode']) diff --git a/test/mac/gyptest-non-strs-flattened-to-env.py b/test/mac/gyptest-non-strs-flattened-to-env.py index b802619b..cb42a939 100644 --- a/test/mac/gyptest-non-strs-flattened-to-env.py +++ b/test/mac/gyptest-non-strs-flattened-to-env.py @@ -9,12 +9,14 @@ environment. """ +from __future__ import print_function + import TestGyp import sys if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode']) diff --git a/test/mac/gyptest-postbuild-defaults.py b/test/mac/gyptest-postbuild-defaults.py index 892a0c4b..0f7d25bd 100644 --- a/test/mac/gyptest-postbuild-defaults.py +++ b/test/mac/gyptest-postbuild-defaults.py @@ -8,12 +8,14 @@ Verifies that a postbuild invoking |defaults| works. """ +from __future__ import print_function + import TestGyp import sys if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode']) diff --git a/test/mac/gyptest-postbuild-fail.py b/test/mac/gyptest-postbuild-fail.py index 9cd5d47d..1a229df6 100755 --- a/test/mac/gyptest-postbuild-fail.py +++ b/test/mac/gyptest-postbuild-fail.py @@ -8,6 +8,8 @@ Verifies that a failing postbuild step lets the build fail. """ +from __future__ import print_function + import TestGyp import sys @@ -37,9 +39,10 @@ job = subprocess.Popen(['xcodebuild', '-version'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - out, err = job.communicate() + out, _ = job.communicate() + out = out.decode('utf-8') if job.returncode != 0: - print out + print(out) raise Exception('Error %d running xcodebuild' % job.returncode) if out.startswith('Xcode 3.'): test.pass_test() diff --git a/test/mac/gyptest-rebuild.py b/test/mac/gyptest-rebuild.py index e615d067..c7d8cad0 100755 --- a/test/mac/gyptest-rebuild.py +++ b/test/mac/gyptest-rebuild.py @@ -8,12 +8,14 @@ Verifies that app bundles are rebuilt correctly. """ +from __future__ import print_function + import TestGyp import sys if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode']) diff --git a/test/mac/gyptest-rpath.py b/test/mac/gyptest-rpath.py index ef415cd7..a82e2fd5 100644 --- a/test/mac/gyptest-rpath.py +++ b/test/mac/gyptest-rpath.py @@ -27,7 +27,7 @@ def GetRpaths(p): p = test.built_file_path(p, chdir=CHDIR) r = re.compile(r'cmd LC_RPATH.*?path (.*?) \(offset \d+\)', re.DOTALL) proc = subprocess.Popen(['otool', '-l', p], stdout=subprocess.PIPE) - o = proc.communicate()[0] + o = proc.communicate()[0].decode('utf-8') assert not proc.returncode return r.findall(o) diff --git a/test/mac/gyptest-sdkroot.py b/test/mac/gyptest-sdkroot.py index f7d41cd2..f8edbaa5 100644 --- a/test/mac/gyptest-sdkroot.py +++ b/test/mac/gyptest-sdkroot.py @@ -8,6 +8,8 @@ Verifies that setting SDKROOT works. """ +from __future__ import print_function + import TestGyp import os @@ -16,7 +18,7 @@ if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." 
+ print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode']) diff --git a/test/mac/gyptest-sourceless-module.py b/test/mac/gyptest-sourceless-module.py index c34bc546..f2801c20 100644 --- a/test/mac/gyptest-sourceless-module.py +++ b/test/mac/gyptest-sourceless-module.py @@ -8,12 +8,14 @@ Verifies that bundles that have no 'sources' (pure resource containers) work. """ +from __future__ import print_function + import TestGyp import sys if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode']) diff --git a/test/mac/gyptest-strip-default.py b/test/mac/gyptest-strip-default.py index f73fa112..b851782f 100644 --- a/test/mac/gyptest-strip-default.py +++ b/test/mac/gyptest-strip-default.py @@ -8,6 +8,8 @@ Verifies that the default STRIP_STYLEs match between different generators. """ +from __future__ import print_function + import TestGyp import re @@ -29,7 +31,7 @@ def OutPath(s): def CheckNsyms(p, o_expected): proc = subprocess.Popen(['nm', '-aU', p], stdout=subprocess.PIPE) - o = proc.communicate()[0] + o = proc.communicate()[0].decode('utf-8') # Filter out mysterious "00 0000 OPT radr://5614542" symbol which # is apparently only printed on the bots (older toolchain?). @@ -40,8 +42,8 @@ def CheckNsyms(p, o_expected): o = re.sub(r'^[a-fA-F0-9]+', 'XXXXXXXX', o, flags=re.MULTILINE) assert not proc.returncode if o != o_expected: - print 'Stripping: Expected symbols """\n%s""", got """\n%s"""' % ( - o_expected, o) + print('Stripping: Expected symbols """\n%s""", got """\n%s"""' % ( + o_expected, o)) test.fail_test() CheckNsyms(OutPath('libsingle_dylib.dylib'), diff --git a/test/mac/gyptest-strip.py b/test/mac/gyptest-strip.py index 7b951425..d4694834 100755 --- a/test/mac/gyptest-strip.py +++ b/test/mac/gyptest-strip.py @@ -8,6 +8,8 @@ Verifies that stripping works. """ +from __future__ import print_function + import TestGyp import TestMac @@ -16,7 +18,7 @@ import sys import time -print "This test is currently disabled: https://crbug.com/483696." +print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) if sys.platform == 'darwin': @@ -36,7 +38,7 @@ def CheckNsyms(p, n_expected): m = r.search(o) n = int(m.group(1)) if n != n_expected: - print 'Stripping: Expected %d symbols, got %d' % (n_expected, n) + print('Stripping: Expected %d symbols, got %d' % (n_expected, n)) test.fail_test() # Starting with Xcode 5.0, clang adds an additional symbols to the compiled diff --git a/test/mac/gyptest-swift-library.py b/test/mac/gyptest-swift-library.py index dde7a623..d3433753 100644 --- a/test/mac/gyptest-swift-library.py +++ b/test/mac/gyptest-swift-library.py @@ -8,6 +8,8 @@ Verifies that a swift framework builds correctly. """ +from __future__ import print_function + import TestGyp import TestMac @@ -16,7 +18,7 @@ import subprocess if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." 
+ print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp(formats=['xcode']) @@ -26,7 +28,7 @@ def CheckHasSymbolName(path, symbol): output = subprocess.check_output(['nm', '-j', path]) idx = output.find(symbol) if idx == -1: - print 'Swift: Could not find symobl: %s' % symbol + print('Swift: Could not find symobl: %s' % symbol) test.fail_test() test_cases = [] diff --git a/test/mac/gyptest-xcode-env-order.py b/test/mac/gyptest-xcode-env-order.py index 6e7ca241..bda19988 100755 --- a/test/mac/gyptest-xcode-env-order.py +++ b/test/mac/gyptest-xcode-env-order.py @@ -8,6 +8,8 @@ Verifies that dependent Xcode settings are processed correctly. """ +from __future__ import print_function + import TestGyp import TestMac @@ -15,7 +17,7 @@ import sys if sys.platform == 'darwin': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode']) diff --git a/test/mac/gyptest-xcode-gcc.py b/test/mac/gyptest-xcode-gcc.py index dee4bd59..a1d201ae 100644 --- a/test/mac/gyptest-xcode-gcc.py +++ b/test/mac/gyptest-xcode-gcc.py @@ -19,6 +19,7 @@ def IgnoreOutput(string, expected_string): def CompilerVersion(compiler): stdout = subprocess.check_output([compiler, '-v'], stderr=subprocess.STDOUT) + stdout = stdout.decode('utf-8') return stdout.rstrip('\n') def CompilerSupportsWarnAboutInvalidOffsetOfMacro(test): diff --git a/test/mac/gyptest-xcuitest.py b/test/mac/gyptest-xcuitest.py index 4e6067e6..410de297 100755 --- a/test/mac/gyptest-xcuitest.py +++ b/test/mac/gyptest-xcuitest.py @@ -23,7 +23,7 @@ out, err = job.communicate() if job.returncode != 0: raise Exception('Error %d running xcodebuild' % job.returncode) - xcode_version, build_number = out.splitlines() + xcode_version, build_number = out.decode('utf-8').splitlines() # Convert the version string from 'Xcode 5.0' to ['5','0']. xcode_version = xcode_version.split()[-1].split('.') if xcode_version < ['7']: diff --git a/test/make_global_settings/ar/gyptest-make_global_settings_ar.py b/test/make_global_settings/ar/gyptest-make_global_settings_ar.py index e6676de6..aabc5618 100644 --- a/test/make_global_settings/ar/gyptest-make_global_settings_ar.py +++ b/test/make_global_settings/ar/gyptest-make_global_settings_ar.py @@ -69,7 +69,7 @@ def verify_ar_host(test, ar=None, rel_path=False): test_format = ['ninja'] -if sys.platform in ('linux2', 'darwin'): +if sys.platform.startswith('linux') or sys.platform == 'darwin': test_format += ['make'] test = TestGyp.TestGyp(formats=test_format) diff --git a/test/make_global_settings/basics/gyptest-make_global_settings.py b/test/make_global_settings/basics/gyptest-make_global_settings.py index 20710a33..8f488759 100644 --- a/test/make_global_settings/basics/gyptest-make_global_settings.py +++ b/test/make_global_settings/basics/gyptest-make_global_settings.py @@ -8,15 +8,17 @@ Verifies make_global_settings. """ +from __future__ import print_function + import os import sys import TestGyp -print "This test is currently disabled: https://crbug.com/483696." 
+print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test_format = ['ninja'] -if sys.platform in ('linux2', 'darwin'): +if sys.platform.startswith('linux') or sys.platform == 'darwin': test_format += ['make'] test = TestGyp.TestGyp(formats=test_format) @@ -28,7 +30,7 @@ CC = $(abspath clang) endif """ - if sys.platform == 'linux2': + if sys.platform.startswith('linux'): link_expected = """ LINK ?= $(abspath clang) """ diff --git a/test/make_global_settings/env-wrapper/gyptest-wrapper.py b/test/make_global_settings/env-wrapper/gyptest-wrapper.py index b98e6632..409799e3 100644 --- a/test/make_global_settings/env-wrapper/gyptest-wrapper.py +++ b/test/make_global_settings/env-wrapper/gyptest-wrapper.py @@ -8,11 +8,13 @@ Verifies *_wrapper in environment. """ +from __future__ import print_function + import os import sys import TestGyp -print "This test is currently disabled: https://crbug.com/483696." +print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test_format = ['ninja'] diff --git a/test/make_global_settings/full-toolchain/gyptest-make_global_settings.py b/test/make_global_settings/full-toolchain/gyptest-make_global_settings.py index acc2ce94..542fd631 100644 --- a/test/make_global_settings/full-toolchain/gyptest-make_global_settings.py +++ b/test/make_global_settings/full-toolchain/gyptest-make_global_settings.py @@ -8,6 +8,8 @@ Verifies make_global_settings works with the full toolchain. """ +from __future__ import print_function + import os import sys import TestGyp @@ -17,7 +19,7 @@ # and make not supported on windows at all. sys.exit(0) -print "This test is currently disabled: https://crbug.com/483696." +print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test = TestGyp.TestGyp(formats=['ninja']) diff --git a/test/make_global_settings/full-toolchain/my_nm.py b/test/make_global_settings/full-toolchain/my_nm.py index f0f1efcf..2c4e6781 100755 --- a/test/make_global_settings/full-toolchain/my_nm.py +++ b/test/make_global_settings/full-toolchain/my_nm.py @@ -2,7 +2,8 @@ # Copyright (c) 2014 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +from __future__ import print_function import sys -print sys.argv +print(sys.argv) with open('RAN_MY_NM', 'w') as f: f.write('RAN_MY_NM') diff --git a/test/make_global_settings/full-toolchain/my_readelf.py b/test/make_global_settings/full-toolchain/my_readelf.py index 40e303cd..62666543 100755 --- a/test/make_global_settings/full-toolchain/my_readelf.py +++ b/test/make_global_settings/full-toolchain/my_readelf.py @@ -2,7 +2,8 @@ # Copyright (c) 2014 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
+from __future__ import print_function import sys -print sys.argv +print(sys.argv) with open('RAN_MY_READELF', 'w') as f: f.write('RAN_MY_READELF') diff --git a/test/make_global_settings/ld/gyptest-make_global_settings_ld.py b/test/make_global_settings/ld/gyptest-make_global_settings_ld.py index c5a2e964..e5f50fbb 100644 --- a/test/make_global_settings/ld/gyptest-make_global_settings_ld.py +++ b/test/make_global_settings/ld/gyptest-make_global_settings_ld.py @@ -76,7 +76,7 @@ def verify_ld_host(test, ld=None, rel_path=False): test_format = ['ninja'] -if sys.platform in ('linux2', 'darwin'): +if sys.platform.startswith('linux') or sys.platform == 'darwin': test_format += ['make'] test = TestGyp.TestGyp(formats=test_format) diff --git a/test/make_global_settings/wrapper/gyptest-wrapper.py b/test/make_global_settings/wrapper/gyptest-wrapper.py index de9b0d1d..7ef4314b 100644 --- a/test/make_global_settings/wrapper/gyptest-wrapper.py +++ b/test/make_global_settings/wrapper/gyptest-wrapper.py @@ -8,15 +8,17 @@ Verifies *_wrapper in make_global_settings. """ +from __future__ import print_function + import os import sys import TestGyp -print "This test is currently disabled: https://crbug.com/483696." +print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) test_format = ['ninja'] -if sys.platform in ('linux2', 'darwin'): +if sys.platform.startswith('linux') or sys.platform == 'darwin': test_format += ['make'] test = TestGyp.TestGyp(formats=test_format) diff --git a/test/many-actions/gyptest-many-actions-unsorted.py b/test/many-actions/gyptest-many-actions-unsorted.py index 90d3c92e..6927d1c7 100644 --- a/test/many-actions/gyptest-many-actions-unsorted.py +++ b/test/many-actions/gyptest-many-actions-unsorted.py @@ -9,10 +9,12 @@ line length. """ +from __future__ import print_function + import sys if sys.platform == 'win32': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) import TestGyp diff --git a/test/many-actions/gyptest-many-actions.py b/test/many-actions/gyptest-many-actions.py index f2e719bb..4a525d32 100644 --- a/test/many-actions/gyptest-many-actions.py +++ b/test/many-actions/gyptest-many-actions.py @@ -9,10 +9,12 @@ line length. """ +from __future__ import print_function + import sys if sys.platform == 'win32': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) diff --git a/test/msvs/config_attrs/gyptest-config_attrs.py b/test/msvs/config_attrs/gyptest-config_attrs.py index d5603744..29a8022b 100644 --- a/test/msvs/config_attrs/gyptest-config_attrs.py +++ b/test/msvs/config_attrs/gyptest-config_attrs.py @@ -10,13 +10,15 @@ them to set the OutputDirectory. """ +from __future__ import print_function + import TestGyp import os import sys if sys.platform == 'win32': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) diff --git a/test/msvs/rules_stdout_stderr/rule_stderr.py b/test/msvs/rules_stdout_stderr/rule_stderr.py index f4860624..2081513e 100644 --- a/test/msvs/rules_stdout_stderr/rule_stderr.py +++ b/test/msvs/rules_stdout_stderr/rule_stderr.py @@ -3,5 +3,6 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
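The rule_stderr.py hunk just below, like many others in this patch, rewrites the Python 2 chevron form print >>stream, value as a print() call with a file= argument (made available in Python 2 by the print_function future import). A minimal sketch, outside the patch (the output filename is only illustrative):

    # Not part of the patch: print-to-stream in a form both interpreters accept.
    from __future__ import print_function
    import sys

    print('This will go to stderr', file=sys.stderr)
    with open('options.txt', 'w') as f:        # hypothetical output file
        print('options', ['--flag'], file=f)   # was: print >>f, 'options', [...]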
+from __future__ import print_function import sys -print >>sys.stderr, "This will go to stderr" +print("This will go to stderr", file=sys.stderr) diff --git a/test/msvs/rules_stdout_stderr/rule_stdout.py b/test/msvs/rules_stdout_stderr/rule_stdout.py index 2b58d2a3..4c073ebc 100644 --- a/test/msvs/rules_stdout_stderr/rule_stdout.py +++ b/test/msvs/rules_stdout_stderr/rule_stdout.py @@ -3,4 +3,5 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -print "This will go to stdout" +from __future__ import print_function +print("This will go to stdout") diff --git a/test/ninja/action-rule-hash/subdir/emit.py b/test/ninja/action-rule-hash/subdir/emit.py index fcb715a9..6b171255 100644 --- a/test/ninja/action-rule-hash/subdir/emit.py +++ b/test/ninja/action-rule-hash/subdir/emit.py @@ -6,7 +6,7 @@ import sys -f = open(sys.argv[1], 'wb') +f = open(sys.argv[1], 'w') f.write('int main() {\n') f.write(' return 0;\n') f.write('}\n') diff --git a/test/ninja/action_dependencies/gyptest-action-dependencies.py b/test/ninja/action_dependencies/gyptest-action-dependencies.py index cb59d7e3..89813bab 100755 --- a/test/ninja/action_dependencies/gyptest-action-dependencies.py +++ b/test/ninja/action_dependencies/gyptest-action-dependencies.py @@ -9,11 +9,13 @@ dependent targets, but not the targets themselves. """ +from __future__ import print_function + import os import sys if sys.platform == 'win32': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) diff --git a/test/ninja/action_dependencies/src/emit.py b/test/ninja/action_dependencies/src/emit.py index 2df74b79..8ed12f73 100755 --- a/test/ninja/action_dependencies/src/emit.py +++ b/test/ninja/action_dependencies/src/emit.py @@ -6,6 +6,6 @@ import sys -f = open(sys.argv[1], 'wb') +f = open(sys.argv[1], 'w') f.write('/* Hello World */\n') f.close() diff --git a/test/ninja/solibs_avoid_relinking/gyptest-solibs-avoid-relinking.py b/test/ninja/solibs_avoid_relinking/gyptest-solibs-avoid-relinking.py index 1b8e812e..fd4470ac 100755 --- a/test/ninja/solibs_avoid_relinking/gyptest-solibs-avoid-relinking.py +++ b/test/ninja/solibs_avoid_relinking/gyptest-solibs-avoid-relinking.py @@ -9,6 +9,8 @@ solib's public API hasn't changed. """ +from __future__ import print_function + import os import sys import TestCommon @@ -24,7 +26,7 @@ if not os.environ.get('ProgramFiles(x86)'): # TODO(scottmg) - print 'Skipping test on x86, http://crbug.com/365833' + print('Skipping test on x86, http://crbug.com/365833') test.pass_test() test.run_gyp('solibs_avoid_relinking.gyp') diff --git a/test/no-cpp/gyptest-no-cpp.py b/test/no-cpp/gyptest-no-cpp.py index 874bb05d..a5d64512 100644 --- a/test/no-cpp/gyptest-no-cpp.py +++ b/test/no-cpp/gyptest-no-cpp.py @@ -30,7 +30,7 @@ def LinksLibStdCpp(path): proc = subprocess.Popen(['otool', '-L', path], stdout=subprocess.PIPE) else: proc = subprocess.Popen(['ldd', path], stdout=subprocess.PIPE) - output = proc.communicate()[0] + output = proc.communicate()[0].decode('utf-8') assert not proc.returncode return 'libstdc++' in output or 'libc++' in output diff --git a/test/rules-dirname/gyptest-dirname.py b/test/rules-dirname/gyptest-dirname.py index 9b8949b3..da5429cb 100755 --- a/test/rules-dirname/gyptest-dirname.py +++ b/test/rules-dirname/gyptest-dirname.py @@ -8,12 +8,14 @@ Verifies simple rules when using an explicit build target of 'all'. 
""" +from __future__ import print_function + import TestGyp import os import sys if sys.platform == 'win32': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) diff --git a/test/rules-dirname/src/copy-file.py b/test/rules-dirname/src/copy-file.py index 9774ccc9..271a72b6 100755 --- a/test/rules-dirname/src/copy-file.py +++ b/test/rules-dirname/src/copy-file.py @@ -6,6 +6,6 @@ import sys contents = open(sys.argv[1], 'r').read() -open(sys.argv[2], 'wb').write(contents) +open(sys.argv[2], 'w').write(contents) sys.exit(0) diff --git a/test/rules-rebuild/src/make-sources.py b/test/rules-rebuild/src/make-sources.py index 7ec02278..dd9e5285 100755 --- a/test/rules-rebuild/src/make-sources.py +++ b/test/rules-rebuild/src/make-sources.py @@ -10,9 +10,9 @@ (in_file, c_file, h_file) = sys.argv[1:] def write_file(filename, contents): - open(filename, 'wb').write(contents) + open(filename, 'w').write(contents) -write_file(c_file, open(in_file, 'rb').read()) +write_file(c_file, open(in_file, 'r').read()) write_file(h_file, '#define NAME "%s"\n' % in_file) diff --git a/test/rules-variables/gyptest-rules-variables.py b/test/rules-variables/gyptest-rules-variables.py index c1825e0c..16afc22e 100755 --- a/test/rules-variables/gyptest-rules-variables.py +++ b/test/rules-variables/gyptest-rules-variables.py @@ -8,10 +8,12 @@ Verifies rules related variables are expanded. """ +from __future__ import print_function + import sys if sys.platform == 'win32': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) diff --git a/test/rules/gyptest-all.py b/test/rules/gyptest-all.py index e6e637eb..0520c2f6 100755 --- a/test/rules/gyptest-all.py +++ b/test/rules/gyptest-all.py @@ -8,10 +8,12 @@ Verifies simple rules when using an explicit build target of 'all'. """ +from __future__ import print_function + import sys if sys.platform == 'win32': - print "This test is currently disabled: https://crbug.com/483696." + print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) diff --git a/test/rules/gyptest-default.py b/test/rules/gyptest-default.py index 65b79dac..5d010941 100755 --- a/test/rules/gyptest-default.py +++ b/test/rules/gyptest-default.py @@ -8,10 +8,12 @@ Verifies simple rules when using an explicit build target of 'all'. """ +from __future__ import print_function + import sys if sys.platform == 'win32': - print "This test is currently disabled: https://crbug.com/483696." 
+ print("This test is currently disabled: https://crbug.com/483696.") sys.exit(0) diff --git a/test/rules/src/copy-file.py b/test/rules/src/copy-file.py index 5a5feae1..7bdfbfd4 100755 --- a/test/rules/src/copy-file.py +++ b/test/rules/src/copy-file.py @@ -6,6 +6,6 @@ import sys contents = open(sys.argv[1], 'r').read() -open(sys.argv[2], 'wb').write(contents) +open(sys.argv[2], 'w').write(contents) sys.exit(0) diff --git a/test/small/gyptest-small.py b/test/small/gyptest-small.py index e7562cb7..e6cb25f3 100755 --- a/test/small/gyptest-small.py +++ b/test/small/gyptest-small.py @@ -10,6 +10,7 @@ import imp import os +import platform import sys import unittest diff --git a/test/standalone/gyptest-standalone.py b/test/standalone/gyptest-standalone.py index 87143706..0581d538 100644 --- a/test/standalone/gyptest-standalone.py +++ b/test/standalone/gyptest-standalone.py @@ -9,6 +9,8 @@ option can be built even when it's relocated to a different path. """ +from __future__ import print_function + import TestGyp import os @@ -24,9 +26,9 @@ if os.path.splitext(__file__)[0] in file: continue file = os.path.join(root, file) - contents = open(file).read() + contents = open(file, 'rb').read().decode('utf-8', 'ignore') if 'standalone.gyp' in contents: - print 'gyp file referenced in generated output: %s' % file + print('gyp file referenced in generated output: %s' % file) test.fail_test() diff --git a/test/symlinks/gyptest-symlinks.py b/test/symlinks/gyptest-symlinks.py index f0c2d515..278818a9 100755 --- a/test/symlinks/gyptest-symlinks.py +++ b/test/symlinks/gyptest-symlinks.py @@ -28,8 +28,8 @@ # symlinked back and processed. Note that we don't ask gyp to touch the # original files at all; they are only there as source material for the copy. # That's why hello.gyp references symlink_hello.c instead of hello.c. 
- with tempfile.NamedTemporaryFile() as gyp_file: - with tempfile.NamedTemporaryFile() as c_file: + with tempfile.NamedTemporaryFile(mode='w+') as gyp_file: + with tempfile.NamedTemporaryFile(mode='w+') as c_file: with open('hello.gyp') as orig_gyp_file: gyp_file.write(orig_gyp_file.read()) gyp_file.flush() diff --git a/test/variables/commands/commands-repeated.gyp b/test/variables/commands/commands-repeated.gyp index 822ae4f0..1f52e759 100644 --- a/test/variables/commands/commands-repeated.gyp +++ b/test/variables/commands/commands-repeated.gyp @@ -8,7 +8,7 @@ { 'variables': { - 'pi': 'import math; print math.pi', + 'pi': 'import math; print(math.pi)', 'third_letters': "<(other_letters)HIJK", 'letters_list': 'ABCD', 'other_letters': '<(letters_list)EFG', @@ -41,9 +41,9 @@ 'type': 'none', 'variables': { 'var1': '>f, 'options', options -print >>f, 'args', args +print('options', options, file=f) +print('args', args, file=f) f.close() diff --git a/test/win/vs-macros/test_exists.py b/test/win/vs-macros/test_exists.py index f5c90ad6..297b1b7d 100644 --- a/test/win/vs-macros/test_exists.py +++ b/test/win/vs-macros/test_exists.py @@ -6,5 +6,5 @@ import sys if not os.path.exists(sys.argv[1]): - raise + raise Exception() open(sys.argv[2], 'w').close() diff --git a/tools/graphviz.py b/tools/graphviz.py index 326ae221..538b059d 100755 --- a/tools/graphviz.py +++ b/tools/graphviz.py @@ -8,6 +8,8 @@ generate input suitable for graphviz to render a dependency graph of targets.""" +from __future__ import print_function + import collections import json import sys @@ -50,9 +52,9 @@ def WriteGraph(edges): build_file, target_name, toolset = ParseTarget(src) files[build_file].append(src) - print 'digraph D {' - print ' fontsize=8' # Used by subgraphs. - print ' node [fontsize=8]' + print('digraph D {') + print(' fontsize=8') # Used by subgraphs. + print(' node [fontsize=8]') # Output nodes by file. We must first write out each node within # its file grouping before writing out any edges that may refer @@ -63,31 +65,31 @@ def WriteGraph(edges): # the display by making it a box without an internal node. target = targets[0] build_file, target_name, toolset = ParseTarget(target) - print ' "%s" [shape=box, label="%s\\n%s"]' % (target, filename, - target_name) + print(' "%s" [shape=box, label="%s\\n%s"]' % (target, filename, + target_name)) else: # Group multiple nodes together in a subgraph. - print ' subgraph "cluster_%s" {' % filename - print ' label = "%s"' % filename + print(' subgraph "cluster_%s" {' % filename) + print(' label = "%s"' % filename) for target in targets: build_file, target_name, toolset = ParseTarget(target) - print ' "%s" [label="%s"]' % (target, target_name) - print ' }' + print(' "%s" [label="%s"]' % (target, target_name)) + print(' }') # Now that we've placed all the nodes within subgraphs, output all # the edges between nodes. for src, dsts in edges.items(): for dst in dsts: - print ' "%s" -> "%s"' % (src, dst) + print(' "%s" -> "%s"' % (src, dst)) - print '}' + print('}') def main(): if len(sys.argv) < 2: - print >>sys.stderr, __doc__ - print >>sys.stderr - print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0]) + print(__doc__, file=sys.stderr) + print(file=sys.stderr) + print('usage: %s target1 target2...' 
% (sys.argv[0]), file=sys.stderr) return 1 edges = LoadEdges('dump.json', sys.argv[1:]) diff --git a/tools/pretty_gyp.py b/tools/pretty_gyp.py index d5736bbd..5060d1d9 100755 --- a/tools/pretty_gyp.py +++ b/tools/pretty_gyp.py @@ -6,6 +6,8 @@ """Pretty-prints the contents of a GYP file.""" +from __future__ import print_function + import sys import re @@ -125,15 +127,15 @@ def prettyprint_input(lines): (brace_diff, after) = count_braces(line) if brace_diff != 0: if after: - print " " * (basic_offset * indent) + line + print(" " * (basic_offset * indent) + line) indent += brace_diff else: indent += brace_diff - print " " * (basic_offset * indent) + line + print(" " * (basic_offset * indent) + line) else: - print " " * (basic_offset * indent) + line + print(" " * (basic_offset * indent) + line) else: - print "" + print("") last_line = line diff --git a/tools/pretty_sln.py b/tools/pretty_sln.py index ca8cf4ad..12a6dadd 100755 --- a/tools/pretty_sln.py +++ b/tools/pretty_sln.py @@ -12,6 +12,8 @@ Then it outputs a possible build order. """ +from __future__ import print_function + __author__ = 'nsylvain (Nicolas Sylvain)' import os @@ -26,7 +28,7 @@ def BuildProject(project, built, projects, deps): for dep in deps[project]: if dep not in built: BuildProject(dep, built, projects, deps) - print project + print(project) built.append(project) def ParseSolution(solution_file): @@ -100,44 +102,44 @@ def ParseSolution(solution_file): return (projects, dependencies) def PrintDependencies(projects, deps): - print "---------------------------------------" - print "Dependencies for all projects" - print "---------------------------------------" - print "-- --" + print("---------------------------------------") + print("Dependencies for all projects") + print("---------------------------------------") + print("-- --") for (project, dep_list) in sorted(deps.items()): - print "Project : %s" % project - print "Path : %s" % projects[project][0] + print("Project : %s" % project) + print("Path : %s" % projects[project][0]) if dep_list: for dep in dep_list: - print " - %s" % dep - print "" + print(" - %s" % dep) + print("") - print "-- --" + print("-- --") def PrintBuildOrder(projects, deps): - print "---------------------------------------" - print "Build order " - print "---------------------------------------" - print "-- --" + print("---------------------------------------") + print("Build order ") + print("---------------------------------------") + print("-- --") built = [] for (project, _) in sorted(deps.items()): if project not in built: BuildProject(project, built, projects, deps) - print "-- --" + print("-- --") def PrintVCProj(projects): for project in projects: - print "-------------------------------------" - print "-------------------------------------" - print project - print project - print project - print "-------------------------------------" - print "-------------------------------------" + print("-------------------------------------") + print("-------------------------------------") + print(project) + print(project) + print(project) + print("-------------------------------------") + print("-------------------------------------") project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]), projects[project][2])) @@ -153,7 +155,7 @@ def PrintVCProj(projects): def main(): # check if we have exactly 1 parameter. 
if len(sys.argv) < 2: - print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0] + print('Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]) return 1 (projects, deps) = ParseSolution(sys.argv[1]) diff --git a/tools/pretty_vcproj.py b/tools/pretty_vcproj.py index 6099bd7c..f02e59ea 100755 --- a/tools/pretty_vcproj.py +++ b/tools/pretty_vcproj.py @@ -12,6 +12,8 @@ It outputs the resulting xml to stdout. """ +from __future__ import print_function + __author__ = 'nsylvain (Nicolas Sylvain)' import os @@ -73,23 +75,23 @@ def PrettyPrintNode(node, indent=0): # Print the main tag if attr_count == 0: - print '%s<%s>' % (' '*indent, node.nodeName) + print('%s<%s>' % (' '*indent, node.nodeName)) else: - print '%s<%s' % (' '*indent, node.nodeName) + print('%s<%s' % (' '*indent, node.nodeName)) all_attributes = [] for (name, value) in node.attributes.items(): all_attributes.append((name, value)) - all_attributes.sort(CmpTuple()) + all_attributes.sort(key=(lambda attr: attr[0])) for (name, value) in all_attributes: - print '%s %s="%s"' % (' '*indent, name, value) - print '%s>' % (' '*indent) + print('%s %s="%s"' % (' '*indent, name, value)) + print('%s>' % (' '*indent)) if node.nodeValue: - print '%s %s' % (' '*indent, node.nodeValue) + print('%s %s' % (' '*indent, node.nodeValue)) for sub_node in node.childNodes: PrettyPrintNode(sub_node, indent=indent+2) - print '%s' % (' '*indent, node.nodeName) + print('%s' % (' '*indent, node.nodeName)) def FlattenFilter(node): @@ -283,8 +285,8 @@ def main(argv): # check if we have exactly 1 parameter. if len(argv) < 2: - print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] ' - '[key2=value2]' % argv[0]) + print('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] ' + '[key2=value2]' % argv[0]) return 1 # Parse the keys
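The pretty_vcproj.py hunk above swaps the CmpTuple() comparator for a key function because Python 3 drops the cmp argument from list.sort(). A minimal sketch of the equivalent spellings, outside the patch (the attribute data is made up):

    # Not part of the patch: Python 3 sorts take key=, not a comparator object.
    import functools

    attrs = [('Name', 'demo'), ('Configuration', 'Debug')]  # sample (name, value) pairs
    attrs.sort(key=lambda attr: attr[0])                    # sort by attribute name

    # General fallback when a two-argument comparator must be kept:
    cmp_by_name = lambda a, b: (a[0] > b[0]) - (a[0] < b[0])
    attrs.sort(key=functools.cmp_to_key(cmp_by_name))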