Reimport
This commit is contained in:
31
tools/adb/systrace/catapult/common/py_utils/PRESUBMIT.py
Normal file
31
tools/adb/systrace/catapult/common/py_utils/PRESUBMIT.py
Normal file
@@ -0,0 +1,31 @@
|
||||
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit hook invoked when a change is uploaded for review."""
  return _CommonChecks(input_api, output_api)
|
||||
|
||||
|
||||
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit hook invoked when a change is committed."""
  return _CommonChecks(input_api, output_api)
|
||||
|
||||
|
||||
def _CommonChecks(input_api, output_api):
  """Run pylint over the project and return any presubmit findings."""
  pylint_checks = input_api.canned_checks.GetPylint(
      input_api, output_api,
      extra_paths_list=_GetPathsToPrepend(input_api),
      pylintrc='../../pylintrc')
  return list(input_api.RunTests(pylint_checks))
|
||||
|
||||
|
||||
def _GetPathsToPrepend(input_api):
|
||||
project_dir = input_api.PresubmitLocalPath()
|
||||
catapult_dir = input_api.os_path.join(project_dir, '..', '..')
|
||||
return [
|
||||
project_dir,
|
||||
input_api.os_path.join(catapult_dir, 'dependency_manager'),
|
||||
input_api.os_path.join(catapult_dir, 'devil'),
|
||||
input_api.os_path.join(catapult_dir, 'third_party', 'mock'),
|
||||
input_api.os_path.join(catapult_dir, 'third_party', 'pyfakefs'),
|
||||
]
|
||||
38
tools/adb/systrace/catapult/common/py_utils/bin/run_tests
Normal file
38
tools/adb/systrace/catapult/common/py_utils/bin/run_tests
Normal file
@@ -0,0 +1,38 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
_CATAPULT_PATH = os.path.abspath(
|
||||
os.path.join(os.path.dirname(__file__), '..', '..', '..'))
|
||||
|
||||
_PY_UTILS_PATH = os.path.abspath(
|
||||
os.path.join(_CATAPULT_PATH, 'common', 'py_utils'))
|
||||
|
||||
|
||||
def _RunTestsOrDie(top_level_dir):
  """Run typ tests under |top_level_dir|; exit the process on failure.

  run_with_typ is imported at the bottom of this script, before this
  function is called from __main__.
  """
  result = run_with_typ.Run(top_level_dir, path=[_PY_UTILS_PATH])
  if result:
    sys.exit(result)
|
||||
|
||||
|
||||
def _AddToPathIfNeeded(path):
|
||||
if path not in sys.path:
|
||||
sys.path.insert(0, path)
|
||||
|
||||
|
||||
if __name__ == '__main__':
  # Make the catapult checkout root importable before pulling in any
  # catapult modules below.
  _AddToPathIfNeeded(_CATAPULT_PATH)

  from hooks import install
  if '--no-install-hooks' in sys.argv:
    sys.argv.remove('--no-install-hooks')
  else:
    install.InstallHooks()

  # Imported here rather than at the top of the file because it only becomes
  # importable once _CATAPULT_PATH has been added to sys.path above.
  from catapult_build import run_with_typ
  _RunTestsOrDie(_PY_UTILS_PATH)
  sys.exit(0)
|
||||
158
tools/adb/systrace/catapult/common/py_utils/py_utils/__init__.py
Normal file
158
tools/adb/systrace/catapult/common/py_utils/py_utils/__init__.py
Normal file
@@ -0,0 +1,158 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright (c) 2016 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import functools
|
||||
import inspect
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import platform
|
||||
|
||||
|
||||
def GetCatapultDir():
  """Return the normalized path of the catapult checkout root.

  The root is three directory levels above this file.
  """
  here = os.path.dirname(__file__)
  return os.path.normpath(os.path.join(here, '..', '..', '..'))
|
||||
|
||||
|
||||
def IsRunningOnCrosDevice():
  """Returns True if we're on a ChromeOS device."""
  lsb_release_path = '/etc/lsb-release'
  if not sys.platform.startswith('linux'):
    return False
  if not os.path.exists(lsb_release_path):
    return False
  with open(lsb_release_path, 'r') as lsb_file:
    contents = lsb_file.read()
  return 'CHROMEOS_RELEASE_NAME' in contents
|
||||
|
||||
|
||||
def GetHostOsName():
  """Return a short name for the host OS, or None when unrecognized."""
  if IsRunningOnCrosDevice():
    return 'chromeos'
  platform_name = sys.platform
  if platform_name.startswith('linux'):
    return 'linux'
  # Falls through to None (implicitly) for platforms not listed here,
  # matching the original behavior.
  return {'darwin': 'mac', 'win32': 'win'}.get(platform_name)
|
||||
|
||||
|
||||
def GetHostArchName():
  """Return the host machine's architecture string, e.g. 'x86_64'."""
  return platform.machine()
|
||||
|
||||
|
||||
def _ExecutableExtensions():
|
||||
# pathext is, e.g. '.com;.exe;.bat;.cmd'
|
||||
exts = os.getenv('PATHEXT').split(';') #e.g. ['.com','.exe','.bat','.cmd']
|
||||
return [x[1:].upper() for x in exts] #e.g. ['COM','EXE','BAT','CMD']
|
||||
|
||||
|
||||
def IsExecutable(path):
  """Return True if |path| is an existing, executable regular file."""
  if not os.path.isfile(path):
    return False
  if getattr(os, 'name', None) == 'nt':
    # On Windows, executability is determined by the file extension.
    extension = path.split('.')[-1].upper()
    return extension in _ExecutableExtensions()
  return os.access(path, os.X_OK)
|
||||
|
||||
|
||||
def _AddDirToPythonPath(*path_parts):
|
||||
path = os.path.abspath(os.path.join(*path_parts))
|
||||
if os.path.isdir(path) and path not in sys.path:
|
||||
# Some callsite that use telemetry assumes that sys.path[0] is the directory
|
||||
# containing the script, so we add these extra paths to right after it.
|
||||
sys.path.insert(1, path)
|
||||
|
||||
# Make sibling catapult projects importable before the devil imports below.
_AddDirToPythonPath(os.path.join(GetCatapultDir(), 'devil'))
_AddDirToPythonPath(os.path.join(GetCatapultDir(), 'dependency_manager'))
_AddDirToPythonPath(os.path.join(GetCatapultDir(), 'third_party', 'mock'))
# mox3 is needed for pyfakefs usage, but not for pylint.
_AddDirToPythonPath(os.path.join(GetCatapultDir(), 'third_party', 'mox3'))
_AddDirToPythonPath(
    os.path.join(GetCatapultDir(), 'third_party', 'pyfakefs'))

# These must come after the sys.path manipulation above, hence the pylint
# suppressions.
from devil.utils import timeout_retry  # pylint: disable=wrong-import-position
from devil.utils import reraiser_thread  # pylint: disable=wrong-import-position
|
||||
|
||||
|
||||
# Decorator that adds timeout functionality to a function.
|
||||
def Timeout(default_timeout):
  """Decorator factory: adds timeout handling (see TimeoutDeco) to a function."""
  def _Decorate(func):
    return TimeoutDeco(func, default_timeout)
  return _Decorate
|
||||
|
||||
# Note: Even though the "timeout" keyword argument is the only
|
||||
# keyword argument that will need to be given to the decorated function,
|
||||
# we still have to use the **kwargs syntax, because we have to use
|
||||
# the *args syntax here before (since the decorator decorates functions
|
||||
# with different numbers of positional arguments) and Python doesn't allow
|
||||
# a single named keyword argument after *args.
|
||||
# (e.g., 'def foo(*args, bar=42):' is a syntax error)
|
||||
|
||||
def TimeoutDeco(func, default_timeout):
  """Wrap |func| so it runs under devil's timeout_retry machinery.

  The wrapper accepts an optional 'timeout' keyword argument (defaulting to
  |default_timeout|); no other keyword arguments are forwarded to |func|.
  On timeout it prints a message and returns False.
  """
  @functools.wraps(func)
  def RunWithTimeout(*args, **kwargs):
    timeout = kwargs.get('timeout', default_timeout)
    try:
      return timeout_retry.Run(func, timeout, 0, args=args)
    except reraiser_thread.TimeoutError:
      print('%s timed out.' % func.__name__)
      return False
  return RunWithTimeout
|
||||
|
||||
|
||||
MIN_POLL_INTERVAL_IN_SECONDS = 0.1
|
||||
MAX_POLL_INTERVAL_IN_SECONDS = 5
|
||||
OUTPUT_INTERVAL_IN_SECONDS = 300
|
||||
|
||||
def WaitFor(condition, timeout):
  """Waits for up to |timeout| secs for the function |condition| to return True.

  Polling frequency is (elapsed_time / 10), with a min of .1s and max of 5s.

  Args:
    condition: A zero-argument callable; waiting stops when it returns a
        truthy value.
    timeout: Maximum number of seconds to wait.

  Returns:
    Result of |condition| function (if present).

  Raises:
    TimeoutException: If |condition| is still falsy after |timeout| seconds.
  """
  def GetConditionString():
    # For lambdas, prefer the source text in the error message; fall back to
    # the function name when the source is unavailable.
    if condition.__name__ == '<lambda>':
      try:
        return inspect.getsource(condition).strip()
      except IOError:
        pass
    return condition.__name__

  # Do an initial check to see if its true.
  res = condition()
  if res:
    return res
  start_time = time.time()
  last_output_time = start_time
  elapsed_time = time.time() - start_time
  while elapsed_time < timeout:
    res = condition()
    if res:
      return res
    now = time.time()
    elapsed_time = now - start_time
    last_output_elapsed_time = now - last_output_time
    # NOTE(review): this branch only resets the bookkeeping timestamp; it
    # looks like a periodic progress log was intended here — confirm.
    if last_output_elapsed_time > OUTPUT_INTERVAL_IN_SECONDS:
      last_output_time = time.time()
    # Back off gradually: poll at elapsed/10, clamped to [0.1s, 5s].
    poll_interval = min(max(elapsed_time / 10., MIN_POLL_INTERVAL_IN_SECONDS),
                        MAX_POLL_INTERVAL_IN_SECONDS)
    time.sleep(poll_interval)
  raise TimeoutException('Timed out while waiting %ds for %s.' %
                         (timeout, GetConditionString()))
|
||||
|
||||
class TimeoutException(Exception):
  """Raised when an operation does not complete within its allotted time.

  Retrying with a longer timeout may allow the operation to succeed.
  """
|
||||
@@ -0,0 +1,21 @@
|
||||
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import atexit
|
||||
import logging
|
||||
|
||||
|
||||
def _WrapFunction(function):
|
||||
def _WrappedFn(*args, **kwargs):
|
||||
logging.debug('Try running %s', repr(function))
|
||||
try:
|
||||
function(*args, **kwargs)
|
||||
logging.debug('Did run %s', repr(function))
|
||||
except Exception: # pylint: disable=broad-except
|
||||
logging.exception('Exception running %s', repr(function))
|
||||
return _WrappedFn
|
||||
|
||||
|
||||
def Register(function, *args, **kwargs):
  """Schedule |function| to run at interpreter exit; errors are logged, not raised."""
  wrapped = _WrapFunction(function)
  atexit.register(wrapped, *args, **kwargs)
|
||||
@@ -0,0 +1,61 @@
|
||||
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import logging
|
||||
|
||||
import dependency_manager
|
||||
|
||||
|
||||
class BinaryManager(object):
  """Thin facade over dependency_manager.

  Behaves like dependency_manager but keys FetchPath/LocalPath lookups by
  (os_name, arch, os_version) instead of a single pre-built platform string.
  """

  def __init__(self, config_files):
    """Create a manager backed by the given dependency config files.

    Raises:
      ValueError: If |config_files| is not a non-empty list.
    """
    if not config_files or not isinstance(config_files, list):
      raise ValueError(
          'Must supply a list of config files to the BinaryManager')
    configs = [dependency_manager.BaseConfig(path) for path in config_files]
    self._dependency_manager = dependency_manager.DependencyManager(configs)

  def FetchPathWithVersion(self, binary_name, os_name, arch, os_version=None):
    """Return (path, version) for <binary_name>, downloading if needed."""
    return self._WrapDependencyManagerFunction(
        self._dependency_manager.FetchPathWithVersion,
        binary_name, os_name, arch, os_version)

  def FetchPath(self, binary_name, os_name, arch, os_version=None):
    """Return a path to the executable for <binary_name>, or None if not found.

    Will attempt to download from cloud storage if needed.
    """
    return self._WrapDependencyManagerFunction(
        self._dependency_manager.FetchPath,
        binary_name, os_name, arch, os_version)

  def LocalPath(self, binary_name, os_name, arch, os_version=None):
    """Return a local path to <binary_name>, or None if not found.

    Will not download from cloud_storage.
    """
    return self._WrapDependencyManagerFunction(
        self._dependency_manager.LocalPath,
        binary_name, os_name, arch, os_version)

  def _WrapDependencyManagerFunction(
      self, function, binary_name, os_name, arch, os_version):
    # Try the OS-version-specific platform first; fall back to the generic
    # os_arch platform when the versioned lookup fails.
    platform = '%s_%s' % (os_name, arch)
    if os_version:
      versioned_platform = '%s_%s_%s' % (os_name, os_version, arch)
      try:
        return function(binary_name, versioned_platform)
      except dependency_manager.NoPathFoundError:
        logging.warning(
            'Cannot find path for %s on platform %s. Falling back to %s.',
            binary_name, versioned_platform, platform)
    return function(binary_name, platform)
|
||||
|
||||
@@ -0,0 +1,214 @@
|
||||
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from pyfakefs import fake_filesystem_unittest
|
||||
from dependency_manager import exceptions
|
||||
|
||||
from py_utils import binary_manager
|
||||
|
||||
class BinaryManagerTest(fake_filesystem_unittest.TestCase):
  """Tests BinaryManager against a pyfakefs in-memory filesystem."""
  # TODO(aiolos): disable cloud storage use during this test.

  def setUp(self):
    self.setUpPyfakefs()
    # The expected contents of example_config.json, written below; dep_3 has
    # local_paths only (no cloud storage info).
    # pylint: disable=bad-continuation
    self.expected_dependencies = {
        'dep_1': {
          'cloud_storage_base_folder': 'dependencies/fake_config',
          'cloud_storage_bucket': 'chrome-tel',
          'file_info': {
            'linux_x86_64': {
              'cloud_storage_hash': '661ce936b3276f7ec3d687ab62be05b96d796f21',
              'download_path': 'bin/linux/x86_64/dep_1'
            },
            'mac_x86_64': {
              'cloud_storage_hash': 'c7b1bfc6399dc683058e88dac1ef0f877edea74b',
              'download_path': 'bin/mac/x86_64/dep_1'
            },
            'win_AMD64': {
              'cloud_storage_hash': 'ac4fee89a51662b9d920bce443c19b9b2929b198',
              'download_path': 'bin/win/AMD64/dep_1.exe'
            },
            'win_x86': {
              'cloud_storage_hash': 'e246e183553ea26967d7b323ea269e3357b9c837',
              'download_path': 'bin/win/x86/dep_1.exe'
            }
          }
        },
        'dep_2': {
          'cloud_storage_base_folder': 'dependencies/fake_config',
          'cloud_storage_bucket': 'chrome-tel',
          'file_info': {
            'linux_x86_64': {
              'cloud_storage_hash': '13a57efae9a680ac0f160b3567e02e81f4ac493c',
              'download_path': 'bin/linux/x86_64/dep_2',
              'local_paths': [
                  '../../example/location/linux/dep_2',
                  '../../example/location2/linux/dep_2'
              ]
            },
            'mac_x86_64': {
              'cloud_storage_hash': 'd10c0ddaa8586b20449e951216bee852fa0f8850',
              'download_path': 'bin/mac/x86_64/dep_2',
              'local_paths': [
                  '../../example/location/mac/dep_2',
                  '../../example/location2/mac/dep_2'
              ]
            },
            'win_AMD64': {
              'cloud_storage_hash': 'fd5b417f78c7f7d9192a98967058709ded1d399d',
              'download_path': 'bin/win/AMD64/dep_2.exe',
              'local_paths': [
                  '../../example/location/win64/dep_2',
                  '../../example/location2/win64/dep_2'
              ]
            },
            'win_x86': {
              'cloud_storage_hash': 'cf5c8fe920378ce30d057e76591d57f63fd31c1a',
              'download_path': 'bin/win/x86/dep_2.exe',
              'local_paths': [
                  '../../example/location/win32/dep_2',
                  '../../example/location2/win32/dep_2'
              ]
            },
            'android_k_x64': {
              'cloud_storage_hash': '09177be2fed00b44df0e777932828425440b23b3',
              'download_path': 'bin/android/x64/k/dep_2.apk',
              'local_paths': [
                  '../../example/location/android_x64/k/dep_2',
                  '../../example/location2/android_x64/k/dep_2'
              ]
            },
            'android_l_x64': {
              'cloud_storage_hash': '09177be2fed00b44df0e777932828425440b23b3',
              'download_path': 'bin/android/x64/l/dep_2.apk',
              'local_paths': [
                  '../../example/location/android_x64/l/dep_2',
                  '../../example/location2/android_x64/l/dep_2'
              ]
            },
            'android_k_x86': {
              'cloud_storage_hash': 'bcf02af039713a48b69b89bd7f0f9c81ed8183a4',
              'download_path': 'bin/android/x86/k/dep_2.apk',
              'local_paths': [
                  '../../example/location/android_x86/k/dep_2',
                  '../../example/location2/android_x86/k/dep_2'
              ]
            },
            'android_l_x86': {
              'cloud_storage_hash': '12a74cec071017ba11655b5740b8a58e2f52a219',
              'download_path': 'bin/android/x86/l/dep_2.apk',
              'local_paths': [
                  '../../example/location/android_x86/l/dep_2',
                  '../../example/location2/android_x86/l/dep_2'
              ]
            }
          }
        },
        'dep_3': {
          'file_info': {
            'linux_x86_64': {
              'local_paths': [
                  '../../example/location/linux/dep_3',
                  '../../example/location2/linux/dep_3'
              ]
            },
            'mac_x86_64': {
              'local_paths': [
                  '../../example/location/mac/dep_3',
                  '../../example/location2/mac/dep_3'
              ]
            },
            'win_AMD64': {
              'local_paths': [
                  '../../example/location/win64/dep_3',
                  '../../example/location2/win64/dep_3'
              ]
            },
            'win_x86': {
              'local_paths': [
                  '../../example/location/win32/dep_3',
                  '../../example/location2/win32/dep_3'
              ]
            }
          }
        }
    }
    # pylint: enable=bad-continuation
    fake_config = {
        'config_type': 'BaseConfig',
        'dependencies': self.expected_dependencies
    }

    # Materialize the config and two dep_2 local files in the fake fs so the
    # local-path lookups in the tests below can succeed.
    self.base_config = os.path.join(os.path.dirname(__file__),
                                    'example_config.json')
    self.fs.CreateFile(self.base_config, contents=json.dumps(fake_config))
    linux_file = os.path.join(
        os.path.dirname(self.base_config),
        os.path.join('..', '..', 'example', 'location2', 'linux', 'dep_2'))
    android_file = os.path.join(
        os.path.dirname(self.base_config),
        '..', '..', 'example', 'location', 'android_x86', 'l', 'dep_2')
    self.expected_dep2_linux_file = os.path.abspath(linux_file)
    self.expected_dep2_android_file = os.path.abspath(android_file)
    self.fs.CreateFile(self.expected_dep2_linux_file)
    self.fs.CreateFile(self.expected_dep2_android_file)

  def tearDown(self):
    self.tearDownPyfakefs()

  def testInitializationNoConfig(self):
    with self.assertRaises(ValueError):
      binary_manager.BinaryManager(None)

  def testInitializationMissingConfig(self):
    with self.assertRaises(ValueError):
      binary_manager.BinaryManager(os.path.join('missing', 'path'))

  def testInitializationWithConfig(self):
    # A bare string (not a list) must be rejected.
    with self.assertRaises(ValueError):
      manager = binary_manager.BinaryManager(self.base_config)
    manager = binary_manager.BinaryManager([self.base_config])
    # NOTE(review): assertItemsEqual is Python 2 only (assertCountEqual in
    # Python 3) — confirm the targeted interpreter.
    self.assertItemsEqual(self.expected_dependencies,
                          manager._dependency_manager._lookup_dict)

  def testSuccessfulFetchPathNoOsVersion(self):
    manager = binary_manager.BinaryManager([self.base_config])
    found_path = manager.FetchPath('dep_2', 'linux', 'x86_64')
    self.assertEqual(self.expected_dep2_linux_file, found_path)

  def testSuccessfulFetchPathOsVersion(self):
    manager = binary_manager.BinaryManager([self.base_config])
    found_path = manager.FetchPath('dep_2', 'android', 'x86', 'l')
    self.assertEqual(self.expected_dep2_android_file, found_path)

  def testSuccessfulFetchPathFallbackToNoOsVersion(self):
    # 'fake_version' has no entry, so the lookup falls back to linux_x86_64.
    manager = binary_manager.BinaryManager([self.base_config])
    found_path = manager.FetchPath('dep_2', 'linux', 'x86_64', 'fake_version')
    self.assertEqual(self.expected_dep2_linux_file, found_path)

  def testFailedFetchPathMissingDep(self):
    manager = binary_manager.BinaryManager([self.base_config])
    with self.assertRaises(exceptions.NoPathFoundError):
      manager.FetchPath('missing_dep', 'linux', 'x86_64')
    with self.assertRaises(exceptions.NoPathFoundError):
      manager.FetchPath('missing_dep', 'android', 'x86', 'l')
    with self.assertRaises(exceptions.NoPathFoundError):
      manager.FetchPath('dep_1', 'linux', 'bad_arch')
    with self.assertRaises(exceptions.NoPathFoundError):
      manager.FetchPath('dep_1', 'bad_os', 'x86')

  def testSuccessfulLocalPathNoOsVersion(self):
    manager = binary_manager.BinaryManager([self.base_config])
    found_path = manager.LocalPath('dep_2', 'linux', 'x86_64')
    self.assertEqual(self.expected_dep2_linux_file, found_path)

  def testSuccessfulLocalPathOsVersion(self):
    manager = binary_manager.BinaryManager([self.base_config])
    found_path = manager.LocalPath('dep_2', 'android', 'x86', 'l')
    self.assertEqual(self.expected_dep2_android_file, found_path)
|
||||
|
||||
@@ -0,0 +1,34 @@
|
||||
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
import re
|
||||
import six
|
||||
|
||||
|
||||
def ToUnderscore(obj):
  """Converts a string, list, or dict from camelCase to lower_with_underscores.

  Descends recursively into lists and dicts, converting all dict keys.
  Returns a newly allocated object of the same structure as the input.
  """
  if isinstance(obj, six.string_types):
    return re.sub('(?!^)([A-Z]+)', r'_\1', obj).lower()
  if isinstance(obj, list):
    return [ToUnderscore(element) for element in obj]
  if isinstance(obj, dict):
    converted = {}
    for key, value in six.iteritems(obj):
      # Only containers are converted recursively; scalar values are kept
      # verbatim (string *values* are intentionally not renamed).
      if isinstance(value, (list, dict)):
        converted[ToUnderscore(key)] = ToUnderscore(value)
      else:
        converted[ToUnderscore(key)] = value
    return converted
  return obj
|
||||
@@ -0,0 +1,50 @@
|
||||
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import unittest
|
||||
|
||||
from py_utils import camel_case
|
||||
|
||||
|
||||
class CamelCaseTest(unittest.TestCase):
  """Tests camel_case.ToUnderscore for strings, lists, dicts, and passthrough."""

  def testString(self):
    self.assertEqual(camel_case.ToUnderscore('camelCase'), 'camel_case')
    self.assertEqual(camel_case.ToUnderscore('CamelCase'), 'camel_case')
    self.assertEqual(camel_case.ToUnderscore('Camel2Case'), 'camel2_case')
    self.assertEqual(camel_case.ToUnderscore('Camel2Case2'), 'camel2_case2')
    self.assertEqual(camel_case.ToUnderscore('2012Q3'), '2012_q3')

  def testList(self):
    camel_case_list = ['CamelCase', ['NestedList']]
    underscore_list = ['camel_case', ['nested_list']]
    self.assertEqual(camel_case.ToUnderscore(camel_case_list), underscore_list)

  def testDict(self):
    # Note: only keys are converted; string values ('aString') are unchanged.
    camel_case_dict = {
        'gpu': {
          'vendorId': 1000,
          'deviceId': 2000,
          'vendorString': 'aString',
          'deviceString': 'bString'},
        'secondaryGpus': [
          {'vendorId': 3000, 'deviceId': 4000,
           'vendorString': 'k', 'deviceString': 'l'}
        ]
    }
    underscore_dict = {
        'gpu': {
          'vendor_id': 1000,
          'device_id': 2000,
          'vendor_string': 'aString',
          'device_string': 'bString'},
        'secondary_gpus': [
          {'vendor_id': 3000, 'device_id': 4000,
           'vendor_string': 'k', 'device_string': 'l'}
        ]
    }
    self.assertEqual(camel_case.ToUnderscore(camel_case_dict), underscore_dict)

  def testOther(self):
    # Non-string/list/dict inputs are returned unchanged.
    self.assertEqual(camel_case.ToUnderscore(self), self)
|
||||
@@ -0,0 +1,126 @@
|
||||
{
|
||||
"config_type": "BaseConfig",
|
||||
"dependencies": {
|
||||
"chrome_canary": {
|
||||
"cloud_storage_base_folder": "binary_dependencies",
|
||||
"cloud_storage_bucket": "chrome-telemetry",
|
||||
"file_info": {
|
||||
"mac_x86_64": {
|
||||
"cloud_storage_hash": "381a491e14ab523b8db4cdf3c993713678237af8",
|
||||
"download_path": "bin/reference_builds/chrome-mac64.zip",
|
||||
"path_within_archive": "chrome-mac/Google Chrome.app/Contents/MacOS/Google Chrome",
|
||||
"version_in_cs": "77.0.3822.0"
|
||||
},
|
||||
"win_AMD64": {
|
||||
"cloud_storage_hash": "600ee522c410efe1de2f593c0efc32ae113a7d99",
|
||||
"download_path": "bin\\reference_build\\chrome-win64-clang.zip",
|
||||
"path_within_archive": "chrome-win64-clang\\chrome.exe",
|
||||
"version_in_cs": "77.0.3822.0"
|
||||
},
|
||||
"win_x86": {
|
||||
"cloud_storage_hash": "5b79a181bfbd94d8288529b0da1defa3ef097197",
|
||||
"download_path": "bin\\reference_build\\chrome-win32-clang.zip",
|
||||
"path_within_archive": "chrome-win32-clang\\chrome.exe",
|
||||
"version_in_cs": "77.0.3822.0"
|
||||
}
|
||||
}
|
||||
},
|
||||
"chrome_dev": {
|
||||
"cloud_storage_base_folder": "binary_dependencies",
|
||||
"cloud_storage_bucket": "chrome-telemetry",
|
||||
"file_info": {
|
||||
"linux_x86_64": {
|
||||
"cloud_storage_hash": "61d68a6b00f25c964f5162f5251962468c886f3a",
|
||||
"download_path": "bin/reference_build/chrome-linux64.zip",
|
||||
"path_within_archive": "chrome-linux64/chrome",
|
||||
"version_in_cs": "76.0.3809.21"
|
||||
}
|
||||
}
|
||||
},
|
||||
"chrome_stable": {
|
||||
"cloud_storage_base_folder": "binary_dependencies",
|
||||
"cloud_storage_bucket": "chrome-telemetry",
|
||||
"file_info": {
|
||||
"android_k_armeabi-v7a": {
|
||||
"cloud_storage_hash": "28b913c720d56a30c092625c7862f00175a316c7",
|
||||
"download_path": "bin/reference_build/android_k_armeabi-v7a/ChromeStable.apk",
|
||||
"version_in_cs": "75.0.3770.67"
|
||||
},
|
||||
"android_l_arm64-v8a": {
|
||||
"cloud_storage_hash": "4b953c33c61f94c2198e8001d0d8142c6504a875",
|
||||
"download_path": "bin/reference_build/android_l_arm64-v8a/ChromeStable.apk",
|
||||
"version_in_cs": "75.0.3770.67"
|
||||
},
|
||||
"android_l_armeabi-v7a": {
|
||||
"cloud_storage_hash": "28b913c720d56a30c092625c7862f00175a316c7",
|
||||
"download_path": "bin/reference_build/android_l_armeabi-v7a/ChromeStable.apk",
|
||||
"version_in_cs": "75.0.3770.67"
|
||||
},
|
||||
"android_n_arm64-v8a": {
|
||||
"cloud_storage_hash": "84152ba8f7a25cacc79d588ed827ea75f0e4ab94",
|
||||
"download_path": "bin/reference_build/android_n_arm64-v8a/Monochrome.apk",
|
||||
"version_in_cs": "75.0.3770.67"
|
||||
},
|
||||
"android_n_armeabi-v7a": {
|
||||
"cloud_storage_hash": "656bb9e3982d0d35decd5347ced2c320a7267f33",
|
||||
"download_path": "bin/reference_build/android_n_armeabi-v7a/Monochrome.apk",
|
||||
"version_in_cs": "75.0.3770.67"
|
||||
},
|
||||
"linux_x86_64": {
|
||||
"cloud_storage_hash": "dee8469e8dcd8453efd33f3a00d7ea302a126a4b",
|
||||
"download_path": "bin/reference_build/chrome-linux64.zip",
|
||||
"path_within_archive": "chrome-linux64/chrome",
|
||||
"version_in_cs": "75.0.3770.80"
|
||||
},
|
||||
"mac_x86_64": {
|
||||
"cloud_storage_hash": "16a43a1e794bb99ec1ebcd40569084985b3c6626",
|
||||
"download_path": "bin/reference_builds/chrome-mac64.zip",
|
||||
"path_within_archive": "chrome-mac/Google Chrome.app/Contents/MacOS/Google Chrome",
|
||||
"version_in_cs": "75.0.3770.80"
|
||||
},
|
||||
"win_AMD64": {
|
||||
"cloud_storage_hash": "1ec52bd4164f2d93c53113a093dae9e041eb2d73",
|
||||
"download_path": "bin\\reference_build\\chrome-win64-clang.zip",
|
||||
"path_within_archive": "chrome-win64-clang\\chrome.exe",
|
||||
"version_in_cs": "75.0.3770.80"
|
||||
},
|
||||
"win_x86": {
|
||||
"cloud_storage_hash": "0f9eb991ba618dc61f2063ea252f44be94c2252e",
|
||||
"download_path": "bin\\reference_build\\chrome-win-clang.zip",
|
||||
"path_within_archive": "chrome-win-clang\\chrome.exe",
|
||||
"version_in_cs": "75.0.3770.80"
|
||||
}
|
||||
}
|
||||
},
|
||||
"chrome_m72": {
|
||||
"cloud_storage_base_folder": "binary_dependencies",
|
||||
"cloud_storage_bucket": "chrome-telemetry",
|
||||
"file_info": {
|
||||
"linux_x86_64": {
|
||||
"cloud_storage_hash": "537c19346b20340cc6807242e1eb6d82dfcfa2e8",
|
||||
"download_path": "bin/reference_build/chrome-linux64.zip",
|
||||
"path_within_archive": "chrome-linux64/chrome",
|
||||
"version_in_cs": "72.0.3626.119"
|
||||
},
|
||||
"mac_x86_64": {
|
||||
"cloud_storage_hash": "7f6a931f696f57561703538c6f799781d6e22e7e",
|
||||
"download_path": "bin/reference_builds/chrome-mac64.zip",
|
||||
"path_within_archive": "chrome-mac/Google Chrome.app/Contents/MacOS/Google Chrome",
|
||||
"version_in_cs": "72.0.3626.119"
|
||||
},
|
||||
"win_AMD64": {
|
||||
"cloud_storage_hash": "563d7985c85bfe77e92b8253d0389ff8551018c7",
|
||||
"download_path": "bin\\reference_build\\chrome-win64-clang.zip",
|
||||
"path_within_archive": "chrome-win64-clang\\chrome.exe",
|
||||
"version_in_cs": "72.0.3626.119"
|
||||
},
|
||||
"win_x86": {
|
||||
"cloud_storage_hash": "1802179da16e44b83bd3f0b296f9e5b0b053d59c",
|
||||
"download_path": "bin\\reference_build\\chrome-win-clang.zip",
|
||||
"path_within_archive": "chrome-win-clang\\chrome.exe",
|
||||
"version_in_cs": "72.0.3626.119"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,26 @@
|
||||
# Copyright 2017 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import inspect
|
||||
|
||||
def IsMethodOverridden(parent_cls, child_cls, method_name):
  """Return True if |child_cls| overrides |method_name| from |parent_cls|.

  Walks the inheritance graph from |child_cls| up toward |parent_cls|,
  looking for a class that redefines the method.
  """
  assert inspect.isclass(parent_cls), '%s should be a class' % parent_cls
  assert inspect.isclass(child_cls), '%s should be a class' % child_cls
  assert parent_cls.__dict__.get(method_name), '%s has no method %s' % (
      parent_cls, method_name)

  if child_cls.__dict__.get(method_name):
    # Redefined directly on the child: overridden.
    return True

  if parent_cls in child_cls.__bases__:
    # Reached the parent without finding a redefinition.
    return False

  # Recurse through every non-object base class, looking for an override
  # anywhere between child and parent.
  ancestors = [base for base in child_cls.__bases__
               if base and base is not object]
  return any(IsMethodOverridden(parent_cls, ancestor, method_name)
             for ancestor in ancestors)
|
||||
@@ -0,0 +1,138 @@
|
||||
# Copyright 2017 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import unittest
|
||||
|
||||
from py_utils import class_util
|
||||
|
||||
|
||||
class ClassUtilTest(unittest.TestCase):
  """Tests class_util.IsMethodOverridden across inheritance shapes."""

  def testClassOverridden(self):
    class Parent(object):
      def MethodShouldBeOverridden(self):
        pass

    class Child(Parent):
      def MethodShouldBeOverridden(self):
        pass

    self.assertTrue(class_util.IsMethodOverridden(
        Parent, Child, 'MethodShouldBeOverridden'))

  def testGrandchildOverridden(self):
    class Parent(object):
      def MethodShouldBeOverridden(self):
        pass

    class Child(Parent):
      pass

    class Grandchild(Child):
      def MethodShouldBeOverridden(self):
        pass

    self.assertTrue(class_util.IsMethodOverridden(
        Parent, Grandchild, 'MethodShouldBeOverridden'))

  def testClassNotOverridden(self):
    class Parent(object):
      def MethodShouldBeOverridden(self):
        pass

    class Child(Parent):
      def SomeOtherMethod(self):
        pass

    self.assertFalse(class_util.IsMethodOverridden(
        Parent, Child, 'MethodShouldBeOverridden'))

  def testGrandchildNotOverridden(self):
    # The override on the intermediate Child class still counts.
    class Parent(object):
      def MethodShouldBeOverridden(self):
        pass

    class Child(Parent):
      def MethodShouldBeOverridden(self):
        pass

    class Grandchild(Child):
      def SomeOtherMethod(self):
        pass

    self.assertTrue(class_util.IsMethodOverridden(
        Parent, Grandchild, 'MethodShouldBeOverridden'))

  def testClassNotPresentInParent(self):
    # Asking about a method the parent does not define is an assertion error.
    class Parent(object):
      def MethodShouldBeOverridden(self):
        pass

    class Child(Parent):
      def MethodShouldBeOverridden(self):
        pass

    self.assertRaises(
        AssertionError, class_util.IsMethodOverridden,
        Parent, Child, 'WrongMethod')

  def testInvalidClass(self):
    # Non-class arguments are rejected by assertions.
    class Foo(object):
      def Bar(self):
        pass

    self.assertRaises(
        AssertionError, class_util.IsMethodOverridden, 'invalid', Foo, 'Bar')

    self.assertRaises(
        AssertionError, class_util.IsMethodOverridden, Foo, 'invalid', 'Bar')

  def testMultipleInheritance(self):
    class Aaa(object):
      def One(self):
        pass

    class Bbb(object):
      def Two(self):
        pass

    class Ccc(Aaa, Bbb):
      pass

    class Ddd(object):
      def Three(self):
        pass

    class Eee(Ddd):
      def Three(self):
        pass

    class Fff(Ccc, Eee):
      def One(self):
        pass

    class Ggg(object):
      def Four(self):
        pass

    class Hhh(Fff, Ggg):
      def Two(self):
        pass

    class Iii(Hhh):
      pass

    class Jjj(Iii):
      pass

    self.assertFalse(class_util.IsMethodOverridden(Aaa, Ccc, 'One'))
    self.assertTrue(class_util.IsMethodOverridden(Aaa, Fff, 'One'))
    self.assertTrue(class_util.IsMethodOverridden(Aaa, Hhh, 'One'))
    self.assertTrue(class_util.IsMethodOverridden(Aaa, Jjj, 'One'))
    self.assertFalse(class_util.IsMethodOverridden(Bbb, Ccc, 'Two'))
    self.assertTrue(class_util.IsMethodOverridden(Bbb, Hhh, 'Two'))
    self.assertTrue(class_util.IsMethodOverridden(Bbb, Jjj, 'Two'))
    self.assertFalse(class_util.IsMethodOverridden(Eee, Fff, 'Three'))
|
||||
|
||||
|
||||
@@ -0,0 +1,502 @@
|
||||
# Copyright 2014 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Wrappers for gsutil, for basic interaction with Google Cloud Storage."""
|
||||
|
||||
import collections
|
||||
import contextlib
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import time
|
||||
|
||||
import py_utils
|
||||
from py_utils import cloud_storage_global_lock # pylint: disable=unused-import
|
||||
from py_utils import lock
|
||||
|
||||
# Do a no-op import here so that cloud_storage_global_lock dep is picked up
|
||||
# by https://cs.chromium.org/chromium/src/build/android/test_runner.pydeps.
|
||||
# TODO(nedn, jbudorick): figure out a way to get rid of this ugly hack.
|
||||
|
||||
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
|
||||
|
||||
|
||||
# Well-known Cloud Storage bucket names used by telemetry tooling.
PUBLIC_BUCKET = 'chromium-telemetry'
PARTNER_BUCKET = 'chrome-partner-telemetry'
INTERNAL_BUCKET = 'chrome-telemetry'
TELEMETRY_OUTPUT = 'chrome-telemetry-output'

# Uses ordered dict to make sure that bucket's key-value items are ordered from
# the most open to the most restrictive.
BUCKET_ALIASES = collections.OrderedDict((
    ('public', PUBLIC_BUCKET),
    ('partner', PARTNER_BUCKET),
    ('internal', INTERNAL_BUCKET),
    ('output', TELEMETRY_OUTPUT),
))

# Alias names in the same most-open-first order as BUCKET_ALIASES.
BUCKET_ALIAS_NAMES = list(BUCKET_ALIASES.keys())


# Location of the vendored gsutil executable inside the catapult checkout.
_GSUTIL_PATH = os.path.join(py_utils.GetCatapultDir(), 'third_party', 'gsutil',
                            'gsutil')

# TODO(tbarzic): A workaround for http://crbug.com/386416 and
# http://crbug.com/359293. See |_RunCommand|.
_CROS_GSUTIL_HOME_WAR = '/home/chromeos-test/'


# If Environment variables has DISABLE_CLOUD_STORAGE_IO set to '1', any method
# calls that invoke cloud storage network io will throw exceptions.
DISABLE_CLOUD_STORAGE_IO = 'DISABLE_CLOUD_STORAGE_IO'

# The maximum number of seconds to wait to acquire the pseudo lock for a cloud
# storage file before raising an exception.
LOCK_ACQUISITION_TIMEOUT = 10
|
||||
|
||||
|
||||
class CloudStorageError(Exception):
  """Base exception for every failure raised by this module."""

  @staticmethod
  def _GetConfigInstructions():
    """Build the user-facing help text for configuring gsutil credentials."""
    command = _GSUTIL_PATH
    if py_utils.IsRunningOnCrosDevice():
      # On a CrOS device gsutil needs a writable HOME; mirror the workaround
      # applied in _RunCommand.
      command = 'HOME=%s %s' % (_CROS_GSUTIL_HOME_WAR, _GSUTIL_PATH)
    return ('To configure your credentials:\n'
            ' 1. Run "%s config" and follow its instructions.\n'
            ' 2. If you have a @google.com account, use that account.\n'
            ' 3. For the project-id, just enter 0.' % command)
|
||||
|
||||
|
||||
class PermissionError(CloudStorageError):
  """Credentials exist but lack access to the requested object.

  NOTE(review): this name shadows Python 3's builtin PermissionError within
  this module; renaming would break existing callers, so it is kept.
  """

  def __init__(self):
    super(PermissionError, self).__init__(
        'Attempted to access a file from Cloud Storage but you don\'t '
        'have permission. ' + self._GetConfigInstructions())
|
||||
|
||||
|
||||
class CredentialsError(CloudStorageError):
  """No gsutil credentials are configured at all."""

  def __init__(self):
    super(CredentialsError, self).__init__(
        'Attempted to access a file from Cloud Storage but you have no '
        'configured credentials. ' + self._GetConfigInstructions())
|
||||
|
||||
|
||||
class CloudStorageIODisabled(CloudStorageError):
  """Network IO was attempted while DISABLE_CLOUD_STORAGE_IO is set to '1'."""
  pass
|
||||
|
||||
|
||||
class NotFoundError(CloudStorageError):
  """The requested bucket/object does not exist in Cloud Storage."""
  pass
|
||||
|
||||
|
||||
class ServerError(CloudStorageError):
  """Cloud Storage reported a 500 Internal Server Error."""
  pass
|
||||
|
||||
|
||||
# TODO(tonyg/dtu): Can this be replaced with distutils.spawn.find_executable()?
|
||||
def _FindExecutableInPath(relative_executable_path, *extra_search_paths):
  """Locate an executable by searching extra paths, then $PATH.

  Args:
    relative_executable_path: path fragment joined onto each search directory.
    *extra_search_paths: directories checked before the $PATH entries.

  Returns:
    The first candidate path py_utils.IsExecutable accepts, or None.
  """
  candidates = list(extra_search_paths) + os.environ['PATH'].split(os.pathsep)
  for directory in candidates:
    candidate = os.path.join(directory, relative_executable_path)
    if py_utils.IsExecutable(candidate):
      return candidate
  return None
|
||||
|
||||
|
||||
def _EnsureExecutable(gsutil):
|
||||
"""chmod +x if gsutil is not executable."""
|
||||
st = os.stat(gsutil)
|
||||
if not st.st_mode & stat.S_IEXEC:
|
||||
os.chmod(gsutil, st.st_mode | stat.S_IEXEC)
|
||||
|
||||
|
||||
def _IsRunningOnSwarming():
|
||||
return os.environ.get('SWARMING_HEADLESS') is not None
|
||||
|
||||
def _RunCommand(args):
  """Run gsutil with |args| and return its stdout.

  Args:
    args: list of gsutil subcommand and arguments, e.g. ['cp', src, dst].

  Returns:
    The captured stdout of the gsutil process.

  Raises:
    CloudStorageIODisabled: if DISABLE_CLOUD_STORAGE_IO is '1' and the
        subcommand is not one of the local-only exemptions.
    CloudStorageError (or a subclass): if gsutil exits non-zero; the subclass
        is chosen from stderr by GetErrorObjectForCloudStorageStderr.
  """
  # On cros device, as telemetry is running as root, home will be set to
  # /root/, which is not writable. gsutil will attempt to create a download
  # tracker dir in home dir and fail. To avoid this, override HOME dir to
  # something writable when running on cros device.
  #
  # TODO(tbarzic): Figure out a better way to handle gsutil on cros.
  # http://crbug.com/386416, http://crbug.com/359293.
  gsutil_env = None
  if py_utils.IsRunningOnCrosDevice():
    gsutil_env = os.environ.copy()
    gsutil_env['HOME'] = _CROS_GSUTIL_HOME_WAR
  elif _IsRunningOnSwarming():
    gsutil_env = os.environ.copy()

  # BUG FIX: capture the gsutil subcommand *before* the executable (or the
  # Python interpreter on Windows) is prepended below. The original code
  # tested args[0] after prepending, so it could never equal 'help', 'hash'
  # or 'version' and even those local-only commands were rejected whenever
  # cloud storage IO was disabled.
  subcommand = args[0] if args else None

  if os.name == 'nt':
    # If Windows, prepend python. Python scripts aren't directly executable.
    args = [sys.executable, _GSUTIL_PATH] + args
  else:
    # Don't do it on POSIX, in case someone is using a shell script to
    # redirect.
    args = [_GSUTIL_PATH] + args
    _EnsureExecutable(_GSUTIL_PATH)

  if subcommand not in ('help', 'hash', 'version') and not IsNetworkIOEnabled():
    raise CloudStorageIODisabled(
        "Environment variable DISABLE_CLOUD_STORAGE_IO is set to 1. "
        'Command %s is not allowed to run' % args)

  gsutil = subprocess.Popen(args, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, env=gsutil_env)
  stdout, stderr = gsutil.communicate()

  if gsutil.returncode:
    raise GetErrorObjectForCloudStorageStderr(stderr)

  return stdout
|
||||
|
||||
|
||||
def GetErrorObjectForCloudStorageStderr(stderr):
  """Map gsutil stderr text onto the matching CloudStorageError subclass.

  Returns an exception *instance*; raising it is the caller's job.
  """
  credentials_markers = (
      'You are attempting to access protected data with no configured',
      'Failure: No handler was ready to authenticate.')
  if (stderr.startswith(credentials_markers) or
      re.match('.*401.*does not have .* access to .*', stderr)):
    return CredentialsError()

  permission_markers = ('status=403', 'status 403', '403 Forbidden')
  if (any(marker in stderr for marker in permission_markers) or
      re.match('.*403.*does not have .* access to .*', stderr)):
    return PermissionError()

  not_found_markers = ('No such object', 'No URLs matched',
                       'One or more URLs matched no')
  if (stderr.startswith('InvalidUriError') or
      any(marker in stderr for marker in not_found_markers)):
    return NotFoundError(stderr)

  if '500 Internal Server Error' in stderr:
    return ServerError(stderr)

  # Anything unrecognized falls back to the generic error carrying stderr.
  return CloudStorageError(stderr)
|
||||
|
||||
|
||||
def IsNetworkIOEnabled():
  """Return True unless DISABLE_CLOUD_STORAGE_IO is set to exactly '1'."""
  env_value = os.getenv(DISABLE_CLOUD_STORAGE_IO)

  # Any non-empty value other than '1' is unsupported: warn loudly, but keep
  # the historical behavior of treating it as "enabled".
  if env_value not in (None, '', '1'):
    logger.error(
        'Unsupported value of environment variable '
        'DISABLE_CLOUD_STORAGE_IO. Expected None or \'1\' but got %s.',
        env_value)

  return env_value != '1'
|
||||
|
||||
|
||||
def List(bucket):
  """Return the object names listed directly under gs://<bucket>/."""
  prefix = 'gs://%s/' % bucket
  listing = _RunCommand(['ls', prefix])
  # Each output line is a full gs:// URL; strip the bucket prefix.
  return [line[len(prefix):] for line in listing.splitlines()]
|
||||
|
||||
|
||||
def Exists(bucket, remote_path):
  """Return True if gs://<bucket>/<remote_path> names an existing object."""
  try:
    _RunCommand(['ls', 'gs://%s/%s' % (bucket, remote_path)])
  except NotFoundError:
    return False
  return True
|
||||
|
||||
|
||||
def Move(bucket1, bucket2, remote_path):
  """Move |remote_path| from |bucket1| to |bucket2| (same object path)."""
  source = 'gs://%s/%s' % (bucket1, remote_path)
  destination = 'gs://%s/%s' % (bucket2, remote_path)
  logger.info('Moving %s to %s', source, destination)
  _RunCommand(['mv', source, destination])
|
||||
|
||||
|
||||
def Copy(bucket_from, bucket_to, remote_path_from, remote_path_to):
  """Copy a file from one location in CloudStorage to another.

  The source object and local state are left untouched; any existing file at
  the destination is overwritten.

  Args:
    bucket_from: The cloud storage bucket where the file is currently located.
    bucket_to: The cloud storage bucket it is being copied to.
    remote_path_from: The file path where the file is located in bucket_from.
    remote_path_to: The file path it is being copied to in bucket_to.
  """
  source = 'gs://%s/%s' % (bucket_from, remote_path_from)
  destination = 'gs://%s/%s' % (bucket_to, remote_path_to)
  logger.info('Copying %s to %s', source, destination)
  _RunCommand(['cp', source, destination])
|
||||
|
||||
|
||||
def Delete(bucket, remote_path):
  """Delete gs://<bucket>/<remote_path>."""
  target = 'gs://%s/%s' % (bucket, remote_path)
  logger.info('Deleting %s', target)
  _RunCommand(['rm', target])
|
||||
|
||||
|
||||
def Get(bucket, remote_path, local_path):
  """Download gs://<bucket>/<remote_path> to |local_path|.

  Serialized with other processes via the per-file pseudo lock; the actual
  transfer happens in _GetLocked.
  """
  with _FileLock(local_path):
    _GetLocked(bucket, remote_path, local_path)
|
||||
|
||||
|
||||
# Shared on-disk lock file used to serialize pseudo-lock bookkeeping across
# processes; see _FileLock for the full protocol.
_CLOUD_STORAGE_GLOBAL_LOCK = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), 'cloud_storage_global_lock.py')
|
||||
|
||||
|
||||
@contextlib.contextmanager
def _FileLock(base_path):
  """Context manager holding an exclusive cross-process lock for |base_path|.

  A '<base_path>.pseudo_lock' file is created next to the target and held for
  the duration of the with-block. Both acquisition and release wait up to
  LOCK_ACQUISITION_TIMEOUT seconds (py_utils.WaitFor raises on timeout).
  """
  pseudo_lock_path = '%s.pseudo_lock' % base_path
  _CreateDirectoryIfNecessary(os.path.dirname(pseudo_lock_path))

  # Make sure that we guard the creation, acquisition, release, and removal of
  # the pseudo lock all with the same guard (_CLOUD_STORAGE_GLOBAL_LOCK).
  # Otherwise, we can get nasty interleavings that result in multiple processes
  # thinking they have an exclusive lock, like:
  #
  # (Process 1) Create and acquire the pseudo lock
  # (Process 1) Release the pseudo lock
  # (Process 1) Release the file lock
  # (Process 2) Open and acquire the existing pseudo lock
  # (Process 1) Delete the (existing) pseudo lock
  # (Process 3) Create and acquire a new pseudo lock
  #
  # Using the same guard for creation and removal of the pseudo lock guarantees
  # that all processes are referring to the same lock.
  pseudo_lock_fd = None
  # WaitFor needs a zero-arg callable, so the acquired fd is smuggled out
  # through this single-element list.
  pseudo_lock_fd_return = []
  py_utils.WaitFor(lambda: _AttemptPseudoLockAcquisition(pseudo_lock_path,
                                                         pseudo_lock_fd_return),
                   LOCK_ACQUISITION_TIMEOUT)
  pseudo_lock_fd = pseudo_lock_fd_return[0]

  try:
    yield
  finally:
    # Release must also retry: it needs the global lock, which another
    # process may briefly hold.
    py_utils.WaitFor(lambda: _AttemptPseudoLockRelease(pseudo_lock_fd),
                     LOCK_ACQUISITION_TIMEOUT)
|
||||
|
||||
def _AttemptPseudoLockAcquisition(pseudo_lock_path, pseudo_lock_fd_return):
  """Try to acquire the lock and return a boolean indicating whether the attempt
  was successful. If the attempt was successful, pseudo_lock_fd_return, which
  should be an empty array, will be modified to contain a single entry: the file
  descriptor of the (now acquired) lock file.

  This whole operation is guarded with the global cloud storage lock, which
  prevents race conditions that might otherwise cause multiple processes to
  believe they hold the same pseudo lock (see _FileLock for more details).
  """
  # On success the opened handle deliberately stays open; it is closed later
  # by _AttemptPseudoLockRelease.
  pseudo_lock_fd = None
  try:
    with open(_CLOUD_STORAGE_GLOBAL_LOCK) as global_file:
      with lock.FileLock(global_file, lock.LOCK_EX | lock.LOCK_NB):
        # Attempt to acquire the lock in a non-blocking manner. If we block,
        # then we'll cause deadlock because another process will be unable to
        # acquire the cloud storage global lock in order to release the pseudo
        # lock.
        pseudo_lock_fd = open(pseudo_lock_path, 'w')
        lock.AcquireFileLock(pseudo_lock_fd, lock.LOCK_EX | lock.LOCK_NB)
        pseudo_lock_fd_return.append(pseudo_lock_fd)
        return True
  except (lock.LockException, IOError):
    # We failed to acquire either the global cloud storage lock or the pseudo
    # lock.
    if pseudo_lock_fd:
      pseudo_lock_fd.close()
    return False
|
||||
|
||||
|
||||
def _AttemptPseudoLockRelease(pseudo_lock_fd):
  """Try to release the pseudo lock and return a boolean indicating whether
  the release was successful.

  This whole operation is guarded with the global cloud storage lock, which
  prevents race conditions that might otherwise cause multiple processes to
  believe they hold the same pseudo lock (see _FileLock for more details).
  """
  pseudo_lock_path = pseudo_lock_fd.name
  try:
    with open(_CLOUD_STORAGE_GLOBAL_LOCK) as global_file:
      with lock.FileLock(global_file, lock.LOCK_EX | lock.LOCK_NB):
        lock.ReleaseFileLock(pseudo_lock_fd)
        pseudo_lock_fd.close()
        try:
          os.remove(pseudo_lock_path)
        except OSError:
          # We don't care if the pseudo lock gets removed elsewhere before
          # we have a chance to do so.
          pass
        return True
  except (lock.LockException, IOError):
    # We failed to acquire the global cloud storage lock and are thus unable to
    # release the pseudo lock.
    return False
|
||||
|
||||
|
||||
def _CreateDirectoryIfNecessary(directory):
|
||||
if not os.path.exists(directory):
|
||||
os.makedirs(directory)
|
||||
|
||||
|
||||
def _GetLocked(bucket, remote_path, local_path):
  """Download gs://<bucket>/<remote_path> to |local_path|.

  The caller must already hold the file lock for |local_path| (see _FileLock).
  The download lands in a temporary file in the destination directory first
  and is moved into place only on success, so a failed transfer never
  clobbers an existing file. One retry is attempted on ServerError.
  """
  url = 'gs://%s/%s' % (bucket, remote_path)
  logger.info('Downloading %s to %s', url, local_path)
  _CreateDirectoryIfNecessary(os.path.dirname(local_path))
  with tempfile.NamedTemporaryFile(
      dir=os.path.dirname(local_path),
      delete=False) as partial_download_path:
    try:
      # Windows won't download to an open file.
      partial_download_path.close()
      try:
        _RunCommand(['cp', url, partial_download_path.name])
      except ServerError:
        logger.info('Cloud Storage server error, retrying download')
        _RunCommand(['cp', url, partial_download_path.name])
      shutil.move(partial_download_path.name, local_path)
    finally:
      # Clean up the temp file if the move never happened (download failed).
      if os.path.exists(partial_download_path.name):
        os.remove(partial_download_path.name)
|
||||
|
||||
|
||||
def Insert(bucket, remote_path, local_path, publicly_readable=False):
  """Upload the file at |local_path| to cloud storage.

  Args:
    bucket: the google cloud storage bucket name.
    remote_path: the remote file path in |bucket|.
    local_path: path of the local file to be uploaded.
    publicly_readable: whether the uploaded file has publicly readable
      permission.

  Returns:
    The url where the file is uploaded to.
  """
  url = 'gs://%s/%s' % (bucket, remote_path)
  if publicly_readable:
    # '-a public-read' sets a world-readable canned ACL on the object.
    command_and_args = ['cp', '-a', 'public-read']
    extra_info = ' (publicly readable)'
  else:
    command_and_args = ['cp']
    extra_info = ''
  command_and_args += [local_path, url]
  logger.info('Uploading %s to %s%s', local_path, url, extra_info)
  _RunCommand(command_and_args)
  return 'https://console.developers.google.com/m/cloudstorage/b/%s/o/%s' % (
      bucket, remote_path)
|
||||
|
||||
|
||||
def GetIfHashChanged(cs_path, download_path, bucket, file_hash):
  """Download |cs_path| to |download_path| unless it already matches |file_hash|.

  Returns:
    True if the binary was changed.
  Raises:
    CredentialsError if the user has no configured credentials.
    PermissionError if the user does not have permission to access the bucket.
    NotFoundError if the file is not in the given bucket in cloud_storage.
  """
  with _FileLock(download_path):
    already_current = (os.path.exists(download_path) and
                       CalculateHash(download_path) == file_hash)
    if already_current:
      return False
    _GetLocked(bucket, cs_path, download_path)
    return True
|
||||
|
||||
|
||||
def GetIfChanged(file_path, bucket):
  """Gets the file at file_path if it has a hash file that doesn't match or
  if there is no local copy of file_path, but there is a hash file for it.

  Returns:
    True if the binary was changed.
  Raises:
    CredentialsError if the user has no configured credentials.
    PermissionError if the user does not have permission to access the bucket.
    NotFoundError if the file is not in the given bucket in cloud_storage.
    RuntimeError if the fetched binary's hash does not match the .sha1 file.
  """
  with _FileLock(file_path):
    hash_path = file_path + '.sha1'
    fetch_ts_path = file_path + '.fetchts'
    if not os.path.exists(hash_path):
      logger.warning('Hash file not found: %s', hash_path)
      return False

    expected_hash = ReadHash(hash_path)

    # To save the time required for computing the binary hash (an expensive
    # operation, see crbug.com/793609#c2 for details), any time we fetch a new
    # binary we save not only that binary but also the time of the fetch in
    # |fetch_ts_path|. Whenever the file might need updating (its hash in
    # |hash_path| changed), we only need to compare the timestamp of
    # |hash_path| with the timestamp in |fetch_ts_path| to figure out
    # whether the update has already been done.
    #
    # Notes: for this to work, we make the assumption that only
    # cloud_storage.GetIfChanged modifies the local |file_path| binary.

    if os.path.exists(fetch_ts_path) and os.path.exists(file_path):
      with open(fetch_ts_path) as f:
        data = f.read().strip()
        last_binary_fetch_ts = float(data)

      # A fetch newer than the .sha1 file means the binary is already current.
      if last_binary_fetch_ts > os.path.getmtime(hash_path):
        return False

    # Whether the local binary already matches expected_hash or we need to
    # fetch a new binary from the cloud, refresh the timestamp in
    # |fetch_ts_path| either way, since it is outdated relative to the .sha1
    # file's last modified time.
    with open(fetch_ts_path, 'w') as f:
      f.write(str(time.time()))

    if os.path.exists(file_path) and CalculateHash(file_path) == expected_hash:
      return False
    # Objects are stored in the bucket keyed by their content hash.
    _GetLocked(bucket, expected_hash, file_path)
    if CalculateHash(file_path) != expected_hash:
      # Invalidate the timestamp cache so the next call re-checks the hash.
      os.remove(fetch_ts_path)
      raise RuntimeError(
          'Binary stored in cloud storage does not have hash matching .sha1 '
          'file. Please make sure that the binary file is uploaded using '
          'depot_tools/upload_to_google_storage.py script or through automatic '
          'framework.')
    return True
|
||||
|
||||
|
||||
def GetFilesInDirectoryIfChanged(directory, bucket):
  """Recursively fetch every stale binary with a .sha1 file under |directory|.

  Scans the tree for '.sha1' hash files and downloads the corresponding
  binary from |bucket| whenever the local copy is missing or its hash does
  not match (see GetIfChanged).

  Raises:
    ValueError: if |directory| does not exist or is the filesystem root.
  """
  if not os.path.isdir(directory):
    raise ValueError(
        '%s does not exist. Must provide a valid directory path.' % directory)
  # Don't allow the root directory to be a serving_dir.
  if directory == os.path.abspath(os.sep):
    raise ValueError('Trying to serve root directory from HTTP server.')
  for dirpath, _, filenames in os.walk(directory):
    for filename in filenames:
      base_name, extension = os.path.splitext(os.path.join(dirpath, filename))
      if extension == '.sha1':
        GetIfChanged(base_name, bucket)
|
||||
|
||||
|
||||
def CalculateHash(file_path):
  """Return the SHA-1 hex digest of the file at |file_path|."""
  digest = hashlib.sha1()
  with open(file_path, 'rb') as stream:
    # Stream in 1 MiB chunks so large files never reside fully in memory.
    for chunk in iter(lambda: stream.read(1024 * 1024), b''):
      digest.update(chunk)
  return digest.hexdigest()
|
||||
|
||||
|
||||
def ReadHash(hash_path):
  """Return the hash stored in |hash_path| (first 1 KiB, trailing whitespace stripped)."""
  with open(hash_path, 'rb') as hash_file:
    raw = hash_file.read(1024)
  return raw.rstrip()
|
||||
@@ -0,0 +1,5 @@
|
||||
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# This file is used by cloud_storage._FileLock implementation, don't delete it!
|
||||
@@ -0,0 +1,387 @@
|
||||
# Copyright 2014 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
import mock
|
||||
from pyfakefs import fake_filesystem_unittest
|
||||
|
||||
import py_utils
|
||||
from py_utils import cloud_storage
|
||||
from py_utils import lock
|
||||
|
||||
# Path of the real global-lock module that cloud_storage._FileLock opens.
# NOTE(review): appears unused in the visible portion of this file — confirm
# against the rest of the test module before removing.
_CLOUD_STORAGE_GLOBAL_LOCK_PATH = os.path.join(
    os.path.dirname(__file__), 'cloud_storage_global_lock.py')
|
||||
|
||||
def _FakeReadHash(_):
|
||||
return 'hashthis!'
|
||||
|
||||
|
||||
def _FakeCalulateHashMatchesRead(_):
|
||||
return 'hashthis!'
|
||||
|
||||
|
||||
def _FakeCalulateHashNewHash(_):
|
||||
return 'omgnewhash'
|
||||
|
||||
|
||||
class BaseFakeFsUnitTest(fake_filesystem_unittest.TestCase):
  """Shared pyfakefs scaffolding for the cloud_storage tests.

  Installs a fake filesystem containing the gsutil binary path and enables
  cloud storage IO for the duration of each test.
  """

  def setUp(self):
    # Snapshot the environment so tearDown can restore it exactly.
    self.original_environ = os.environ.copy()
    os.environ['DISABLE_CLOUD_STORAGE_IO'] = ''
    self.setUpPyfakefs()
    # cloud_storage expects the gsutil executable to exist on disk.
    self.fs.CreateFile(
        os.path.join(py_utils.GetCatapultDir(),
                     'third_party', 'gsutil', 'gsutil'))

  def CreateFiles(self, file_paths):
    """Create each listed path as an empty file in the fake filesystem."""
    for f in file_paths:
      self.fs.CreateFile(f)

  def tearDown(self):
    self.tearDownPyfakefs()
    os.environ = self.original_environ

  def _FakeRunCommand(self, cmd):
    # Stand-in for cloud_storage._RunCommand that performs no IO.
    pass

  def _FakeGet(self, bucket, remote_path, local_path):
    # Stand-in for cloud_storage._GetLocked that performs no IO.
    pass
|
||||
|
||||
|
||||
class CloudStorageFakeFsUnitTest(BaseFakeFsUnitTest):
  """Tests for cloud_storage's command plumbing using a faked filesystem."""

  def _AssertRunCommandRaisesError(self, communicate_strs, error):
    """Assert _RunCommand maps each stderr string in |communicate_strs| to |error|."""
    with mock.patch('py_utils.cloud_storage.subprocess.Popen') as popen:
      p_mock = mock.Mock()
      popen.return_value = p_mock
      p_mock.returncode = 1
      for stderr in communicate_strs:
        p_mock.communicate.return_value = ('', stderr)
        self.assertRaises(error, cloud_storage._RunCommand, [])

  def testRunCommandCredentialsError(self):
    strs = ['You are attempting to access protected data with no configured',
            'Failure: No handler was ready to authenticate.']
    self._AssertRunCommandRaisesError(strs, cloud_storage.CredentialsError)

  def testRunCommandPermissionError(self):
    strs = ['status=403', 'status 403', '403 Forbidden']
    self._AssertRunCommandRaisesError(strs, cloud_storage.PermissionError)

  def testRunCommandNotFoundError(self):
    strs = ['InvalidUriError', 'No such object', 'No URLs matched',
            'One or more URLs matched no', 'InvalidUriError']
    self._AssertRunCommandRaisesError(strs, cloud_storage.NotFoundError)

  def testRunCommandServerError(self):
    strs = ['500 Internal Server Error']
    self._AssertRunCommandRaisesError(strs, cloud_storage.ServerError)

  def testRunCommandGenericError(self):
    strs = ['Random string']
    self._AssertRunCommandRaisesError(strs, cloud_storage.CloudStorageError)

  def testInsertCreatesValidCloudUrl(self):
    """Insert returns the console URL for the uploaded object."""
    orig_run_command = cloud_storage._RunCommand
    try:
      cloud_storage._RunCommand = self._FakeRunCommand
      remote_path = 'test-remote-path.html'
      local_path = 'test-local-path.html'
      cloud_url = cloud_storage.Insert(cloud_storage.PUBLIC_BUCKET,
                                       remote_path, local_path)
      self.assertEqual('https://console.developers.google.com/m/cloudstorage'
                       '/b/chromium-telemetry/o/test-remote-path.html',
                       cloud_url)
    finally:
      # Always restore the real _RunCommand, even on assertion failure.
      cloud_storage._RunCommand = orig_run_command

  @mock.patch('py_utils.cloud_storage.subprocess')
  def testExistsReturnsFalse(self, subprocess_mock):
    """Exists translates a 'matched no objects' gsutil failure into False."""
    p_mock = mock.Mock()
    subprocess_mock.Popen.return_value = p_mock
    p_mock.communicate.return_value = (
        '',
        'CommandException: One or more URLs matched no objects.\n')
    p_mock.returncode_result = 1
    self.assertFalse(cloud_storage.Exists('fake bucket',
                                          'fake remote path'))

  @unittest.skipIf(sys.platform.startswith('win'),
                   'https://github.com/catapult-project/catapult/issues/1861')
  def testGetFilesInDirectoryIfChanged(self):
    """Only .sha1-bearing files trigger fetches; bad directories raise."""
    self.CreateFiles([
        'real_dir_path/dir1/1file1.sha1',
        'real_dir_path/dir1/1file2.txt',
        'real_dir_path/dir1/1file3.sha1',
        'real_dir_path/dir2/2file.txt',
        'real_dir_path/dir3/3file1.sha1'])

    # Counting stub standing in for GetIfChanged.
    def IncrementFilesUpdated(*_):
      IncrementFilesUpdated.files_updated += 1
    IncrementFilesUpdated.files_updated = 0
    orig_get_if_changed = cloud_storage.GetIfChanged
    cloud_storage.GetIfChanged = IncrementFilesUpdated
    try:
      self.assertRaises(ValueError, cloud_storage.GetFilesInDirectoryIfChanged,
                        os.path.abspath(os.sep), cloud_storage.PUBLIC_BUCKET)
      self.assertEqual(0, IncrementFilesUpdated.files_updated)
      self.assertRaises(ValueError, cloud_storage.GetFilesInDirectoryIfChanged,
                        'fake_dir_path', cloud_storage.PUBLIC_BUCKET)
      self.assertEqual(0, IncrementFilesUpdated.files_updated)
      cloud_storage.GetFilesInDirectoryIfChanged('real_dir_path',
                                                 cloud_storage.PUBLIC_BUCKET)
      # Exactly the three .sha1 files created above.
      self.assertEqual(3, IncrementFilesUpdated.files_updated)
    finally:
      cloud_storage.GetIfChanged = orig_get_if_changed

  def testCopy(self):
    """Copy builds the expected 'cp gs://... gs://...' command."""
    orig_run_command = cloud_storage._RunCommand

    def AssertCorrectRunCommandArgs(args):
      self.assertEqual(expected_args, args)
    cloud_storage._RunCommand = AssertCorrectRunCommandArgs
    expected_args = ['cp', 'gs://bucket1/remote_path1',
                     'gs://bucket2/remote_path2']
    try:
      cloud_storage.Copy('bucket1', 'bucket2', 'remote_path1', 'remote_path2')
    finally:
      cloud_storage._RunCommand = orig_run_command

  @mock.patch('py_utils.cloud_storage.subprocess.Popen')
  def testSwarmingUsesExistingEnv(self, mock_popen):
    """Under swarming, gsutil inherits a copy of the current environment."""
    os.environ['SWARMING_HEADLESS'] = '1'

    mock_gsutil = mock_popen()
    mock_gsutil.communicate = mock.MagicMock(return_value=('a', 'b'))
    mock_gsutil.returncode = None

    cloud_storage.Copy('bucket1', 'bucket2', 'remote_path1', 'remote_path2')

    # -1 is subprocess.PIPE.
    mock_popen.assert_called_with(
        mock.ANY, stderr=-1, env=os.environ, stdout=-1)

  @mock.patch('py_utils.cloud_storage._FileLock')
  def testDisableCloudStorageIo(self, unused_lock_mock):
    """Every network-touching entry point raises while IO is disabled."""
    os.environ['DISABLE_CLOUD_STORAGE_IO'] = '1'
    dir_path = 'real_dir_path'
    self.fs.CreateDirectory(dir_path)
    file_path = os.path.join(dir_path, 'file1')
    file_path_sha = file_path + '.sha1'

    def CleanTimeStampFile():
      # GetIfChanged writes the .fetchts marker before attempting the fetch.
      os.remove(file_path + '.fetchts')

    self.CreateFiles([file_path, file_path_sha])
    with open(file_path_sha, 'w') as f:
      f.write('hash1234')
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.Copy('bucket1', 'bucket2', 'remote_path1', 'remote_path2')
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.Get('bucket', 'foo', file_path)
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.GetIfChanged(file_path, 'foo')
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.GetIfHashChanged('bar', file_path, 'bucket', 'hash1234')
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.Insert('bucket', 'foo', file_path)

    CleanTimeStampFile()
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.GetFilesInDirectoryIfChanged(dir_path, 'bucket')
|
||||
|
||||
|
||||
class GetIfChangedTests(BaseFakeFsUnitTest):
|
||||
|
||||
  def setUp(self):
    """Save the real ReadHash/CalculateHash so each test may monkey-patch them."""
    super(GetIfChangedTests, self).setUp()
    self._orig_read_hash = cloud_storage.ReadHash
    self._orig_calculate_hash = cloud_storage.CalculateHash
|
||||
|
||||
  def tearDown(self):
    """Restore the monkey-patched cloud_storage functions."""
    super(GetIfChangedTests, self).tearDown()
    cloud_storage.CalculateHash = self._orig_calculate_hash
    cloud_storage.ReadHash = self._orig_read_hash
|
||||
|
||||
  @mock.patch('py_utils.cloud_storage._FileLock')
  @mock.patch('py_utils.cloud_storage._GetLocked')
  def testHashPathDoesNotExists(self, unused_get_locked, unused_lock_mock):
    """GetIfChanged returns False when no .sha1 hash file exists."""
    cloud_storage.ReadHash = _FakeReadHash
    cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead
    file_path = 'test-file-path.wpr'

    cloud_storage._GetLocked = self._FakeGet
    # hash_path doesn't exist.
    self.assertFalse(cloud_storage.GetIfChanged(file_path,
                                                cloud_storage.PUBLIC_BUCKET))
|
||||
|
||||
  @mock.patch('py_utils.cloud_storage._FileLock')
  @mock.patch('py_utils.cloud_storage._GetLocked')
  def testHashPathExistsButFilePathDoesNot(
      self, unused_get_locked, unused_lock_mock):
    """A hash file with no local binary triggers a fetch (returns True)."""
    cloud_storage.ReadHash = _FakeReadHash
    cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead
    file_path = 'test-file-path.wpr'
    hash_path = file_path + '.sha1'

    # hash_path exists, but file_path doesn't.
    self.CreateFiles([hash_path])
    self.assertTrue(cloud_storage.GetIfChanged(file_path,
                                               cloud_storage.PUBLIC_BUCKET))
|
||||
|
||||
  @mock.patch('py_utils.cloud_storage._FileLock')
  @mock.patch('py_utils.cloud_storage._GetLocked')
  def testHashPathAndFileHashExistWithSameHash(
      self, unused_get_locked, unused_lock_mock):
    """A local binary matching the recorded hash is not re-fetched."""
    cloud_storage.ReadHash = _FakeReadHash
    cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead
    file_path = 'test-file-path.wpr'

    # hash_path and file_path exist, and have same hash.
    self.CreateFiles([file_path])
    self.assertFalse(cloud_storage.GetIfChanged(file_path,
                                                cloud_storage.PUBLIC_BUCKET))
|
||||
|
||||
  @mock.patch('py_utils.cloud_storage._FileLock')
  @mock.patch('py_utils.cloud_storage._GetLocked')
  def testHashPathAndFileHashExistWithDifferentHash(
      self, mock_get_locked, unused_get_locked):
    """GetIfChanged fetches (returns True) when hashes disagree."""
    # NOTE(review): decorators apply bottom-up, so the first mock arg is
    # _GetLocked and the second is _FileLock; the second parameter's name
    # ("unused_get_locked") is misleading — verify before renaming.
    cloud_storage.ReadHash = _FakeReadHash
    cloud_storage.CalculateHash = _FakeCalulateHashNewHash
    file_path = 'test-file-path.wpr'
    hash_path = file_path + '.sha1'

    def _FakeGetLocked(bucket, expected_hash, file_path):
      del bucket, expected_hash, file_path  # unused
      # Simulate a successful fetch: afterwards the local hash matches.
      cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead

    mock_get_locked.side_effect = _FakeGetLocked

    self.CreateFiles([file_path, hash_path])
    # hash_path and file_path exist, and have different hashes.
    self.assertTrue(cloud_storage.GetIfChanged(file_path,
                                               cloud_storage.PUBLIC_BUCKET))
|
||||
|
||||
  @mock.patch('py_utils.cloud_storage._FileLock')
  @mock.patch('py_utils.cloud_storage.CalculateHash')
  @mock.patch('py_utils.cloud_storage._GetLocked')
  def testNoHashComputationNeededUponSecondCall(
      self, mock_get_locked, mock_calculate_hash, unused_get_locked):
    """After a fetch, repeated GetIfChanged calls skip hash computation."""
    mock_calculate_hash.side_effect = _FakeCalulateHashNewHash
    cloud_storage.ReadHash = _FakeReadHash
    file_path = 'test-file-path.wpr'
    hash_path = file_path + '.sha1'

    def _FakeGetLocked(bucket, expected_hash, file_path):
      del bucket, expected_hash, file_path  # unused
      # Simulate a successful fetch: afterwards the local hash matches.
      cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead

    mock_get_locked.side_effect = _FakeGetLocked

    self.CreateFiles([file_path, hash_path])
    # hash_path and file_path exist, and have different hashes. This first call
    # will invoke a fetch.
    self.assertTrue(cloud_storage.GetIfChanged(file_path,
                                               cloud_storage.PUBLIC_BUCKET))

    # The fetch left a .fetchts file on machine.
    self.assertTrue(os.path.exists(file_path + '.fetchts'))

    # Subsequent invocations of GetIfChanged should not invoke CalculateHash.
    mock_calculate_hash.assert_not_called()
    self.assertFalse(cloud_storage.GetIfChanged(file_path,
                                                cloud_storage.PUBLIC_BUCKET))
    self.assertFalse(cloud_storage.GetIfChanged(file_path,
                                                cloud_storage.PUBLIC_BUCKET))
|
||||
|
||||
  @mock.patch('py_utils.cloud_storage._FileLock')
  @mock.patch('py_utils.cloud_storage.CalculateHash')
  @mock.patch('py_utils.cloud_storage._GetLocked')
  def testRefetchingFileUponHashFileChange(
      self, mock_get_locked, mock_calculate_hash, unused_get_locked):
    """A .sha1 file newer than .fetchts triggers a re-fetch."""
    mock_calculate_hash.side_effect = _FakeCalulateHashNewHash
    cloud_storage.ReadHash = _FakeReadHash
    file_path = 'test-file-path.wpr'
    hash_path = file_path + '.sha1'

    def _FakeGetLocked(bucket, expected_hash, file_path):
      del bucket, expected_hash, file_path  # unused
      # Simulate a successful fetch: afterwards the local hash matches.
      cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead

    mock_get_locked.side_effect = _FakeGetLocked

    self.CreateFiles([file_path, hash_path])
    # hash_path and file_path exist, and have different hashes. This first call
    # will invoke a fetch.
    self.assertTrue(cloud_storage.GetIfChanged(file_path,
                                               cloud_storage.PUBLIC_BUCKET))

    # The fetch left a .fetchts file on machine.
    self.assertTrue(os.path.exists(file_path + '.fetchts'))

    with open(file_path + '.fetchts') as f:
      fetchts = float(f.read())

    # Updating the .sha1 hash_path file with the new hash after .fetchts
    # is created.  self.fs is presumably the pyfakefs fake filesystem —
    # SetMTime makes the hash file look newer than the fetch timestamp.
    file_obj = self.fs.GetObject(hash_path)
    file_obj.SetMTime(fetchts + 100)

    cloud_storage.ReadHash = lambda _: 'hashNeW'
    def _FakeGetLockedNewHash(bucket, expected_hash, file_path):
      del bucket, expected_hash, file_path  # unused
      cloud_storage.CalculateHash = lambda _: 'hashNeW'

    mock_get_locked.side_effect = _FakeGetLockedNewHash

    # hash_path and file_path exist, and have different hashes. This first call
    # will invoke a fetch.
    self.assertTrue(cloud_storage.GetIfChanged(file_path,
                                               cloud_storage.PUBLIC_BUCKET))
|
||||
|
||||
|
||||
class CloudStorageRealFsUnitTest(unittest.TestCase):
  """Lock-timeout tests for cloud_storage.GetIfChanged on the real FS."""

  def setUp(self):
    # Save and clear DISABLE_CLOUD_STORAGE_IO so GetIfChanged actually
    # attempts locking; restored wholesale in tearDown.
    self.original_environ = os.environ.copy()
    os.environ['DISABLE_CLOUD_STORAGE_IO'] = ''

  def tearDown(self):
    os.environ = self.original_environ

  @mock.patch('py_utils.cloud_storage.LOCK_ACQUISITION_TIMEOUT', .005)
  def testGetPseudoLockUnavailableCausesTimeout(self):
    """Holding the per-file pseudo lock makes GetIfChanged time out fast."""
    with tempfile.NamedTemporaryFile(suffix='.pseudo_lock') as pseudo_lock_fd:
      with lock.FileLock(pseudo_lock_fd, lock.LOCK_EX | lock.LOCK_NB):
        with self.assertRaises(py_utils.TimeoutException):
          file_path = pseudo_lock_fd.name.replace('.pseudo_lock', '')
          cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET)

  @mock.patch('py_utils.cloud_storage.LOCK_ACQUISITION_TIMEOUT', .005)
  def testGetGlobalLockUnavailableCausesTimeout(self):
    """Holding the global cloud-storage lock makes GetIfChanged time out."""
    with open(_CLOUD_STORAGE_GLOBAL_LOCK_PATH) as global_lock_fd:
      with lock.FileLock(global_lock_fd, lock.LOCK_EX | lock.LOCK_NB):
        tmp_dir = tempfile.mkdtemp()
        try:
          file_path = os.path.join(tmp_dir, 'foo')
          with self.assertRaises(py_utils.TimeoutException):
            cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET)
        finally:
          shutil.rmtree(tmp_dir)
|
||||
|
||||
|
||||
class CloudStorageErrorHandlingTest(unittest.TestCase):
  """Checks mapping of gsutil stderr text to typed cloud_storage errors."""

  def runTest(self):
    # 401 (anonymous user) should map to CredentialsError.
    self.assertIsInstance(cloud_storage.GetErrorObjectForCloudStorageStderr(
        'ServiceException: 401 Anonymous users does not have '
        'storage.objects.get access to object chrome-partner-telemetry'),
        cloud_storage.CredentialsError)
    # 403 (insufficient rights) should map to PermissionError.
    self.assertIsInstance(cloud_storage.GetErrorObjectForCloudStorageStderr(
        '403 Caller does not have storage.objects.list access to bucket '
        'chrome-telemetry'), cloud_storage.PermissionError)
|
||||
@@ -0,0 +1,33 @@
|
||||
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
class _OptionalContextManager(object):
|
||||
|
||||
def __init__(self, manager, condition):
|
||||
self._manager = manager
|
||||
self._condition = condition
|
||||
|
||||
def __enter__(self):
|
||||
if self._condition:
|
||||
return self._manager.__enter__()
|
||||
return None
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
if self._condition:
|
||||
return self._manager.__exit__(exc_type, exc_val, exc_tb)
|
||||
return None
|
||||
|
||||
|
||||
def Optional(manager, condition):
  """Wraps the provided context manager and runs it if condition is True.

  Args:
    manager: A context manager to conditionally run.
    condition: If true, runs the given context manager.
  Returns:
    A context manager that conditionally executes the given manager.
  """
  return _OptionalContextManager(manager, condition)
|
||||
|
||||
@@ -0,0 +1,34 @@
|
||||
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import unittest
|
||||
|
||||
from py_utils import contextlib_ext
|
||||
|
||||
|
||||
class OptionalUnittest(unittest.TestCase):
  """Tests for contextlib_ext.Optional."""

  class SampleContextMgr(object):
    """Minimal context manager that records enter/exit calls."""

    def __init__(self):
      self.entered = False
      self.exited = False

    def __enter__(self):
      self.entered = True

    def __exit__(self, exc_type, exc_val, exc_tb):
      self.exited = True

  def testConditionTrue(self):
    mgr = self.SampleContextMgr()
    with contextlib_ext.Optional(mgr, True):
      self.assertTrue(mgr.entered)
    self.assertTrue(mgr.exited)

  def testConditionFalse(self):
    mgr = self.SampleContextMgr()
    with contextlib_ext.Optional(mgr, False):
      self.assertFalse(mgr.entered)
    self.assertFalse(mgr.exited)
|
||||
@@ -0,0 +1,49 @@
|
||||
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
|
||||
import py_utils
|
||||
|
||||
def GetOSAndArchForCurrentDesktopPlatform():
  """Returns an (os_name, arch) pair describing the host desktop platform."""
  os_name = GetOSNameForCurrentDesktopPlatform()
  arch = GetArchForCurrentDesktopPlatform(os_name)
  return os_name, arch
|
||||
|
||||
|
||||
def GetOSNameForCurrentDesktopPlatform():
  """Returns a short OS name: 'chromeos', 'linux', 'mac', 'win', or, for any
  other host, the raw sys.platform value."""
  if py_utils.IsRunningOnCrosDevice():
    return 'chromeos'
  if sys.platform.startswith('linux'):
    return 'linux'
  # Map the remaining well-known sys.platform values to short names;
  # fall back to sys.platform itself for anything unrecognized.
  return {'darwin': 'mac', 'win32': 'win'}.get(sys.platform, sys.platform)
|
||||
|
||||
|
||||
def GetArchForCurrentDesktopPlatform(os_name):
  """Returns the host machine architecture string (platform.machine()).

  Raises:
    NotImplementedError: if os_name is 'chromeos'.
  """
  if os_name == 'chromeos':
    # Current tests outside of telemetry don't run on chromeos, and
    # platform.machine is not the way telemetry gets the arch name on chromeos.
    raise NotImplementedError()
  return platform.machine()
|
||||
|
||||
|
||||
def GetChromeApkOsVersion(version_name):
  """Maps a Chrome APK version name to an Android OS release letter.

  Args:
    version_name: version name string whose first character must be the
      uppercase Android release letter (e.g. 'KitKat', 'Lollipop').

  Returns:
    'k' for releases before L, 'n' for releases after M, 'l' otherwise.
  """
  version = version_name[0]
  # Fix: the original assert message contained a bare '%s' that was never
  # interpolated, so failures printed a literal placeholder.
  assert version.isupper(), (
      'First character of versions name %s was not an uppercase letter.'
      % version_name)
  if version < 'L':
    return 'k'
  elif version > 'M':
    return 'n'
  return 'l'
|
||||
|
||||
|
||||
def ChromeBinariesConfigPath():
  """Returns the absolute path of chrome_binaries.json next to this module."""
  module_dir = os.path.dirname(os.path.abspath(__file__))
  return os.path.realpath(os.path.join(module_dir, 'chrome_binaries.json'))
|
||||
191
tools/adb/systrace/catapult/common/py_utils/py_utils/discover.py
Normal file
191
tools/adb/systrace/catapult/common/py_utils/py_utils/discover.py
Normal file
@@ -0,0 +1,191 @@
|
||||
# Copyright 2012 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import fnmatch
|
||||
import importlib
|
||||
import inspect
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from py_utils import camel_case
|
||||
|
||||
|
||||
def DiscoverModules(start_dir, top_level_dir, pattern='*'):
  """Discover all modules in |start_dir| which match |pattern|.

  Args:
    start_dir: The directory to recursively search.
    top_level_dir: The top level of the package, for importing.
    pattern: Unix shell-style pattern for filtering the filenames to import.

  Returns:
    list of modules.
  """
  # start_dir and top_level_dir must be consistent with each other.
  start_dir = os.path.realpath(start_dir)
  top_level_dir = os.path.realpath(top_level_dir)

  modules = []
  # Sort directories (and, below, filenames) so that traversal of
  # |top_level_dir| is deterministic.
  walk_entries = sorted(os.walk(start_dir), key=lambda entry: entry[0])
  for dir_path, _, filenames in walk_entries:
    filenames.sort()
    for filename in filenames:
      # Skip hidden/private files, non-Python files and pattern mismatches.
      if filename.startswith('.') or filename.startswith('_'):
        continue
      if os.path.splitext(filename)[1] != '.py':
        continue
      if not fnmatch.fnmatch(filename, pattern):
        continue

      # Derive the dotted module name relative to |top_level_dir|.
      module_rel_path = os.path.relpath(
          os.path.join(dir_path, filename), top_level_dir)
      module_name = re.sub(r'[/\\]', '.', os.path.splitext(module_rel_path)[0])

      # Import with top_level_dir first on sys.path so name conflicts in
      # module parts resolve in favor of this package; restore afterwards.
      original_sys_path = sys.path[:]
      try:
        sys.path.insert(0, top_level_dir)
        modules.append(importlib.import_module(module_name))
      finally:
        sys.path = original_sys_path
  return modules
|
||||
|
||||
|
||||
def AssertNoKeyConflicts(classes_by_key_1, classes_by_key_2):
  """Asserts that any key present in both dicts maps to the same class."""
  shared_keys = set(classes_by_key_1) & set(classes_by_key_2)
  for key in shared_keys:
    first, second = classes_by_key_1[key], classes_by_key_2[key]
    assert first is second, (
        'Found conflicting classes for the same key: '
        'key=%s, class_1=%s, class_2=%s' % (key, first, second))
|
||||
|
||||
|
||||
# TODO(dtu): Normalize all discoverable classes to have corresponding module
|
||||
# and class names, then always index by class name.
|
||||
def DiscoverClasses(start_dir,
                    top_level_dir,
                    base_class,
                    pattern='*',
                    index_by_class_name=True,
                    directly_constructable=False):
  """Discover all classes in |start_dir| which subclass |base_class|.

  Base classes that contain subclasses are ignored by default.

  Args:
    start_dir: The directory to recursively search.
    top_level_dir: The top level of the package, for importing.
    base_class: The base class to search for.
    pattern: Unix shell-style pattern for filtering the filenames to import.
    index_by_class_name: If True, use class name converted to
      lowercase_with_underscores instead of module name in return dict keys.
    directly_constructable: If True, will only return classes that can be
      constructed without arguments

  Returns:
    dict of {module_name: class} or {underscored_class_name: class}
  """
  classes = {}
  for module in DiscoverModules(start_dir, top_level_dir, pattern):
    new_classes = DiscoverClassesInModule(
        module, base_class, index_by_class_name, directly_constructable)
    # TODO(nednguyen): we should remove index_by_class_name once
    # benchmark_smoke_unittest in chromium/src/tools/perf no longer relied
    # naming collisions to reduce the number of smoked benchmark tests.
    # crbug.com/548652
    if index_by_class_name:
      AssertNoKeyConflicts(classes, new_classes)
    classes.update(new_classes)
  return classes
|
||||
|
||||
|
||||
# TODO(nednguyen): we should remove index_by_class_name once
|
||||
# benchmark_smoke_unittest in chromium/src/tools/perf no longer relied
|
||||
# naming collisions to reduce the number of smoked benchmark tests.
|
||||
# crbug.com/548652
|
||||
def DiscoverClassesInModule(module,
                            base_class,
                            index_by_class_name=False,
                            directly_constructable=False):
  """Discover all classes in |module| which subclass |base_class|.

  Base classes that contain subclasses are ignored by default.

  Args:
    module: The module to search.
    base_class: The base class to search for.
    index_by_class_name: If True, use class name converted to
      lowercase_with_underscores instead of module name in return dict keys.
    directly_constructable: If True, only include classes constructable
      without arguments.

  Returns:
    dict of {module_name: class} or {underscored_class_name: class}
  """
  classes = {}
  for _, candidate in inspect.getmembers(module):
    # Keep only public, proper subclasses of |base_class| that are defined in
    # this very module (excluding classes merely imported into it).
    if (not inspect.isclass(candidate)
        or not issubclass(candidate, base_class)
        or candidate is base_class
        or candidate.__name__.startswith('_')
        or candidate.__module__ != module.__name__):
      continue

    if index_by_class_name:
      key_name = camel_case.ToUnderscore(candidate.__name__)
    else:
      key_name = module.__name__.split('.')[-1]

    if directly_constructable and not IsDirectlyConstructable(candidate):
      continue
    if key_name in classes and index_by_class_name:
      assert classes[key_name] is candidate, (
          'Duplicate key_name with different objs detected: '
          'key=%s, obj1=%s, obj2=%s' % (
              key_name, classes[key_name], candidate))
    else:
      classes[key_name] = candidate

  return classes
|
||||
|
||||
|
||||
def IsDirectlyConstructable(cls):
  """Returns True if instance of |cls| can be construct without arguments."""
  assert inspect.isclass(cls)
  if not hasattr(cls, '__init__'):
    # Case |class A: pass|.
    return True
  if cls.__init__ is object.__init__:
    # Case |class A(object): pass|.
    return True
  # Case |class (object):| with |__init__| other than |object.__init__|.
  # Fix: inspect.getargspec was removed in Python 3.11; prefer
  # getfullargspec and fall back to getargspec on Python 2.
  try:
    spec = inspect.getfullargspec(cls.__init__)
  except AttributeError:
    spec = inspect.getargspec(cls.__init__)  # pylint: disable=deprecated-method
  defaults = spec.defaults or ()
  # Return true if |self| is only arg without a default.
  return len(spec.args) == len(defaults) + 1
|
||||
|
||||
|
||||
_COUNTER = [0]
|
||||
|
||||
|
||||
def _GetUniqueModuleName():
|
||||
_COUNTER[0] += 1
|
||||
return "module_" + str(_COUNTER[0])
|
||||
@@ -0,0 +1,151 @@
|
||||
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import unittest
|
||||
|
||||
from py_utils import discover
|
||||
import six
|
||||
|
||||
|
||||
class DiscoverTest(unittest.TestCase):
  """Tests for discover.DiscoverClasses over the test_data fixtures."""

  def setUp(self):
    # Fixture modules under test_data/discoverable_classes define several
    # Exception subclasses used as discovery targets below.
    self._base_dir = os.path.join(os.path.dirname(__file__), 'test_data')
    self._start_dir = os.path.join(self._base_dir, 'discoverable_classes')
    self._base_class = Exception

  def testDiscoverClassesWithIndexByModuleName(self):
    """Keys are module names when index_by_class_name=False."""
    classes = discover.DiscoverClasses(self._start_dir,
                                       self._base_dir,
                                       self._base_class,
                                       index_by_class_name=False)

    actual_classes = dict(
        (name, cls.__name__) for name, cls in six.iteritems(classes))
    expected_classes = {
        'another_discover_dummyclass': 'DummyExceptionWithParameterImpl1',
        'discover_dummyclass': 'DummyException',
        'parameter_discover_dummyclass': 'DummyExceptionWithParameterImpl2'
    }
    self.assertEqual(actual_classes, expected_classes)

  def testDiscoverDirectlyConstructableClassesWithIndexByClassName(self):
    """Only zero-arg-constructable classes survive directly_constructable."""
    classes = discover.DiscoverClasses(self._start_dir,
                                       self._base_dir,
                                       self._base_class,
                                       directly_constructable=True)

    actual_classes = dict(
        (name, cls.__name__) for name, cls in six.iteritems(classes))
    expected_classes = {
        'dummy_exception': 'DummyException',
        'dummy_exception_impl1': 'DummyExceptionImpl1',
        'dummy_exception_impl2': 'DummyExceptionImpl2',
    }
    self.assertEqual(actual_classes, expected_classes)

  def testDiscoverClassesWithIndexByClassName(self):
    """Default indexing: keys are underscored class names."""
    classes = discover.DiscoverClasses(self._start_dir, self._base_dir,
                                       self._base_class)

    actual_classes = dict(
        (name, cls.__name__) for name, cls in six.iteritems(classes))
    expected_classes = {
        'dummy_exception': 'DummyException',
        'dummy_exception_impl1': 'DummyExceptionImpl1',
        'dummy_exception_impl2': 'DummyExceptionImpl2',
        'dummy_exception_with_parameter_impl1':
            'DummyExceptionWithParameterImpl1',
        'dummy_exception_with_parameter_impl2':
            'DummyExceptionWithParameterImpl2'
    }
    self.assertEqual(actual_classes, expected_classes)

  def testDiscoverClassesWithPatternAndIndexByModule(self):
    """|pattern| restricts which fixture files are imported."""
    classes = discover.DiscoverClasses(self._start_dir,
                                       self._base_dir,
                                       self._base_class,
                                       pattern='another*',
                                       index_by_class_name=False)

    actual_classes = dict(
        (name, cls.__name__) for name, cls in six.iteritems(classes))
    expected_classes = {
        'another_discover_dummyclass': 'DummyExceptionWithParameterImpl1'
    }
    self.assertEqual(actual_classes, expected_classes)

  def testDiscoverDirectlyConstructableClassesWithPatternAndIndexByClassName(
      self):
    """Pattern + directly_constructable combine; parameterized class drops."""
    classes = discover.DiscoverClasses(self._start_dir,
                                       self._base_dir,
                                       self._base_class,
                                       pattern='another*',
                                       directly_constructable=True)

    actual_classes = dict(
        (name, cls.__name__) for name, cls in six.iteritems(classes))
    expected_classes = {
        'dummy_exception_impl1': 'DummyExceptionImpl1',
        'dummy_exception_impl2': 'DummyExceptionImpl2',
    }
    self.assertEqual(actual_classes, expected_classes)

  def testDiscoverClassesWithPatternAndIndexByClassName(self):
    """Pattern with default (class-name) indexing."""
    classes = discover.DiscoverClasses(self._start_dir,
                                       self._base_dir,
                                       self._base_class,
                                       pattern='another*')

    actual_classes = dict(
        (name, cls.__name__) for name, cls in six.iteritems(classes))
    expected_classes = {
        'dummy_exception_impl1': 'DummyExceptionImpl1',
        'dummy_exception_impl2': 'DummyExceptionImpl2',
        'dummy_exception_with_parameter_impl1':
            'DummyExceptionWithParameterImpl1',
    }
    self.assertEqual(actual_classes, expected_classes)
|
||||
|
||||
|
||||
class ClassWithoutInitDefOne:  # pylint: disable=old-style-class, no-init
  # Fixture: old-style (Python 2) class with no __init__ at all.
  pass
|
||||
|
||||
|
||||
class ClassWithoutInitDefTwo(object):
  # Fixture: new-style class inheriting object.__init__.
  pass
|
||||
|
||||
|
||||
class ClassWhoseInitOnlyHasSelf(object):
  # Fixture: custom __init__ taking only self — directly constructable.
  def __init__(self):
    pass
|
||||
|
||||
|
||||
class ClassWhoseInitWithDefaultArguments(object):
  # Fixture: every extra __init__ argument has a default — constructable.
  def __init__(self, dog=1, cat=None, cow=None, fud='a'):
    pass
|
||||
|
||||
|
||||
class ClassWhoseInitWithDefaultArgumentsAndNonDefaultArguments(object):
  # Fixture: |x| has no default — NOT directly constructable.
  def __init__(self, x, dog=1, cat=None, fish=None, fud='a'):
    pass
|
||||
|
||||
|
||||
class IsDirectlyConstructableTest(unittest.TestCase):
  """Tests discover.IsDirectlyConstructable against the fixture classes."""

  def testIsDirectlyConstructableReturnsTrue(self):
    # Constructable: no __init__, object.__init__, self-only, all-defaults.
    self.assertTrue(discover.IsDirectlyConstructable(ClassWithoutInitDefOne))
    self.assertTrue(discover.IsDirectlyConstructable(ClassWithoutInitDefTwo))
    self.assertTrue(discover.IsDirectlyConstructable(ClassWhoseInitOnlyHasSelf))
    self.assertTrue(
        discover.IsDirectlyConstructable(ClassWhoseInitWithDefaultArguments))

  def testIsDirectlyConstructableReturnsFalse(self):
    # Not constructable: at least one argument without a default.
    self.assertFalse(
        discover.IsDirectlyConstructable(
            ClassWhoseInitWithDefaultArgumentsAndNonDefaultArguments))
|
||||
@@ -0,0 +1,84 @@
|
||||
# Copyright 2019 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import functools
|
||||
import logging
|
||||
import sys
|
||||
|
||||
|
||||
def BestEffort(func):
|
||||
"""Decorator to log and dismiss exceptions if one if already being handled.
|
||||
|
||||
Note: This is largely a workaround for the lack of support of exception
|
||||
chaining in Python 2.7, this decorator will no longer be needed in Python 3.
|
||||
|
||||
Typical usage would be in |Close| or |Disconnect| methods, to dismiss but log
|
||||
any further exceptions raised if the current execution context is already
|
||||
handling an exception. For example:
|
||||
|
||||
class Client(object):
|
||||
def Connect(self):
|
||||
# code to connect ...
|
||||
|
||||
@exc_util.BestEffort
|
||||
def Disconnect(self):
|
||||
# code to disconnect ...
|
||||
|
||||
client = Client()
|
||||
try:
|
||||
client.Connect()
|
||||
except:
|
||||
client.Disconnect()
|
||||
raise
|
||||
|
||||
If an exception is raised by client.Connect(), and then a second exception
|
||||
is raised by client.Disconnect(), the decorator will log the second exception
|
||||
and let the original one be re-raised.
|
||||
|
||||
Otherwise, in Python 2.7 and without the decorator, the second exception is
|
||||
the one propagated to the caller; while information about the original one,
|
||||
usually more important, is completely lost.
|
||||
|
||||
Note that if client.Disconnect() is called in a context where an exception
|
||||
is *not* being handled, then any exceptions raised within the method will
|
||||
get through and be passed on to callers for them to handle in the usual way.
|
||||
|
||||
The decorator can also be used on cleanup functions meant to be called on
|
||||
a finally block, however you must also include an except-raise clause to
|
||||
properly signal (in Python 2.7) whether an exception is being handled; e.g.:
|
||||
|
||||
@exc_util.BestEffort
|
||||
def cleanup():
|
||||
# do cleanup things ...
|
||||
|
||||
try:
|
||||
process(thing)
|
||||
except:
|
||||
raise # Needed to let cleanup know if an exception is being handled.
|
||||
finally:
|
||||
cleanup()
|
||||
|
||||
Failing to include the except-raise block has the same effect as not
|
||||
including the decorator at all. Namely: exceptions during |cleanup| are
|
||||
raised and swallow any prior exceptions that occurred during |process|.
|
||||
"""
|
||||
@functools.wraps(func)
|
||||
def Wrapper(*args, **kwargs):
|
||||
exc_type = sys.exc_info()[0]
|
||||
if exc_type is None:
|
||||
# Not currently handling an exception; let any errors raise exceptions
|
||||
# as usual.
|
||||
func(*args, **kwargs)
|
||||
else:
|
||||
# Otherwise, we are currently handling an exception, dismiss and log
|
||||
# any further cascading errors. Callers are responsible to handle the
|
||||
# original exception.
|
||||
try:
|
||||
func(*args, **kwargs)
|
||||
except Exception: # pylint: disable=broad-except
|
||||
logging.exception(
|
||||
'While handling a %s, the following exception was also raised:',
|
||||
exc_type.__name__)
|
||||
|
||||
return Wrapper
|
||||
@@ -0,0 +1,183 @@
|
||||
# Copyright 2019 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import re
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
from py_utils import exc_util
|
||||
|
||||
|
||||
class FakeConnectionError(Exception):
  # Raised by FaultyClient.Connect when configured to fail.
  pass
|
||||
|
||||
|
||||
class FakeDisconnectionError(Exception):
  # Raised by FaultyClient.Disconnect when configured to fail.
  pass
|
||||
|
||||
|
||||
class FakeProcessingError(Exception):
  # Raised by FaultyClient.Process when configured to fail.
  pass
|
||||
|
||||
|
||||
class FakeCleanupError(Exception):
  # Raised by FaultyClient.Cleanup when configured to fail.
  pass
|
||||
|
||||
|
||||
class FaultyClient(object):
  """Test double whose methods record calls and fail on demand.

  Constructed with the exception classes that should be raised; each method
  adds its own name to |self.called| and raises its designated exception if
  that class was requested at construction time.
  """

  def __init__(self, *args):
    self.failures = set(args)
    self.called = set()

  def _Invoke(self, name, error_cls):
    # Shared record-then-maybe-raise behavior for all methods.
    self.called.add(name)
    if error_cls in self.failures:
      raise error_cls('Oops!')

  def Connect(self):
    self._Invoke('Connect', FakeConnectionError)

  def Process(self):
    self._Invoke('Process', FakeProcessingError)

  @exc_util.BestEffort
  def Disconnect(self):
    self._Invoke('Disconnect', FakeDisconnectionError)

  @exc_util.BestEffort
  def Cleanup(self):
    self._Invoke('Cleanup', FakeCleanupError)
|
||||
|
||||
|
||||
class ReraiseTests(unittest.TestCase):
|
||||
def assertLogMatches(self, pattern):
|
||||
self.assertRegexpMatches(
|
||||
sys.stderr.getvalue(), pattern) # pylint: disable=no-member
|
||||
|
||||
def assertLogNotMatches(self, pattern):
|
||||
self.assertNotRegexpMatches(
|
||||
sys.stderr.getvalue(), pattern) # pylint: disable=no-member
|
||||
|
||||
def testTryRaisesExceptRaises(self):
|
||||
client = FaultyClient(FakeConnectionError, FakeDisconnectionError)
|
||||
|
||||
# The connection error reaches the top level, while the disconnection
|
||||
# error is logged.
|
||||
with self.assertRaises(FakeConnectionError):
|
||||
try:
|
||||
client.Connect()
|
||||
except:
|
||||
client.Disconnect()
|
||||
raise
|
||||
|
||||
self.assertLogMatches(re.compile(
|
||||
r'While handling a FakeConnectionError, .* was also raised:\n'
|
||||
r'Traceback \(most recent call last\):\n'
|
||||
r'.*\n'
|
||||
r'FakeDisconnectionError: Oops!\n', re.DOTALL))
|
||||
self.assertItemsEqual(client.called, ['Connect', 'Disconnect'])
|
||||
|
||||
def testTryRaisesExceptDoesnt(self):
|
||||
client = FaultyClient(FakeConnectionError)
|
||||
|
||||
# The connection error reaches the top level, disconnecting did not raise
|
||||
# an exception (so nothing is logged).
|
||||
with self.assertRaises(FakeConnectionError):
|
||||
try:
|
||||
client.Connect()
|
||||
except:
|
||||
client.Disconnect()
|
||||
raise
|
||||
|
||||
self.assertLogNotMatches('FakeDisconnectionError')
|
||||
self.assertItemsEqual(client.called, ['Connect', 'Disconnect'])
|
||||
|
||||
def testTryPassesNoException(self):
|
||||
client = FaultyClient(FakeDisconnectionError)
|
||||
|
||||
# If there is no connection error, the except clause is not called (even if
|
||||
# it would have raised an exception).
|
||||
try:
|
||||
client.Connect()
|
||||
except:
|
||||
client.Disconnect()
|
||||
raise
|
||||
|
||||
self.assertLogNotMatches('FakeConnectionError')
|
||||
self.assertLogNotMatches('FakeDisconnectionError')
|
||||
self.assertItemsEqual(client.called, ['Connect'])
|
||||
|
||||
def testTryRaisesFinallyRaises(self):
|
||||
worker = FaultyClient(FakeProcessingError, FakeCleanupError)
|
||||
|
||||
# The processing error reaches the top level, the cleanup error is logged.
|
||||
with self.assertRaises(FakeProcessingError):
|
||||
try:
|
||||
worker.Process()
|
||||
except:
|
||||
raise # Needed for Cleanup to know if an exception is handled.
|
||||
finally:
|
||||
worker.Cleanup()
|
||||
|
||||
self.assertLogMatches(re.compile(
|
||||
r'While handling a FakeProcessingError, .* was also raised:\n'
|
||||
r'Traceback \(most recent call last\):\n'
|
||||
r'.*\n'
|
||||
r'FakeCleanupError: Oops!\n', re.DOTALL))
|
||||
self.assertItemsEqual(worker.called, ['Process', 'Cleanup'])
|
||||
|
||||
def testTryRaisesFinallyDoesnt(self):
|
||||
worker = FaultyClient(FakeProcessingError)
|
||||
|
||||
# The processing error reaches the top level, the cleanup code runs fine.
|
||||
with self.assertRaises(FakeProcessingError):
|
||||
try:
|
||||
worker.Process()
|
||||
except:
|
||||
raise # Needed for Cleanup to know if an exception is handled.
|
||||
finally:
|
||||
worker.Cleanup()
|
||||
|
||||
self.assertLogNotMatches('FakeProcessingError')
|
||||
self.assertLogNotMatches('FakeCleanupError')
|
||||
self.assertItemsEqual(worker.called, ['Process', 'Cleanup'])
|
||||
|
||||
def testTryPassesFinallyRaises(self):
|
||||
worker = FaultyClient(FakeCleanupError)
|
||||
|
||||
# The processing code runs fine, the cleanup code raises an exception
|
||||
# which reaches the top level.
|
||||
with self.assertRaises(FakeCleanupError):
|
||||
try:
|
||||
worker.Process()
|
||||
except:
|
||||
raise # Needed for Cleanup to know if an exception is handled.
|
||||
finally:
|
||||
worker.Cleanup()
|
||||
|
||||
self.assertLogNotMatches('FakeProcessingError')
|
||||
self.assertLogNotMatches('FakeCleanupError')
|
||||
self.assertItemsEqual(worker.called, ['Process', 'Cleanup'])
|
||||
|
||||
  def testTryRaisesExceptRaisesFinallyRaises(self):
    # All three operations are faulty: Process(), Disconnect() and Cleanup().
    worker = FaultyClient(
        FakeProcessingError, FakeDisconnectionError, FakeCleanupError)

    # Chaining try-except-finally works fine. Only the processing error reaches
    # the top level; the other two are logged.
    with self.assertRaises(FakeProcessingError):
      try:
        worker.Process()
      except:  # Intentionally bare: re-raised after best-effort Disconnect.
        worker.Disconnect()
        raise
      finally:
        worker.Cleanup()

    self.assertLogMatches('FakeDisconnectionError')
    self.assertLogMatches('FakeCleanupError')
    self.assertItemsEqual(worker.called, ['Process', 'Disconnect', 'Cleanup'])
||||
@@ -0,0 +1,128 @@
|
||||
# Copyright 2017 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
import re
|
||||
import six
|
||||
|
||||
|
||||
class ParseError(Exception):
  """Raised when expectation data cannot be parsed."""
||||
|
||||
|
||||
class Expectation(object):
  """A single parsed test expectation (bug, test, conditions, results)."""

  def __init__(self, reason, test, conditions, results):
    """Constructor for expectations.

    Args:
      reason: String that indicates the reason for disabling.
      test: String indicating which test is being disabled.
      conditions: List of tags indicating which conditions to disable for.
          Conditions are combined using logical and. Example: ['Mac', 'Debug']
      results: List of outcomes for test. Example: ['Skip', 'Pass']
    """
    assert isinstance(reason, six.string_types) or reason is None
    self._reason = reason
    assert isinstance(test, six.string_types)
    self._test = test
    assert isinstance(conditions, list)
    self._conditions = conditions
    assert isinstance(results, list)
    self._results = results

  def __eq__(self, other):
    # Tuple comparison is equivalent to the chained 'and' of the four fields.
    return ((self.reason, self.test, self.conditions, self.results) ==
            (other.reason, other.test, other.conditions, other.results))

  @property
  def reason(self):
    return self._reason

  @property
  def test(self):
    return self._test

  @property
  def conditions(self):
    return self._conditions

  @property
  def results(self):
    return self._results
|
||||
|
||||
|
||||
class TestExpectationParser(object):
  """Parse expectations data in TA/DA format.

  This parser covers the 'tagged' test lists format in:
  bit.ly/chromium-test-list-format

  Takes raw expectations data as a string read from the TA/DA expectation file
  in the format:

    # This is an example expectation file.
    #
    # tags: Mac Mac10.10 Mac10.11
    # tags: Win Win8

    crbug.com/123 [ Win ] benchmark/story [ Skip ]
    ...
  """

  TAG_TOKEN = '# tags:'
  # NOTE: the '.' in 'crbug.com' is deliberately left unescaped to preserve
  # the original (slightly permissive) matching behavior.
  _MATCH_STRING = r'^(?:(crbug.com/\d+) )?'  # The bug field (optional).
  _MATCH_STRING += r'(?:\[ (.+) \] )?' # The label field (optional).
  _MATCH_STRING += r'(\S+) ' # The test path field.
  _MATCH_STRING += r'\[ ([^\[.]+) \]'  # The expectation field.
  _MATCH_STRING += r'(\s+#.*)?$' # End comment (optional).
  MATCHER = re.compile(_MATCH_STRING)

  def __init__(self, raw_data):
    """Parses |raw_data| eagerly; raises ParseError on malformed input."""
    self._tags = []
    self._expectations = []
    self._ParseRawExpectationData(raw_data)

  def _ParseRawExpectationData(self, raw_data):
    # enumerate() is already iterable; no need to materialize it in a list.
    for count, line in enumerate(raw_data.splitlines(), start=1):
      # Handle metadata and comments.
      if line.startswith(self.TAG_TOKEN):
        for word in line[len(self.TAG_TOKEN):].split():
          # Expectations must be after all tags are declared.
          if self._expectations:
            raise ParseError('Tag found after first expectation.')
          self._tags.append(word)
      elif line.startswith('#') or not line:
        continue  # Ignore, it is just a comment or empty.
      else:
        self._expectations.append(
            self._ParseExpectationLine(count, line, self._tags))

  def _ParseExpectationLine(self, line_number, line, tags):
    match = self.MATCHER.match(line)
    if not match:
      raise ParseError(
          'Expectation has invalid syntax on line %d: %s'
          % (line_number, line))
    # Unused group is optional trailing comment.
    reason, raw_conditions, test, results, _ = match.groups()
    # str.split() already returns a new list; the identity comprehension
    # previously wrapped around it was redundant.
    conditions = raw_conditions.split() if raw_conditions else []

    for c in conditions:
      if c not in tags:
        raise ParseError(
            'Condition %s not found in expectations tag data. Line %d'
            % (c, line_number))
    return Expectation(reason, test, conditions, results.split())

  @property
  def expectations(self):
    return self._expectations

  @property
  def tags(self):
    return self._tags

|
||||
@@ -0,0 +1,170 @@
|
||||
# Copyright 2017 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
|
||||
import unittest
|
||||
|
||||
from py_utils import expectations_parser
|
||||
from six.moves import range # pylint: disable=redefined-builtin
|
||||
|
||||
|
||||
class TestExpectationParserTest(unittest.TestCase):
  """Tests for expectations_parser.TestExpectationParser.

  Parsed expectations are compared to the expected lists wholesale with
  assertEqual: the previous per-index loops passed vacuously whenever the
  parser produced fewer expectations than expected (including zero).
  """

  def testInitWithGoodData(self):
    good_data = """
# This is a test expectation file.
#
# tags: tag1 tag2 tag3
# tags: tag4 Mac Win Debug

crbug.com/12345 [ Mac ] b1/s1 [ Skip ]
crbug.com/23456 [ Mac Debug ] b1/s2 [ Skip ]
"""
    parser = expectations_parser.TestExpectationParser(good_data)
    tags = ['tag1', 'tag2', 'tag3', 'tag4', 'Mac', 'Win', 'Debug']
    self.assertEqual(parser.tags, tags)
    expected_outcome = [
        expectations_parser.Expectation(
            'crbug.com/12345', 'b1/s1', ['Mac'], ['Skip']),
        expectations_parser.Expectation(
            'crbug.com/23456', 'b1/s2', ['Mac', 'Debug'], ['Skip'])
    ]
    self.assertEqual(parser.expectations, expected_outcome)

  def testInitWithBadData(self):
    bad_data = """
# This is a test expectation file.
#
# tags: tag1 tag2 tag3
# tags: tag4

crbug.com/12345 [ Mac b1/s1 [ Skip ]
"""
    with self.assertRaises(expectations_parser.ParseError):
      expectations_parser.TestExpectationParser(bad_data)

  def testTagAfterExpectationsStart(self):
    bad_data = """
# This is a test expectation file.
#
# tags: tag1 tag2 tag3

crbug.com/12345 [ tag1 ] b1/s1 [ Skip ]

# tags: tag4
"""
    with self.assertRaises(expectations_parser.ParseError):
      expectations_parser.TestExpectationParser(bad_data)

  def testParseExpectationLineEverythingThere(self):
    raw_data = '# tags: Mac\ncrbug.com/23456 [ Mac ] b1/s2 [ Skip ]'
    parser = expectations_parser.TestExpectationParser(raw_data)
    expected_outcome = [
        expectations_parser.Expectation(
            'crbug.com/23456', 'b1/s2', ['Mac'], ['Skip'])
    ]
    self.assertEqual(parser.expectations, expected_outcome)

  def testParseExpectationLineBadTag(self):
    raw_data = '# tags: None\ncrbug.com/23456 [ Mac ] b1/s2 [ Skip ]'
    with self.assertRaises(expectations_parser.ParseError):
      expectations_parser.TestExpectationParser(raw_data)

  def testParseExpectationLineNoConditions(self):
    raw_data = '# tags: All\ncrbug.com/12345 b1/s1 [ Skip ]'
    parser = expectations_parser.TestExpectationParser(raw_data)
    expected_outcome = [
        expectations_parser.Expectation(
            'crbug.com/12345', 'b1/s1', [], ['Skip']),
    ]
    self.assertEqual(parser.expectations, expected_outcome)

  def testParseExpectationLineNoBug(self):
    raw_data = '# tags: All\n[ All ] b1/s1 [ Skip ]'
    parser = expectations_parser.TestExpectationParser(raw_data)
    expected_outcome = [
        expectations_parser.Expectation(
            None, 'b1/s1', ['All'], ['Skip']),
    ]
    self.assertEqual(parser.expectations, expected_outcome)

  def testParseExpectationLineNoBugNoConditions(self):
    raw_data = '# tags: All\nb1/s1 [ Skip ]'
    parser = expectations_parser.TestExpectationParser(raw_data)
    expected_outcome = [
        expectations_parser.Expectation(
            None, 'b1/s1', [], ['Skip']),
    ]
    self.assertEqual(parser.expectations, expected_outcome)

  def testParseExpectationLineMultipleConditions(self):
    raw_data = ('# tags:All None Batman\n'
                'crbug.com/123 [ All None Batman ] b1/s1 [ Skip ]')
    parser = expectations_parser.TestExpectationParser(raw_data)
    expected_outcome = [
        expectations_parser.Expectation(
            'crbug.com/123', 'b1/s1', ['All', 'None', 'Batman'], ['Skip']),
    ]
    self.assertEqual(parser.expectations, expected_outcome)

  def testParseExpectationLineBadConditionBracket(self):
    raw_data = '# tags: Mac\ncrbug.com/23456 ] Mac ] b1/s2 [ Skip ]'
    with self.assertRaises(expectations_parser.ParseError):
      expectations_parser.TestExpectationParser(raw_data)

  def testParseExpectationLineBadResultBracket(self):
    raw_data = '# tags: Mac\ncrbug.com/23456 ] Mac ] b1/s2 ] Skip ]'
    with self.assertRaises(expectations_parser.ParseError):
      expectations_parser.TestExpectationParser(raw_data)

  def testParseExpectationLineBadConditionBracketSpacing(self):
    raw_data = '# tags: Mac\ncrbug.com/2345 [Mac] b1/s1 [ Skip ]'
    with self.assertRaises(expectations_parser.ParseError):
      expectations_parser.TestExpectationParser(raw_data)

  def testParseExpectationLineBadResultBracketSpacing(self):
    raw_data = '# tags: Mac\ncrbug.com/2345 [ Mac ] b1/s1 [Skip]'
    with self.assertRaises(expectations_parser.ParseError):
      expectations_parser.TestExpectationParser(raw_data)

  def testParseExpectationLineNoClosingConditionBracket(self):
    raw_data = '# tags: Mac\ncrbug.com/2345 [ Mac b1/s1 [ Skip ]'
    with self.assertRaises(expectations_parser.ParseError):
      expectations_parser.TestExpectationParser(raw_data)

  def testParseExpectationLineNoClosingResultBracket(self):
    raw_data = '# tags: Mac\ncrbug.com/2345 [ Mac ] b1/s1 [ Skip'
    with self.assertRaises(expectations_parser.ParseError):
      expectations_parser.TestExpectationParser(raw_data)

  def testParseExpectationLineUrlInTestName(self):
    raw_data = (
        '# tags: Mac\ncrbug.com/123 [ Mac ] b.1/http://google.com [ Skip ]')
    expected_outcomes = [
        expectations_parser.Expectation(
            'crbug.com/123', 'b.1/http://google.com', ['Mac'], ['Skip'])
    ]
    parser = expectations_parser.TestExpectationParser(raw_data)
    self.assertEqual(parser.expectations, expected_outcomes)

  def testParseExpectationLineEndingComment(self):
    raw_data = '# tags: Mac\ncrbug.com/23456 [ Mac ] b1/s2 [ Skip ] # abc 123'
    parser = expectations_parser.TestExpectationParser(raw_data)
    expected_outcome = [
        expectations_parser.Expectation(
            'crbug.com/23456', 'b1/s2', ['Mac'], ['Skip'])
    ]
    self.assertEqual(parser.expectations, expected_outcome)
|
||||
@@ -0,0 +1,23 @@
|
||||
# Copyright 2018 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import errno
|
||||
import os
|
||||
import shutil
|
||||
|
||||
|
||||
def CopyFileWithIntermediateDirectories(source_path, dest_path):
  """Copies a file and creates intermediate directories as needed.

  Args:
    source_path: Path to the source file.
    dest_path: Path to the destination where the source file should be copied.

  Raises:
    OSError: If the intermediate directories cannot be created for a reason
        other than already existing.
  """
  assert os.path.exists(source_path)
  dest_dir = os.path.dirname(dest_path)
  # dirname() is empty when dest_path is a bare filename in the current
  # directory; os.makedirs('') raises, so only create directories when
  # there is actually something to create.
  if dest_dir:
    try:
      os.makedirs(dest_dir)
    except OSError as e:
      if e.errno != errno.EEXIST:
        raise
  shutil.copy(source_path, dest_path)
|
||||
@@ -0,0 +1,66 @@
|
||||
# Copyright 2018 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import errno
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
from py_utils import file_util
|
||||
|
||||
|
||||
class FileUtilTest(unittest.TestCase):
  """Tests for file_util.CopyFileWithIntermediateDirectories."""

  def setUp(self):
    self._tempdir = tempfile.mkdtemp()

  def tearDown(self):
    shutil.rmtree(self._tempdir)

  def testCopySimple(self):
    source_path = os.path.join(self._tempdir, 'source')
    with open(source_path, 'w') as f:
      f.write('data')

    dest_path = os.path.join(self._tempdir, 'dest')

    self.assertFalse(os.path.exists(dest_path))
    file_util.CopyFileWithIntermediateDirectories(source_path, dest_path)
    self.assertTrue(os.path.exists(dest_path))
    # Use a context manager instead of leaking the file handle.
    with open(dest_path, 'r') as f:
      self.assertEqual('data', f.read())

  def testCopyMakeDirectories(self):
    source_path = os.path.join(self._tempdir, 'source')
    with open(source_path, 'w') as f:
      f.write('data')

    dest_path = os.path.join(self._tempdir, 'path', 'to', 'dest')

    self.assertFalse(os.path.exists(dest_path))
    file_util.CopyFileWithIntermediateDirectories(source_path, dest_path)
    self.assertTrue(os.path.exists(dest_path))
    with open(dest_path, 'r') as f:
      self.assertEqual('data', f.read())

  def testCopyOverwrites(self):
    source_path = os.path.join(self._tempdir, 'source')
    with open(source_path, 'w') as f:
      f.write('source_data')

    dest_path = os.path.join(self._tempdir, 'dest')
    with open(dest_path, 'w') as f:
      f.write('existing_data')

    file_util.CopyFileWithIntermediateDirectories(source_path, dest_path)
    with open(dest_path, 'r') as f:
      self.assertEqual('source_data', f.read())

  def testRaisesError(self):
    source_path = os.path.join(self._tempdir, 'source')
    with open(source_path, 'w') as f:
      f.write('data')

    dest_path = ""
    # IOError is included for Python 2 compatibility (on Python 3 it is an
    # alias of OSError).
    with self.assertRaises((IOError, OSError)) as cm:
      file_util.CopyFileWithIntermediateDirectories(source_path, dest_path)
    # Bug fix: OSError exposes the error code as '.errno'; the previous
    # '.error_code' attribute does not exist and raised AttributeError.
    self.assertEqual(errno.ENOENT, cm.exception.errno)
|
||||
121
tools/adb/systrace/catapult/common/py_utils/py_utils/lock.py
Normal file
121
tools/adb/systrace/catapult/common/py_utils/py_utils/lock.py
Normal file
@@ -0,0 +1,121 @@
|
||||
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
|
||||
LOCK_EX = None  # Exclusive lock
LOCK_SH = None  # Shared lock
LOCK_NB = None  # Non-blocking (LockException is raised if resource is locked)


class LockException(Exception):
  """Raised when a file lock cannot be acquired."""
  pass


# Populate the LOCK_* flags above with platform-specific values; they stay
# None on platforms that are neither Windows ('nt') nor POSIX.
# pylint: disable=import-error
# pylint: disable=wrong-import-position
if os.name == 'nt':
  import win32con
  import win32file
  import pywintypes
  LOCK_EX = win32con.LOCKFILE_EXCLUSIVE_LOCK
  LOCK_SH = 0  # the default
  LOCK_NB = win32con.LOCKFILE_FAIL_IMMEDIATELY
  # Shared OVERLAPPED structure passed to every LockFileEx/UnlockFileEx call.
  _OVERLAPPED = pywintypes.OVERLAPPED()
elif os.name == 'posix':
  import fcntl
  LOCK_EX = fcntl.LOCK_EX
  LOCK_SH = fcntl.LOCK_SH
  LOCK_NB = fcntl.LOCK_NB
# pylint: enable=import-error
# pylint: enable=wrong-import-position
|
||||
|
||||
|
||||
@contextlib.contextmanager
def FileLock(target_file, flags):
  """ Lock the target file. Similar to AcquireFileLock but allow user to write:
    with FileLock(f, LOCK_EX):
       ...do stuff on file f without worrying about race condition
  Args: see AcquireFileLock's documentation.
  """
  AcquireFileLock(target_file, flags)
  try:
    yield
  finally:
    # Always release, even if the body raised; acquire/yield/release
    # ordering is the entire contract of this context manager.
    ReleaseFileLock(target_file)
|
||||
|
||||
|
||||
def AcquireFileLock(target_file, flags):
  """ Lock the target file. Note that if |target_file| is closed, the lock is
    automatically released.
  Args:
    target_file: file handle of the file to acquire lock.
    flags: can be any of the type LOCK_EX, LOCK_SH, LOCK_NB, or a bitwise
      OR combination of flags.
  """
  # Only these flag combinations are meaningful; anything else is a caller
  # bug. (NOTE: assert statements are stripped under 'python -O'.)
  assert flags in (
      LOCK_EX, LOCK_SH, LOCK_NB, LOCK_EX | LOCK_NB, LOCK_SH | LOCK_NB)
  # Dispatch to the platform-specific implementation.
  if os.name == 'nt':
    _LockImplWin(target_file, flags)
  elif os.name == 'posix':
    _LockImplPosix(target_file, flags)
  else:
    raise NotImplementedError('%s is not supported' % os.name)
|
||||
|
||||
|
||||
def ReleaseFileLock(target_file):
  """ Unlock the target file.
  Args:
    target_file: file handle of the file to release the lock.
  """
  # Dispatch to the platform-specific implementation; releasing an
  # already-unlocked file is a silent no-op on both platforms.
  if os.name == 'nt':
    _UnlockImplWin(target_file)
  elif os.name == 'posix':
    _UnlockImplPosix(target_file)
  else:
    raise NotImplementedError('%s is not supported' % os.name)
|
||||
|
||||
# These implementations are based on
# http://code.activestate.com/recipes/65203/

def _LockImplWin(target_file, flags):
  """Windows lock implementation via win32file.LockFileEx.

  Raises LockException when the lock is already held elsewhere
  (winerror 33: 'The process cannot access the file...').
  """
  hfile = win32file._get_osfhandle(target_file.fileno())
  try:
    win32file.LockFileEx(hfile, flags, 0, -0x10000, _OVERLAPPED)
  except pywintypes.error as exc_value:
    # Bug fix: 'exc_value[0]' relies on Python-2-only exception indexing;
    # use .args for Python 2/3 compatibility.
    if exc_value.args[0] == 33:
      raise LockException('Error trying acquiring lock of %s: %s' %
                          (target_file.name, exc_value.args[2]))
    else:
      raise
|
||||
|
||||
|
||||
def _UnlockImplWin(target_file):
  """Windows unlock implementation via win32file.UnlockFileEx."""
  hfile = win32file._get_osfhandle(target_file.fileno())
  try:
    win32file.UnlockFileEx(hfile, 0, -0x10000, _OVERLAPPED)
  except pywintypes.error as exc_value:
    # Bug fix: 'exc_value[0]' relies on Python-2-only exception indexing;
    # use .args for Python 2/3 compatibility.
    if exc_value.args[0] == 158:
      # error: (158, 'UnlockFileEx', 'The segment is already unlocked.')
      # To match the 'posix' implementation, silently ignore this error
      pass
    else:
      # Q: Are there exceptions/codes we should be dealing with here?
      raise
|
||||
|
||||
|
||||
def _LockImplPosix(target_file, flags):
|
||||
try:
|
||||
fcntl.flock(target_file.fileno(), flags)
|
||||
except IOError as exc_value:
|
||||
if exc_value[0] == 11 or exc_value[0] == 35:
|
||||
raise LockException('Error trying acquiring lock of %s: %s' %
|
||||
(target_file.name, exc_value[1]))
|
||||
else:
|
||||
raise
|
||||
|
||||
|
||||
def _UnlockImplPosix(target_file):
  # Releases any flock held on |target_file|; a no-op if it is not locked.
  fcntl.flock(target_file.fileno(), fcntl.LOCK_UN)
|
||||
@@ -0,0 +1,169 @@
|
||||
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
|
||||
import multiprocessing
|
||||
import os
|
||||
import tempfile
|
||||
import time
|
||||
import unittest
|
||||
|
||||
from py_utils import lock
|
||||
from six.moves import range # pylint: disable=redefined-builtin
|
||||
|
||||
|
||||
def _AppendTextToFile(file_name):
  """Subprocess target: appends 'Start' + 10000 '*'s + 'End' under an
  exclusive lock. The many small writes must appear as one atomic chunk."""
  with open(file_name, 'a') as f:
    lock.AcquireFileLock(f, lock.LOCK_EX)
    # Sleep 100 ms to increase the chance of another process trying to acquire
    # the lock of file as the same time.
    time.sleep(0.1)
    f.write('Start')
    for _ in range(10000):
      f.write('*')
    f.write('End')
|
||||
|
||||
|
||||
def _ReadFileWithSharedLockBlockingThenWrite(read_file, write_file):
  """Subprocess target: reads |read_file| under a shared lock, then appends
  its content to |write_file| under an exclusive lock."""
  with open(read_file, 'r') as f:
    lock.AcquireFileLock(f, lock.LOCK_SH)
    content = f.read()
    with open(write_file, 'a') as f2:
      lock.AcquireFileLock(f2, lock.LOCK_EX)
      f2.write(content)
|
||||
|
||||
|
||||
def _ReadFileWithExclusiveLockNonBlocking(target_file, status_file):
  """Subprocess target: tries a non-blocking exclusive lock on |target_file|
  and records into |status_file| whether LockException was raised."""
  with open(target_file, 'r') as f:
    try:
      lock.AcquireFileLock(f, lock.LOCK_EX | lock.LOCK_NB)
      with open(status_file, 'w') as f2:
        f2.write('LockException was not raised')
    except lock.LockException:
      with open(status_file, 'w') as f2:
        f2.write('LockException raised')
|
||||
|
||||
|
||||
class FileLockTest(unittest.TestCase):
  """Integration tests for py_utils.lock using real subprocesses.

  Note: assertEquals (a deprecated alias) was replaced by assertEqual
  throughout; behavior is otherwise unchanged.
  """

  def setUp(self):
    tf = tempfile.NamedTemporaryFile(delete=False)
    tf.close()
    self.temp_file_path = tf.name

  def tearDown(self):
    os.remove(self.temp_file_path)

  def testExclusiveLock(self):
    # Spawn 10 writers that must not interleave their writes.
    processes = []
    for _ in range(10):
      p = multiprocessing.Process(
          target=_AppendTextToFile, args=(self.temp_file_path,))
      p.start()
      processes.append(p)
    for p in processes:
      p.join()

    # If the file lock works as expected, there should be 10 atomic writes of
    # 'Start***...***End' to the file in some order, which lead to the final
    # file content as below.
    expected_file_content = ''.join((['Start'] + ['*']*10000 + ['End']) * 10)
    with open(self.temp_file_path, 'r') as f:
      # Use assertTrue instead of assertEqual since the strings are big, hence
      # assertEqual's assertion failure will contain huge strings.
      self.assertTrue(expected_file_content == f.read())

  def testSharedLock(self):
    tf = tempfile.NamedTemporaryFile(delete=False)
    tf.close()
    temp_write_file = tf.name
    try:
      with open(self.temp_file_path, 'w') as f:
        f.write('0123456789')
      with open(self.temp_file_path, 'r') as f:
        # First, acquire a shared lock on temp_file_path
        lock.AcquireFileLock(f, lock.LOCK_SH)

        processes = []
        # Create 10 processes that also try to acquire shared lock from
        # temp_file_path then append temp_file_path's content to temp_write_file
        for _ in range(10):
          p = multiprocessing.Process(
              target=_ReadFileWithSharedLockBlockingThenWrite,
              args=(self.temp_file_path, temp_write_file))
          p.start()
          processes.append(p)
        for p in processes:
          p.join()

      # temp_write_file should contains 10 copy of temp_file_path's content.
      with open(temp_write_file, 'r') as f:
        self.assertEqual('0123456789'*10, f.read())
    finally:
      os.remove(temp_write_file)

  def testNonBlockingLockAcquiring(self):
    tf = tempfile.NamedTemporaryFile(delete=False)
    tf.close()
    temp_status_file = tf.name
    try:
      with open(self.temp_file_path, 'w') as f:
        lock.AcquireFileLock(f, lock.LOCK_EX)
        # While we hold the exclusive lock, the child must fail fast.
        p = multiprocessing.Process(
            target=_ReadFileWithExclusiveLockNonBlocking,
            args=(self.temp_file_path, temp_status_file))
        p.start()
        p.join()
      with open(temp_status_file, 'r') as f:
        self.assertEqual('LockException raised', f.read())
    finally:
      os.remove(temp_status_file)

  def testUnlockBeforeClosingFile(self):
    tf = tempfile.NamedTemporaryFile(delete=False)
    tf.close()
    temp_status_file = tf.name
    try:
      with open(self.temp_file_path, 'r') as f:
        lock.AcquireFileLock(f, lock.LOCK_SH)
        lock.ReleaseFileLock(f)
        # The lock was released explicitly, so the child must succeed even
        # though the file is still open here.
        p = multiprocessing.Process(
            target=_ReadFileWithExclusiveLockNonBlocking,
            args=(self.temp_file_path, temp_status_file))
        p.start()
        p.join()
      with open(temp_status_file, 'r') as f:
        self.assertEqual('LockException was not raised', f.read())
    finally:
      os.remove(temp_status_file)

  def testContextualLock(self):
    tf = tempfile.NamedTemporaryFile(delete=False)
    tf.close()
    temp_status_file = tf.name
    try:
      with open(self.temp_file_path, 'r') as f:
        with lock.FileLock(f, lock.LOCK_EX):
          # Within this block, accessing self.temp_file_path from another
          # process should raise exception.
          p = multiprocessing.Process(
              target=_ReadFileWithExclusiveLockNonBlocking,
              args=(self.temp_file_path, temp_status_file))
          p.start()
          p.join()
          with open(temp_status_file, 'r') as f:
            self.assertEqual('LockException raised', f.read())

        # Accessing self.temp_file_path here should not raise exception.
        p = multiprocessing.Process(
            target=_ReadFileWithExclusiveLockNonBlocking,
            args=(self.temp_file_path, temp_status_file))
        p.start()
        p.join()
        with open(temp_status_file, 'r') as f:
          self.assertEqual('LockException was not raised', f.read())
    finally:
      os.remove(temp_status_file)
|
||||
@@ -0,0 +1,35 @@
|
||||
# Copyright 2017 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Logging util functions.
|
||||
|
||||
It would be named logging, but other modules in this directory use the default
|
||||
logging module, so that would break them.
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
import logging
|
||||
|
||||
@contextlib.contextmanager
def CaptureLogs(file_stream):
  """Context manager that copies root-logger output into |file_stream|.

  If |file_stream| is falsy, logging is left untouched and this is a no-op.
  The temporary handler is always removed on exit, even if the body raises.
  """
  if not file_stream:
    # No file stream given, just don't capture logs.
    yield
    return

  fh = logging.StreamHandler(file_stream)

  logger = logging.getLogger()
  # Try to copy the current log format, if one is set.
  # Bug fix: the previous check used hasattr(handler, 'formatter'), which is
  # always true for a Handler, so the default format below was never applied
  # when a handler existed without a formatter.
  if logger.handlers and logger.handlers[0].formatter:
    fh.setFormatter(logger.handlers[0].formatter)
  else:
    fh.setFormatter(logging.Formatter(
        '(%(levelname)s) %(asctime)s %(message)s'))
  logger.addHandler(fh)

  try:
    yield
  finally:
    logger.removeHandler(fh)
|
||||
@@ -0,0 +1,27 @@
|
||||
# Copyright 2017 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
import logging
|
||||
import unittest
|
||||
|
||||
try:
|
||||
from six import StringIO
|
||||
except ImportError:
|
||||
from io import StringIO
|
||||
|
||||
from py_utils import logging_util
|
||||
|
||||
|
||||
class LoggingUtilTest(unittest.TestCase):
  """Tests for logging_util.CaptureLogs."""

  def testCapture(self):
    s = StringIO()
    with logging_util.CaptureLogs(s):
      # logging.fatal is a deprecated alias; critical() is the real API.
      logging.critical('test')

    # Only assert ends with, since the logging message by default has the date
    # in it.
    self.assertTrue(s.getvalue().endswith('test\n'))
|
||||
|
||||
|
||||
# Allow running this file directly as a test suite.
if __name__ == '__main__':
  unittest.main()
|
||||
@@ -0,0 +1,95 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright 2017 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import heapq
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
try:
|
||||
import psutil
|
||||
except ImportError:
|
||||
psutil = None
|
||||
|
||||
|
||||
BYTE_UNITS = ['B', 'KiB', 'MiB', 'GiB']


def FormatBytes(value):
  """Renders a byte count as e.g. '1.5 KiB'; returns 'N/A' for None.

  Scaling stops at GiB, so very large values stay in GiB (e.g. '1024.0 GiB').
  """
  if value is None:
    return 'N/A'
  scaled = value
  for unit in BYTE_UNITS[:-1]:
    if abs(scaled) < 1024.0:
      return '%.1f %s' % (scaled, unit)
    scaled /= 1024.0
  return '%.1f %s' % (scaled, BYTE_UNITS[-1])
|
||||
|
||||
|
||||
def _GetProcessInfo(p):
  # Snapshots pid/name/memory for a psutil process. 'mem_rss' falls back to 0
  # when memory_info is unavailable (getattr default).
  pinfo = p.as_dict(attrs=['pid', 'name', 'memory_info'])
  pinfo['mem_rss'] = getattr(pinfo['memory_info'], 'rss', 0)
  return pinfo
|
||||
|
||||
|
||||
def _LogProcessInfo(pinfo, level):
  # Adds a human-readable 'mem_rss_fmt' entry (mutates |pinfo|), then logs it.
  pinfo['mem_rss_fmt'] = FormatBytes(pinfo['mem_rss'])
  logging.log(level, '%(mem_rss_fmt)s (pid=%(pid)s)', pinfo)
|
||||
|
||||
|
||||
def LogHostMemoryUsage(top_n=10, level=logging.INFO):
  """Logs overall host memory usage plus the largest process groups by RSS.

  Args:
    top_n: Number of process groups (grouped by process name) to log,
        ordered by total RSS descending.
    level: Logging level used for every emitted message.
  """
  if not psutil:
    logging.warning('psutil module is not found, skipping logging memory info')
    return
  if psutil.version_info < (2, 0):
    logging.warning('psutil %s too old, upgrade to version 2.0 or higher'
                    ' for memory usage information.', psutil.__version__)
    return

  # TODO(crbug.com/777865): Remove the following pylint disable. Even if we
  # check for a recent enough psutil version above, the catapult presubmit
  # builder (still running some old psutil) fails pylint checks due to API
  # changes in psutil.
  # pylint: disable=no-member
  mem = psutil.virtual_memory()
  logging.log(level, 'Used %s out of %s memory available.',
              FormatBytes(mem.used), FormatBytes(mem.total))
  logging.log(level, 'Memory usage of top %i processes groups', top_n)
  pinfos_by_names = {}
  for p in psutil.process_iter():
    try:
      pinfo = _GetProcessInfo(p)
    except psutil.NoSuchProcess:
      # Processes may exit between enumeration and inspection; skip them.
      logging.exception('process %s no longer exists', p)
      continue
    pname = pinfo['name']
    if pname not in pinfos_by_names:
      pinfos_by_names[pname] = {'name': pname, 'total_mem_rss': 0, 'pids': []}
    pinfos_by_names[pname]['total_mem_rss'] += pinfo['mem_rss']
    pinfos_by_names[pname]['pids'].append(str(pinfo['pid']))

  sorted_pinfo_groups = heapq.nlargest(
      top_n,
      list(pinfos_by_names.values()),
      key=lambda item: item['total_mem_rss'])
  for group in sorted_pinfo_groups:
    group['total_mem_rss_fmt'] = FormatBytes(group['total_mem_rss'])
    group['pids_fmt'] = ', '.join(group['pids'])
    # Bug fix: log the formatted 'pids_fmt' string. The previous format used
    # '%(pids)s', which printed the raw list repr and left 'pids_fmt' unused.
    logging.log(
        level, '- %(name)s - %(total_mem_rss_fmt)s - pids: %(pids_fmt)s',
        group)
  logging.log(level, 'Current process:')
  pinfo = _GetProcessInfo(psutil.Process(os.getpid()))
  _LogProcessInfo(pinfo, level)
|
||||
|
||||
|
||||
def main():
  # Configure basic INFO logging and dump host memory usage once.
  logging.basicConfig(level=logging.INFO)
  LogHostMemoryUsage()


# main() returns None, which sys.exit treats as exit status 0.
if __name__ == '__main__':
  sys.exit(main())
|
||||
@@ -0,0 +1,35 @@
|
||||
# Copyright 2019 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
from distutils import version # pylint: disable=no-name-in-module
|
||||
|
||||
|
||||
def RequireVersion(module, min_version, max_version=None):
  """Ensure that an imported module's version is within a required range.

  Version strings are parsed with LooseVersion, so versions like "1.8.0rc1"
  (default numpy on macOS Sierra) and "2.4.13.2" (a version of OpenCV 2.x)
  are allowed.

  Args:
    module: An already imported python module.
    min_version: The module must have this or a higher version.
    max_version: Optional, the module should not have this or a higher version.

  Raises:
    ImportError if the module's __version__ is not within the allowed range.
  """
  actual = version.LooseVersion(module.__version__)
  lower = version.LooseVersion(str(min_version))

  if max_version is None:
    ok = lower <= actual
    wanted = '%s or higher' % lower
  else:
    upper = version.LooseVersion(str(max_version))
    # Chained comparison: lower bound inclusive, upper bound exclusive.
    ok = lower <= actual < upper
    wanted = 'at or above %s and below %s' % (lower, upper)

  if not ok:
    raise ImportError('%s has version %s, but version %s is required' % (
        module.__name__, actual, wanted))
|
||||
@@ -0,0 +1,41 @@
|
||||
# Copyright 2019 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
import unittest
|
||||
|
||||
from py_utils import modules_util
|
||||
|
||||
|
||||
class FakeModule(object):
  """A stand-in for a real module exposing only __name__ and __version__.

  RequireVersion reads exactly these two attributes, so this is enough for
  the tests below.
  """
  def __init__(self, name, version):
    self.__name__ = name
    self.__version__ = version
|
||||
|
||||
|
||||
class ModulesUitlTest(unittest.TestCase):
  # NOTE(review): class name has a typo ("Uitl" for "Util"); kept unchanged
  # so test discovery and any external references keep working.

  def testRequireVersion_valid(self):
    """A version at or above the minimum passes without raising."""
    numpy = FakeModule('numpy', '2.3')
    try:
      modules_util.RequireVersion(numpy, '1.0')
    except ImportError:
      self.fail('ImportError raised unexpectedly')

  def testRequireVersion_versionTooLow(self):
    """A version below the minimum raises ImportError with a clear message."""
    numpy = FakeModule('numpy', '2.3')
    with self.assertRaises(ImportError) as error:
      modules_util.RequireVersion(numpy, '2.5')
    self.assertEqual(
        str(error.exception),
        'numpy has version 2.3, but version 2.5 or higher is required')

  def testRequireVersion_versionTooHigh(self):
    """A version at or above the maximum raises ImportError."""
    numpy = FakeModule('numpy', '2.3')
    with self.assertRaises(ImportError) as error:
      modules_util.RequireVersion(numpy, '1.0', '2.0')
    self.assertEqual(
        str(error.exception), 'numpy has version 2.3, but version'
        ' at or above 1.0 and below 2.0 is required')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
@@ -0,0 +1,56 @@
|
||||
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
import py_utils
|
||||
|
||||
|
||||
class PathTest(unittest.TestCase):
  """Tests for the py_utils.IsExecutable path helper."""

  def testIsExecutable(self):
    # A path that does not exist is never executable.
    self.assertFalse(py_utils.IsExecutable('nonexistent_file'))
    # We use actual files on disk instead of pyfakefs because the executable
    # bit is set differently on win than posix platforms and pyfakefs doesn't
    # support the win platform well.
    self.assertFalse(py_utils.IsExecutable(_GetFileInTestDir('foo.txt')))
    # The running interpreter itself must be executable.
    self.assertTrue(py_utils.IsExecutable(sys.executable))
|
||||
|
||||
|
||||
def _GetFileInTestDir(file_name):
|
||||
return os.path.join(os.path.dirname(__file__), 'test_data', file_name)
|
||||
|
||||
|
||||
class WaitForTest(unittest.TestCase):
  """Tests for the py_utils.WaitFor polling helper."""

  def testWaitForTrue(self):
    def ReturnTrue():
      return True
    # An immediately-true condition returns its truthy value.
    self.assertTrue(py_utils.WaitFor(ReturnTrue, .1))

  def testWaitForFalse(self):
    def ReturnFalse():
      return False

    # A condition that never becomes true times out.
    with self.assertRaises(py_utils.TimeoutException):
      py_utils.WaitFor(ReturnFalse, .1)

  def testWaitForEventuallyTrue(self):
    # Use list to pass to inner function in order to allow modifying the
    # variable from the outer scope.
    c = [0]
    def ReturnCounterBasedValue():
      c[0] += 1
      return c[0] > 2

    # Becomes true on the third poll, well within the timeout.
    self.assertTrue(py_utils.WaitFor(ReturnCounterBasedValue, .5))

  def testWaitForTrueLambda(self):
    self.assertTrue(py_utils.WaitFor(lambda: True, .1))

  def testWaitForFalseLambda(self):
    with self.assertRaises(py_utils.TimeoutException):
      py_utils.WaitFor(lambda: False, .1)
|
||||
|
||||
@@ -0,0 +1,28 @@
|
||||
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Style-preserving Python code transforms.
|
||||
|
||||
This module provides components for modifying and querying Python code. They can
|
||||
be used to build custom refactorings and linters.
|
||||
"""
|
||||
|
||||
import functools
|
||||
import multiprocessing
|
||||
|
||||
# pylint: disable=wildcard-import
|
||||
from py_utils.refactor.annotated_symbol import * # pylint: disable=redefined-builtin
|
||||
from py_utils.refactor.module import Module
|
||||
|
||||
|
||||
def _TransformFile(transform, file_path):
  """Apply transform to one parsed module and persist any modifications."""
  parsed = Module(file_path)
  outcome = transform(parsed)
  # Module.Write only rewrites the file if the transform modified the tree.
  parsed.Write()
  return outcome
|
||||
|
||||
|
||||
def Transform(transform, file_paths):
  """Apply a transform to each file, in parallel worker processes.

  Args:
    transform: A callable taking a Module and returning a result.
    file_paths: Iterable of paths to Python source files.

  Returns:
    List of per-file transform results, in the same order as file_paths.
  """
  transform = functools.partial(_TransformFile, transform)
  return multiprocessing.Pool().map(transform, file_paths)
|
||||
@@ -0,0 +1,71 @@
|
||||
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# pylint: disable=wildcard-import
|
||||
from py_utils.refactor.annotated_symbol.class_definition import *
|
||||
from py_utils.refactor.annotated_symbol.function_definition import *
|
||||
from py_utils.refactor.annotated_symbol.import_statement import *
|
||||
from py_utils.refactor.annotated_symbol.reference import * # pylint: disable=redefined-builtin
|
||||
from py_utils.refactor import snippet
|
||||
|
||||
|
||||
__all__ = [
|
||||
'Annotate',
|
||||
|
||||
'Class',
|
||||
'Function',
|
||||
'Import',
|
||||
'Reference',
|
||||
]
|
||||
|
||||
|
||||
# Specific symbol types with extra methods for manipulating them.
|
||||
# Python's full grammar is here:
|
||||
# https://docs.python.org/2/reference/grammar.html
|
||||
|
||||
# Annotated Symbols have an Annotate classmethod that takes a symbol type and
|
||||
# list of children, and returns an instance of that annotated Symbol.
|
||||
|
||||
ANNOTATED_SYMBOLS = (
|
||||
AsName,
|
||||
Class,
|
||||
DottedName,
|
||||
ImportFrom,
|
||||
ImportName,
|
||||
Function,
|
||||
)
|
||||
|
||||
|
||||
# Unfortunately, some logical groupings are not represented by a node in the
|
||||
# parse tree. To work around this, some annotated Symbols have an Annotate
|
||||
# classmethod that takes and returns a list of Snippets instead.
|
||||
|
||||
ANNOTATED_GROUPINGS = (
|
||||
Reference,
|
||||
)
|
||||
|
||||
|
||||
def Annotate(f):
  """Return the annotated syntax tree of the given file-like object."""
  parse_tree = snippet.Snippetize(f)
  return _AnnotateNode(parse_tree)
|
||||
|
||||
|
||||
def _AnnotateNode(node):
  """Recursively replace plain Symbols with their annotated equivalents."""
  # Leaf tokens (anything that is not a Symbol) pass through unchanged.
  if not isinstance(node, snippet.Symbol):
    return node

  annotated_children = [_AnnotateNode(child) for child in node.children]

  # Groupings rewrite the child list itself; first match wins.
  for grouping_type in ANNOTATED_GROUPINGS:
    regrouped = grouping_type.Annotate(annotated_children)
    if regrouped:
      annotated_children = regrouped
      break

  # Symbol annotations replace the node wholesale; first match wins.
  for annotated_type in ANNOTATED_SYMBOLS:
    replacement = annotated_type.Annotate(node.type, annotated_children)
    if replacement:
      return replacement

  return snippet.Symbol(node.type, annotated_children)
|
||||
@@ -0,0 +1,40 @@
|
||||
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
from py_utils.refactor import snippet
|
||||
from six.moves import range # pylint: disable=redefined-builtin
|
||||
|
||||
|
||||
class AnnotatedSymbol(snippet.Symbol):
  """A snippet.Symbol that tracks whether it has been modified.

  Any assignment to a class-level property, and any Cut/Paste of children,
  marks this node as modified so Module.Write knows the file needs rewriting.
  """

  def __init__(self, symbol_type, children):
    super(AnnotatedSymbol, self).__init__(symbol_type, children)
    self._modified = False

  @property
  def modified(self):
    # Modified either directly on this node, or per the base class's notion
    # of modification (presumably within a child — confirm in snippet.Symbol).
    if self._modified:
      return True
    return super(AnnotatedSymbol, self).modified

  def __setattr__(self, name, value):
    # Setting any attribute backed by a property on the subclass counts as a
    # modification; plain attributes (like _modified itself) do not.
    if (hasattr(self.__class__, name) and
        isinstance(getattr(self.__class__, name), property)):
      self._modified = True
    return super(AnnotatedSymbol, self).__setattr__(name, value)

  def Cut(self, child):
    """Remove child from this node's children.

    Raises:
      ValueError: if child is not among the children.
    """
    for i in range(len(self._children)):
      if self._children[i] == child:
        self._modified = True
        del self._children[i]
        break
    else:
      raise ValueError('%s is not in %s.' % (child, self))

  def Paste(self, child):
    """Append child to this node's children."""
    self._modified = True
    self._children.append(child)
|
||||
@@ -0,0 +1,49 @@
|
||||
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import symbol
|
||||
|
||||
from py_utils.refactor.annotated_symbol import base_symbol
|
||||
|
||||
|
||||
__all__ = [
|
||||
'Class',
|
||||
]
|
||||
|
||||
|
||||
class Class(base_symbol.AnnotatedSymbol):
  """An annotated symbol for a (possibly decorated) class definition."""

  @classmethod
  def Annotate(cls, symbol_type, children):
    """Return a Class for a classdef statement, or None if not applicable."""
    if symbol_type != symbol.stmt:
      return None

    compound_statement = children[0]
    if compound_statement.type != symbol.compound_stmt:
      return None

    statement = compound_statement.children[0]
    if statement.type == symbol.classdef:
      return cls(statement.type, statement.children)
    elif (statement.type == symbol.decorated and
          statement.children[-1].type == symbol.classdef):
      # Decorated class: annotate the whole decorated statement.
      return cls(statement.type, statement.children)
    else:
      return None

  @property
  def suite(self):
    # TODO: Complete.
    raise NotImplementedError()

  def FindChild(self, snippet_type, **kwargs):
    # Child queries are delegated to the class body (suite).
    return self.suite.FindChild(snippet_type, **kwargs)

  def FindChildren(self, snippet_type):
    return self.suite.FindChildren(snippet_type)

  def Cut(self, child):
    self.suite.Cut(child)

  def Paste(self, child):
    self.suite.Paste(child)
|
||||
@@ -0,0 +1,49 @@
|
||||
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import symbol
|
||||
|
||||
from py_utils.refactor.annotated_symbol import base_symbol
|
||||
|
||||
|
||||
__all__ = [
|
||||
'Function',
|
||||
]
|
||||
|
||||
|
||||
class Function(base_symbol.AnnotatedSymbol):
  """An annotated symbol for a (possibly decorated) function definition."""

  @classmethod
  def Annotate(cls, symbol_type, children):
    """Return a Function for a funcdef statement, or None if not applicable."""
    if symbol_type != symbol.stmt:
      return None

    compound_statement = children[0]
    if compound_statement.type != symbol.compound_stmt:
      return None

    statement = compound_statement.children[0]
    if statement.type == symbol.funcdef:
      return cls(statement.type, statement.children)
    elif (statement.type == symbol.decorated and
          statement.children[-1].type == symbol.funcdef):
      # Decorated function: annotate the whole decorated statement.
      return cls(statement.type, statement.children)
    else:
      return None

  @property
  def suite(self):
    # TODO: Complete.
    raise NotImplementedError()

  def FindChild(self, snippet_type, **kwargs):
    # Child queries are delegated to the function body (suite).
    return self.suite.FindChild(snippet_type, **kwargs)

  def FindChildren(self, snippet_type):
    return self.suite.FindChildren(snippet_type)

  def Cut(self, child):
    self.suite.Cut(child)

  def Paste(self, child):
    self.suite.Paste(child)
|
||||
@@ -0,0 +1,330 @@
|
||||
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
|
||||
import keyword
|
||||
import symbol
|
||||
import token
|
||||
|
||||
from py_utils.refactor import snippet
|
||||
from py_utils.refactor.annotated_symbol import base_symbol
|
||||
from six.moves import zip_longest # pylint: disable=redefined-builtin
|
||||
|
||||
|
||||
__all__ = [
|
||||
'AsName',
|
||||
'DottedName',
|
||||
'Import',
|
||||
'ImportFrom',
|
||||
'ImportName',
|
||||
]
|
||||
|
||||
|
||||
class DottedName(base_symbol.AnnotatedSymbol):
  """An annotated dotted_name symbol, e.g. the "a.b.c" in "import a.b.c"."""

  @classmethod
  def Annotate(cls, symbol_type, children):
    if symbol_type != symbol.dotted_name:
      return None
    return cls(symbol_type, children)

  @property
  def value(self):
    # Children alternate NAME and DOT tokens; joining yields "a.b.c".
    return ''.join(token_snippet.value for token_snippet in self._children)

  @value.setter
  def value(self, value):
    value_parts = value.split('.')
    for value_part in value_parts:
      if keyword.iskeyword(value_part):
        raise ValueError('%s is a reserved keyword.' % value_part)

    # If we have too many children, cut the list down to size.
    # (n parts need n NAME tokens plus n-1 DOT tokens.)
    # pylint: disable=attribute-defined-outside-init
    self._children = self._children[:len(value_parts)*2-1]

    # Update child nodes; self._children[::2] selects the NAME tokens.
    for child, value_part in zip_longest(self._children[::2], value_parts):
      if child:
        # Modify existing children. This helps preserve comments and spaces.
        child.value = value_part
      else:
        # Add children as needed.
        self._children.append(snippet.TokenSnippet.Create(token.DOT, '.'))
        self._children.append(
            snippet.TokenSnippet.Create(token.NAME, value_part))
|
||||
|
||||
|
||||
class AsName(base_symbol.AnnotatedSymbol):
  """An annotated "x as y" clause (dotted_as_name or import_as_name)."""

  @classmethod
  def Annotate(cls, symbol_type, children):
    if (symbol_type != symbol.dotted_as_name and
        symbol_type != symbol.import_as_name):
      return None
    return cls(symbol_type, children)

  @property
  def name(self):
    # First child is the imported (possibly dotted) name.
    return self.children[0].value

  @name.setter
  def name(self, value):
    self.children[0].value = value

  @property
  def alias(self):
    # Children are [name, 'as', alias] when an alias is present.
    if len(self.children) < 3:
      return None
    return self.children[2].value

  @alias.setter
  def alias(self, value):
    if keyword.iskeyword(value):
      raise ValueError('%s is a reserved keyword.' % value)

    if value:
      # pylint: disable=access-member-before-definition
      if len(self.children) < 3:
        # If we currently have no alias, add one.
        # pylint: disable=access-member-before-definition
        self.children.append(
            snippet.TokenSnippet.Create(token.NAME, 'as', (0, 1)))
        # pylint: disable=access-member-before-definition
        self.children.append(
            snippet.TokenSnippet.Create(token.NAME, value, (0, 1)))
      else:
        # We already have an alias. Just update the value.
        # pylint: disable=access-member-before-definition
        self.children[2].value = value
    else:
      # Removing the alias. Strip the "as foo".
      self.children = [self.children[0]]  # pylint: disable=line-too-long, attribute-defined-outside-init
|
||||
|
||||
|
||||
class Import(base_symbol.AnnotatedSymbol):
  """An import statement.

  Example:
    import a.b.c as d
    from a.b import c as d

  In these examples,
    path == 'a.b.c'
    alias == 'd'
    root == 'a.b' (only for "from" imports)
    module == 'c' (only for "from" imports)
    name (read-only) == the name used by references to the module, which is the
      alias if there is one, the full module path in "full" imports, and the
      module name in "from" imports.

  This is an abstract interface; ImportName and ImportFrom implement it.
  """
  @property
  def has_from(self):
    """Returns True iff the import statement is of the form "from x import y"."""
    raise NotImplementedError()

  @property
  def values(self):
    # Tuple of (name, alias) pairs, one per imported item.
    raise NotImplementedError()

  @property
  def paths(self):
    # Tuple of full dotted paths, one per imported item.
    raise NotImplementedError()

  @property
  def aliases(self):
    # Tuple of aliases (None where absent), one per imported item.
    raise NotImplementedError()

  @property
  def path(self):
    """The full dotted path of the module."""
    raise NotImplementedError()

  @path.setter
  def path(self, value):
    raise NotImplementedError()

  @property
  def alias(self):
    """The alias, if the module is renamed with "as". None otherwise."""
    raise NotImplementedError()

  @alias.setter
  def alias(self, value):
    raise NotImplementedError()

  @property
  def name(self):
    """The name used to reference this import's module."""
    raise NotImplementedError()
|
||||
|
||||
|
||||
class ImportName(Import):
  """A plain "import a.b.c [as d]" statement (symbol.import_name)."""

  @classmethod
  def Annotate(cls, symbol_type, children):
    if symbol_type != symbol.import_stmt:
      return None
    if children[0].type != symbol.import_name:
      return None
    assert len(children) == 1
    return cls(symbol_type, children[0].children)

  @property
  def has_from(self):
    return False

  @property
  def values(self):
    # children[1] is the dotted_as_names node; every other child of it is a
    # dotted_as_name (the nodes in between are commas).
    dotted_as_names = self.children[1]
    return tuple((dotted_as_name.name, dotted_as_name.alias)
                 for dotted_as_name in dotted_as_names.children[::2])

  @property
  def paths(self):
    return tuple(path for path, _ in self.values)

  @property
  def aliases(self):
    return tuple(alias for _, alias in self.values)

  @property
  def _dotted_as_name(self):
    # Single-import accessor; multi-import statements are not supported here.
    dotted_as_names = self.children[1]
    if len(dotted_as_names.children) != 1:
      raise NotImplementedError(
          'This method only works if the statement has one import.')
    return dotted_as_names.children[0]

  @property
  def path(self):
    return self._dotted_as_name.name

  @path.setter
  def path(self, value):  # pylint: disable=arguments-differ
    self._dotted_as_name.name = value

  @property
  def alias(self):
    return self._dotted_as_name.alias

  @alias.setter
  def alias(self, value):  # pylint: disable=arguments-differ
    self._dotted_as_name.alias = value

  @property
  def name(self):
    # Alias wins; otherwise the full dotted path is the referenced name.
    if self.alias:
      return self.alias
    else:
      return self.path
|
||||
|
||||
|
||||
class ImportFrom(Import):
  """A "from a.b import c [as d]" statement (symbol.import_from)."""

  @classmethod
  def Annotate(cls, symbol_type, children):
    if symbol_type != symbol.import_stmt:
      return None
    if children[0].type != symbol.import_from:
      return None
    assert len(children) == 1
    return cls(symbol_type, children[0].children)

  @property
  def has_from(self):
    return True

  @property
  def values(self):
    try:
      import_as_names = self.FindChild(symbol.import_as_names)
    except ValueError:
      # No import_as_names child means a star import: "from a.b import *".
      return (('*', None),)

    return tuple((import_as_name.name, import_as_name.alias)
                 for import_as_name in import_as_names.children[::2])

  @property
  def paths(self):
    module = self.module
    return tuple('.'.join((module, name)) for name, _ in self.values)

  @property
  def aliases(self):
    return tuple(alias for _, alias in self.values)

  @property
  def root(self):
    # The "a.b" part before the "import" keyword.
    return self.FindChild(symbol.dotted_name).value

  @root.setter
  def root(self, value):
    self.FindChild(symbol.dotted_name).value = value

  @property
  def _import_as_name(self):
    # Single-import accessor; returns None for star imports.
    try:
      import_as_names = self.FindChild(symbol.import_as_names)
    except ValueError:
      return None

    if len(import_as_names.children) != 1:
      raise NotImplementedError(
          'This method only works if the statement has one import.')

    return import_as_names.children[0]

  @property
  def module(self):
    import_as_name = self._import_as_name
    if import_as_name:
      return import_as_name.name
    else:
      return '*'

  @module.setter
  def module(self, value):
    if keyword.iskeyword(value):
      raise ValueError('%s is a reserved keyword.' % value)

    import_as_name = self._import_as_name
    if value == '*':
      # TODO: Implement this.
      raise NotImplementedError()
    else:
      if import_as_name:
        import_as_name.name = value
      else:
        # TODO: Implement this.
        raise NotImplementedError()

  @property
  def path(self):
    return '.'.join((self.root, self.module))

  @path.setter
  def path(self, value):  # pylint: disable=arguments-differ
    # Splitting on the last dot: everything before is root, after is module.
    self.root, _, self.module = value.rpartition('.')

  @property
  def alias(self):
    import_as_name = self._import_as_name
    if import_as_name:
      return import_as_name.alias
    else:
      return None

  @alias.setter
  def alias(self, value):  # pylint: disable=arguments-differ
    import_as_name = self._import_as_name
    if not import_as_name:
      raise NotImplementedError('Cannot change alias for "import *".')
    import_as_name.alias = value

  @property
  def name(self):
    # Alias wins; otherwise the bare module name is the referenced name.
    if self.alias:
      return self.alias
    else:
      return self.module
|
||||
@@ -0,0 +1,80 @@
|
||||
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
|
||||
import symbol
|
||||
import token
|
||||
|
||||
from py_utils.refactor import snippet
|
||||
from py_utils.refactor.annotated_symbol import base_symbol
|
||||
from six.moves import range # pylint: disable=redefined-builtin
|
||||
from six.moves import zip_longest # pylint: disable=redefined-builtin
|
||||
|
||||
|
||||
__all__ = [
|
||||
'Reference',
|
||||
]
|
||||
|
||||
|
||||
class Reference(base_symbol.AnnotatedSymbol):
  """A dotted attribute reference, e.g. "a.b.c", grouped from several nodes.

  Unlike most annotated symbols, a Reference does not correspond to a single
  parse-tree node: it groups a leading atom plus its ".name" trailers, so
  Annotate takes and returns a list of nodes.
  """

  @classmethod
  def Annotate(cls, nodes):
    if not nodes:
      return None
    if nodes[0].type != symbol.atom:
      return None
    if not nodes[0].children or nodes[0].children[0].type != token.NAME:
      return None

    # Consume consecutive ".NAME" trailers following the leading atom.
    for i in range(1, len(nodes)):
      if not nodes:
        # NOTE(review): nodes is known non-empty here; this check looks
        # redundant but is kept as-is.
        break
      if nodes[i].type != symbol.trailer:
        break
      if len(nodes[i].children) != 2:
        break
      if (nodes[i].children[0].type != token.DOT or
          nodes[i].children[1].type != token.NAME):
        break
    else:
      # Every node was part of the reference.
      i = len(nodes)

    # Group the reference nodes; pass any remaining nodes through unchanged.
    return [cls(nodes[:i])] + nodes[i:]

  def __init__(self, children):
    # -1 is a sentinel: a Reference has no real parse-tree symbol type.
    super(Reference, self).__init__(-1, children)

  @property
  def type_name(self):
    return 'attribute_reference'

  @property
  def value(self):
    return ''.join(token_snippet.value
                   for child in self.children
                   for token_snippet in child.children)

  @value.setter
  def value(self, value):
    value_parts = value.split('.')

    # If we have too many children, cut the list down to size.
    # pylint: disable=attribute-defined-outside-init
    self._children = self._children[:len(value_parts)]

    # Update child nodes.
    for child, value_part in zip_longest(self._children, value_parts):
      if child:
        # Modify existing children. This helps preserve comments and spaces.
        child.children[-1].value = value_part
      else:
        # Add children as needed.
        token_snippets = [
            snippet.TokenSnippet.Create(token.DOT, '.'),
            snippet.TokenSnippet.Create(token.NAME, value_part),
        ]
        self._children.append(snippet.Symbol(symbol.trailer, token_snippets))
|
||||
@@ -0,0 +1,39 @@
|
||||
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from py_utils.refactor import annotated_symbol
|
||||
|
||||
|
||||
class Module(object):
  """A parsed Python source file that can be queried and rewritten in place."""

  def __init__(self, file_path):
    self._file_path = file_path

    # Parse and annotate the whole file up front.
    with open(self._file_path, 'r') as f:
      self._snippet = annotated_symbol.Annotate(f)

  @property
  def file_path(self):
    return self._file_path

  @property
  def modified(self):
    # True if any annotated node in the parse tree was modified.
    return self._snippet.modified

  def FindAll(self, snippet_type):
    return self._snippet.FindAll(snippet_type)

  def FindChildren(self, snippet_type):
    return self._snippet.FindChildren(snippet_type)

  def Write(self):
    """Write modifications to the file."""
    if not self.modified:
      return

    # Stringify before opening the file for writing.
    # If we fail, we won't truncate the file.
    string = str(self._snippet)
    with open(self._file_path, 'w') as f:
      f.write(string)
|
||||
@@ -0,0 +1,120 @@
|
||||
# Lint as: python2, python3
|
||||
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
import collections
|
||||
import itertools
|
||||
import token
|
||||
import tokenize
|
||||
from six.moves import zip # pylint: disable=redefined-builtin
|
||||
|
||||
|
||||
def _Pairwise(iterable):
|
||||
"""s -> (None, s0), (s0, s1), (s1, s2), (s2, s3), ..."""
|
||||
a, b = itertools.tee(iterable)
|
||||
a = itertools.chain((None,), a)
|
||||
return zip(a, b)
|
||||
|
||||
|
||||
class OffsetToken(object):
  """A Python token with a relative position.

  A token is represented by a type defined in Python's token module, a string
  representing the content, and an offset. Using relative positions makes it
  easy to insert and remove tokens.
  """

  def __init__(self, token_type, string, offset):
    self._type = token_type
    self._string = string
    self._offset = offset

  def __str__(self):
    return str((self.type_name, self.string, self.offset))

  @property
  def type(self):
    return self._type

  @property
  def type_name(self):
    # Human-readable name (e.g. 'NAME', 'OP'), mainly for debugging output.
    return token.tok_name[self._type]

  @property
  def string(self):
    return self._string

  @string.setter
  def string(self, value):
    self._string = value

  @property
  def offset(self):
    return self._offset
|
||||
|
||||
|
||||
def Tokenize(f):
  """Read tokens from a file-like object.

  Args:
    f: Any object that has a readline method.

  Returns:
    A collections.deque containing OffsetTokens. Deques are cheaper and easier
    to manipulate sequentially than lists.
  """
  f.seek(0)
  tokenize_tokens = tokenize.generate_tokens(f.readline)

  offset_tokens = collections.deque()
  for prev_token, next_token in _Pairwise(tokenize_tokens):
    token_type, string, (srow, scol), _, _ = next_token
    if not prev_token:
      # First token: anchored at the origin.
      offset_tokens.append(OffsetToken(token_type, string, (0, 0)))
    else:
      # prev_token[3] is the previous token's end position.
      erow, ecol = prev_token[3]
      if erow == srow:
        # Same line as the previous token: offset is just the column gap.
        offset_tokens.append(OffsetToken(token_type, string, (0, scol - ecol)))
      else:
        # New line: offset is (rows down, absolute column on the new line).
        offset_tokens.append(OffsetToken(
            token_type, string, (srow - erow, scol)))

  return offset_tokens
|
||||
|
||||
|
||||
def Untokenize(offset_tokens):
  """Return the string representation of an iterable of OffsetTokens."""
  # Make a copy. Don't modify the original.
  offset_tokens = collections.deque(offset_tokens)

  # Strip leading NL tokens.
  while offset_tokens[0].type == tokenize.NL:
    offset_tokens.popleft()

  # Strip leading vertical whitespace by zeroing the first token's row offset.
  first_token = offset_tokens.popleft()
  # Take care not to modify the existing token. Create a new one in its place.
  first_token = OffsetToken(first_token.type, first_token.string,
                            (0, first_token.offset[1]))
  offset_tokens.appendleft(first_token)

  # Convert OffsetTokens back to absolute-position tokenize tuples.
  tokenize_tokens = []
  row = 1
  col = 0
  for t in offset_tokens:
    offset_row, offset_col = t.offset
    if offset_row == 0:
      # Same line: advance the column by the relative gap.
      col += offset_col
    else:
      # New line: advance rows; the column offset is absolute on a new line.
      row += offset_row
      col = offset_col
    # Start and end positions are set equal; tokenize.untokenize reproduces
    # spacing from the positions.
    tokenize_tokens.append((t.type, t.string, (row, col), (row, col), None))

  # tokenize can't handle whitespace before line continuations.
  # So add a space.
  return tokenize.untokenize(tokenize_tokens).replace('\\\n', ' \\\n')
|
||||
@@ -0,0 +1,246 @@
|
||||
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import parser
|
||||
import symbol
|
||||
import sys
|
||||
import token
|
||||
import tokenize
|
||||
|
||||
from py_utils.refactor import offset_token
|
||||
|
||||
|
||||
class Snippet(object):
  """A node in the Python parse tree.

  The Python grammar is defined at:
  https://docs.python.org/2/reference/grammar.html

  There are two types of Snippets:
    TokenSnippets are leaf nodes containing actual text.
    Symbols are internal nodes representing higher-level groupings, and are
    defined by the left-hand sides of the BNFs in the above link.
  """
  @property
  def type(self):
    raise NotImplementedError()

  @property
  def type_name(self):
    raise NotImplementedError()

  @property
  def children(self):
    """Return a list of this node's children."""
    raise NotImplementedError()

  @property
  def tokens(self):
    """Return a tuple of the tokens this Snippet contains."""
    raise NotImplementedError()

  def PrintTree(self, indent=0, stream=sys.stdout):
    """Spew a pretty-printed parse tree. Mostly useful for debugging."""
    raise NotImplementedError()

  def __str__(self):
    return offset_token.Untokenize(self.tokens)

  def FindAll(self, snippet_type):
    """Yield this node and every descendant matching snippet_type.

    snippet_type may be an int (token/symbol type) or a Snippet subclass.
    """
    if isinstance(snippet_type, int):
      if self.type == snippet_type:
        yield self
    else:
      if isinstance(self, snippet_type):
        yield self

    for child in self.children:
      for snippet in child.FindAll(snippet_type):
        yield snippet

  def FindChild(self, snippet_type, **kwargs):
    """Return the first direct child matching snippet_type and all kwargs.

    Each keyword argument names an attribute that must equal the given value
    on the matching child.

    Raises:
      ValueError: if no matching child is found.
    """
    for child in self.children:
      if isinstance(snippet_type, int):
        if child.type != snippet_type:
          continue
      else:
        if not isinstance(child, snippet_type):
          continue

      # BUG FIX: iterating a dict yields keys only, so unpacking
      # (attribute, value) from `kwargs` raised ValueError whenever a filter
      # was passed; iterate items() to get the pairs.
      for attribute, value in kwargs.items():
        if getattr(child, attribute) != value:
          break
      else:
        return child
    raise ValueError('%s is not in %s. Children are: %s' %
                     (snippet_type, self, self.children))

  def FindChildren(self, snippet_type):
    """Yield all direct children matching snippet_type."""
    if isinstance(snippet_type, int):
      for child in self.children:
        if child.type == snippet_type:
          yield child
    else:
      for child in self.children:
        if isinstance(child, snippet_type):
          yield child
|
||||
|
||||
|
||||
class TokenSnippet(Snippet):
  """A Snippet containing a list of tokens.

  A list of tokens may start with any number of comments and non-terminating
  newlines, but must end with a syntactically meaningful token.
  """

  def __init__(self, token_type, tokens):
    # The parser may assign a more specific type than the tokenizer did,
    # because it has more context. E.g. the TokenSnippet type may be
    # token.DOT while the underlying token's type is token.OP.
    self._type = token_type
    self._tokens = tokens
    self._modified = False

  @classmethod
  def Create(cls, token_type, string, offset=(0, 0)):
    """Build a one-token snippet from raw text at the given offset."""
    offset_tok = offset_token.OffsetToken(token_type, string, offset)
    return cls(token_type, [offset_tok])

  @property
  def type(self):
    return self._type

  @property
  def type_name(self):
    return token.tok_name[self.type]

  @property
  def value(self):
    # The final token is the syntactically meaningful one; any preceding
    # tokens are comments or blank lines.
    return self._tokens[-1].string

  @value.setter
  def value(self, value):
    self._tokens[-1].string = value
    self._modified = True

  @property
  def children(self):
    return []

  @property
  def tokens(self):
    return tuple(self._tokens)

  @property
  def modified(self):
    return self._modified

  def PrintTree(self, indent=0, stream=sys.stdout):
    """Pretty-print this leaf and its raw token strings for debugging."""
    stream.write(' ' * indent)
    if not self.tokens:
      print(self.type_name, file=stream)
      return

    print('%-4s' % self.type_name, repr(self.tokens[0].string), file=stream)
    pad = ' ' * max(len(self.type_name), 4)
    for tok in self.tokens[1:]:
      stream.write(' ' * indent)
      print(pad, repr(tok.string), file=stream)
|
||||
|
||||
|
||||
class Symbol(Snippet):
  """A Snippet containing sub-Snippets.

  The possible types and type_names are defined in Python's symbol module."""

  def __init__(self, symbol_type, children):
    self._type = symbol_type
    self._children = children

  @property
  def type(self):
    return self._type

  @property
  def type_name(self):
    return symbol.sym_name[self.type]

  @property
  def children(self):
    return self._children

  @children.setter
  def children(self, value):  # pylint: disable=arguments-differ
    self._children = value

  @property
  def tokens(self):
    # A symbol's tokens are the concatenation of its children's tokens, in
    # source order.
    result = []
    for child in self.children:
      result.extend(child.tokens)
    return tuple(result)

  @property
  def modified(self):
    for child in self.children:
      if child.modified:
        return True
    return False

  def PrintTree(self, indent=0, stream=sys.stdout):
    """Pretty-print this subtree for debugging."""
    stream.write(' ' * indent)

    # If there's only one child, collapse it onto the same line.
    node = self
    while len(node.children) == 1 and len(node.children[0].children) == 1:
      print(node.type_name, end=' ', file=stream)
      node = node.children[0]

    print(node.type_name, file=stream)
    for child in node.children:
      child.PrintTree(indent + 2, stream)
|
||||
|
||||
|
||||
def Snippetize(f):
  """Return the syntax tree of the given file."""
  # Parse first, then re-tokenize the same stream; both passes need the file
  # from the beginning.
  f.seek(0)
  syntax_tree = parser.st2list(parser.suite(f.read()))
  token_queue = offset_token.Tokenize(f)

  root = _SnippetizeNode(syntax_tree, token_queue)
  # Every token must have been attached to some leaf of the tree.
  assert not token_queue
  return root
|
||||
|
||||
|
||||
def _SnippetizeNode(node, tokens):
  """Recursively convert one parser syntax-tree node into a Snippet.

  Args:
    node: A nested list as produced by parser.st2list(); node[0] is the
        grammar type, the rest are children (for symbols).
    tokens: A deque of offset tokens in source order; leaf construction
        consumes tokens from the front as it goes.

  Returns:
    A Symbol (internal node) or TokenSnippet (leaf) covering |node|.
  """
  # The parser module gives a syntax tree that discards comments,
  # non-terminating newlines, and whitespace information. Use the tokens given
  # by the tokenize module to annotate the syntax tree with the information
  # needed to exactly reproduce the original source code.
  node_type = node[0]

  if node_type >= token.NT_OFFSET:
    # Symbol.
    children = tuple(_SnippetizeNode(child, tokens) for child in node[1:])
    return Symbol(node_type, children)
  else:
    # Token. Attach any leading comments/blank lines to this leaf.
    grabbed_tokens = []
    while tokens and (
        tokens[0].type == tokenize.COMMENT or tokens[0].type == tokenize.NL):
      grabbed_tokens.append(tokens.popleft())

    # parser has 2 NEWLINEs right before the end.
    # tokenize has 0 or 1 depending on if the file has one.
    # Create extra nodes without consuming tokens to account for this.
    if node_type == token.NEWLINE:
      for tok in tokens:
        if tok.type == token.ENDMARKER:
          return TokenSnippet(node_type, grabbed_tokens)
        if tok.type != token.DEDENT:
          break

    # The tokenizer reports operators generically as OP; the parser knows the
    # specific operator type, so either must match.
    assert tokens[0].type == token.OP or node_type == tokens[0].type

    grabbed_tokens.append(tokens.popleft())
    return TokenSnippet(node_type, grabbed_tokens)
|
||||
@@ -0,0 +1,118 @@
|
||||
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import functools
|
||||
import os
|
||||
import sys
|
||||
|
||||
from py_utils import refactor
|
||||
|
||||
|
||||
def Run(sources, target, files_to_update):
  """Move modules and update imports.

  Args:
    sources: List of source module or package paths.
    target: Destination module or package path.
    files_to_update: Modules whose imports we should check for changes.
  """
  # TODO(dtu): Support moving classes and functions.
  moves = tuple(_Move(path, target) for path in sources)

  # First rewrite imports and references in the affected modules, then
  # physically relocate the files on disk.
  update_fn = functools.partial(_Update, moves)
  refactor.Transform(update_fn, files_to_update)

  for move in moves:
    os.rename(move.source_path, move.target_path)
|
||||
|
||||
|
||||
def _Update(moves, module):
  """Apply each move's import rewrite to every import statement in |module|.

  The first move that successfully updates a given import statement wins;
  unsupported import forms are reported to stderr but do not abort the
  overall transformation.
  """
  for statement in module.FindAll(refactor.Import):
    for move in moves:
      try:
        updated = move.UpdateImportAndReferences(module, statement)
      except NotImplementedError as e:
        print('Error updating %s: %s' % (module.file_path, e),
              file=sys.stderr)
        continue
      if updated:
        break
|
||||
|
||||
|
||||
class _Move(object):
  """A single source-to-target move of a module or package on disk.

  Precomputes the real source/target paths and knows how to rewrite import
  statements (and dotted references) that point into the moved module.
  """

  def __init__(self, source, target):
    self._source_path = os.path.realpath(source)
    self._target_path = os.path.realpath(target)

    # Moving into an existing directory means moving *under* it, keeping the
    # source's base name (like `mv` does).
    if os.path.isdir(self._target_path):
      self._target_path = os.path.join(
          self._target_path, os.path.basename(self._source_path))

  @property
  def source_path(self):
    # Absolute filesystem path of the module/package being moved.
    return self._source_path

  @property
  def target_path(self):
    # Absolute filesystem path the module/package will end up at.
    return self._target_path

  @property
  def source_module_path(self):
    # Dotted import path (e.g. 'pkg.mod') of the source.
    return _ModulePath(self._source_path)

  @property
  def target_module_path(self):
    # Dotted import path of the destination.
    return _ModulePath(self._target_path)

  def UpdateImportAndReferences(self, module, import_statement):
    """Update an import statement in a module and all its references.

    Args:
      module: The refactor.Module to update.
      import_statement: The refactor.Import to update.

    Returns:
      True if the import statement was updated, or False if the import
      statement needed no updating.
    """
    statement_path_parts = import_statement.path.split('.')
    source_path_parts = self.source_module_path.split('.')
    # Only imports whose dotted path starts with the moved module's path
    # are affected by this move.
    if source_path_parts != statement_path_parts[:len(source_path_parts)]:
      return False

    # Update import statement.
    old_name_parts = import_statement.name.split('.')
    new_name_parts = ([self.target_module_path] +
                      statement_path_parts[len(source_path_parts):])
    import_statement.path = '.'.join(new_name_parts)
    # NOTE: the name is re-read *after* the path assignment; presumably the
    # Import object recomputes its name from the new path.
    new_name = import_statement.name

    # Update references.
    for reference in module.FindAll(refactor.Reference):
      reference_parts = reference.value.split('.')
      if old_name_parts != reference_parts[:len(old_name_parts)]:
        continue

      new_reference_parts = [new_name] + reference_parts[len(old_name_parts):]
      reference.value = '.'.join(new_reference_parts)

    return True
|
||||
|
||||
|
||||
def _BaseDir(module_path):
|
||||
if not os.path.isdir(module_path):
|
||||
module_path = os.path.dirname(module_path)
|
||||
|
||||
while '__init__.py' in os.listdir(module_path):
|
||||
module_path = os.path.dirname(module_path)
|
||||
|
||||
return module_path
|
||||
|
||||
|
||||
def _ModulePath(module_path):
  """Return the dotted import path for the module or package at |module_path|."""
  # A package is identified by its directory, not its __init__.py file.
  head, tail = os.path.split(module_path)
  if tail == '__init__.py':
    module_path = head
  relative = os.path.relpath(module_path, _BaseDir(module_path))
  stem = os.path.splitext(relative)[0]
  return stem.replace(os.sep, '.')
|
||||
@@ -0,0 +1,61 @@
|
||||
# Copyright 2018 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
import functools
|
||||
import logging
|
||||
import time
|
||||
from six.moves import range # pylint: disable=redefined-builtin
|
||||
|
||||
|
||||
def RetryOnException(exc_type, retries):
  """Decorator to retry running a function if an exception is raised.

  Implements exponential backoff to wait between each retry attempt, starting
  with 1 second.

  Note: the default number of retries is defined on the decorator, the
  decorated function *must* also receive a "retries" argument (although its
  assigned default value is ignored), and clients of the function may override
  the actual number of retries at the call site.

  The "unused" retries argument on the decorated function must be given to
  keep pylint happy and to avoid breaking the Principle of Least Astonishment
  if the decorator were to change the signature of the function.

  For example:

    @retry_util.RetryOnException(OSError, retries=3)  # default no. of retries
    def ProcessSomething(thing, retries=None):  # this default value is ignored
      del retries  # Unused. Handled by the decorator.
      # Do your thing processing here, maybe sometimes raising exceptions.

    ProcessSomething(a_thing)  # retries 3 times.
    ProcessSomething(b_thing, retries=5)  # retries 5 times.

  Args:
    exc_type: An exception type (or a tuple of them), on which to retry.
    retries: Default number of extra attempts to try, the caller may also
      override this number. If an exception is raised during the last try,
      then the exception is not caught and passed back to the caller.
  """
  def Decorator(f):
    @functools.wraps(f)
    def Wrapper(*args, **kwargs):
      # Let an explicit call-site value win over the decorator default.
      kwargs.setdefault('retries', retries)
      delay = 1
      for _ in range(kwargs['retries']):
        try:
          return f(*args, **kwargs)
        except exc_type as exc:
          logging.warning(
              '%s raised %s, will retry in %d second%s ...',
              f.__name__, type(exc).__name__, delay, '' if delay == 1 else 's')
          time.sleep(delay)
          delay *= 2
      # Final attempt: let any exception propagate to the caller.
      return f(*args, **kwargs)
    return Wrapper
  return Decorator
|
||||
@@ -0,0 +1,119 @@
|
||||
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
import unittest
|
||||
|
||||
import mock
|
||||
|
||||
from py_utils import retry_util
|
||||
|
||||
|
||||
class RetryOnExceptionTest(unittest.TestCase):
  """Tests for retry_util.RetryOnException.

  time.sleep is mocked out so the tests run instantly and so the exact
  exponential backoff sequence can be asserted.
  """

  def setUp(self):
    # Counts how many times the decorated test function body ran.
    self.num_calls = 0
    # Patch time.sleep to make tests run faster (skip waits) and also check
    # that exponential backoff is implemented correctly.
    patcher = mock.patch('time.sleep')
    self.time_sleep = patcher.start()
    self.addCleanup(patcher.stop)

  def testNoExceptionsReturnImmediately(self):
    @retry_util.RetryOnException(Exception, retries=3)
    def Test(retries=None):
      del retries
      self.num_calls += 1
      return 'OK!'

    # The function is called once and returns the expected value.
    self.assertEqual(Test(), 'OK!')
    self.assertEqual(self.num_calls, 1)

  def testRaisesExceptionIfAlwaysFailing(self):
    @retry_util.RetryOnException(KeyError, retries=5)
    def Test(retries=None):
      del retries
      self.num_calls += 1
      raise KeyError('oops!')

    # The exception is eventually raised.
    with self.assertRaises(KeyError):
      Test()
    # The function is called the expected number of times.
    self.assertEqual(self.num_calls, 6)
    # Waits between retries do follow exponential backoff.
    self.assertEqual(
        self.time_sleep.call_args_list,
        [mock.call(i) for i in (1, 2, 4, 8, 16)])

  def testOtherExceptionsAreNotCaught(self):
    @retry_util.RetryOnException(KeyError, retries=3)
    def Test(retries=None):
      del retries
      self.num_calls += 1
      raise ValueError('oops!')

    # The exception is raised immediately on the first try.
    with self.assertRaises(ValueError):
      Test()
    self.assertEqual(self.num_calls, 1)

  def testCallerMayOverrideRetries(self):
    @retry_util.RetryOnException(KeyError, retries=3)
    def Test(retries=None):
      del retries
      self.num_calls += 1
      raise KeyError('oops!')

    with self.assertRaises(KeyError):
      Test(retries=10)
    # The value on the caller overrides the default on the decorator.
    self.assertEqual(self.num_calls, 11)

  def testCanEventuallySucceed(self):
    @retry_util.RetryOnException(KeyError, retries=5)
    def Test(retries=None):
      del retries
      self.num_calls += 1
      if self.num_calls < 3:
        raise KeyError('oops!')
      else:
        return 'OK!'

    # The value is returned after the expected number of calls.
    self.assertEqual(Test(), 'OK!')
    self.assertEqual(self.num_calls, 3)

  def testRetriesCanBeSwitchedOff(self):
    @retry_util.RetryOnException(KeyError, retries=5)
    def Test(retries=None):
      del retries
      self.num_calls += 1
      if self.num_calls < 3:
        raise KeyError('oops!')
      else:
        return 'OK!'

    # We fail immediately on the first try.
    with self.assertRaises(KeyError):
      Test(retries=0)
    self.assertEqual(self.num_calls, 1)

  def testCanRetryOnMultipleExceptions(self):
    @retry_util.RetryOnException((KeyError, ValueError), retries=3)
    def Test(retries=None):
      del retries
      self.num_calls += 1
      if self.num_calls == 1:
        raise KeyError('oops!')
      elif self.num_calls == 2:
        raise ValueError('uh oh!')
      else:
        return 'OK!'

    # Call eventually succeeds after enough tries.
    self.assertEqual(Test(retries=5), 'OK!')
    self.assertEqual(self.num_calls, 3)


if __name__ == '__main__':
  unittest.main()
|
||||
@@ -0,0 +1,42 @@
|
||||
# Copyright 2017 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
#
|
||||
# Shell scripting helpers (created for Telemetry dependency roll scripts).
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import os as _os
|
||||
import shutil as _shutil
|
||||
import subprocess as _subprocess
|
||||
import tempfile as _tempfile
|
||||
from contextlib import contextmanager as _contextmanager
|
||||
|
||||
@_contextmanager
|
||||
def ScopedChangeDir(new_path):
|
||||
old_path = _os.getcwd()
|
||||
_os.chdir(new_path)
|
||||
print('> cd', _os.getcwd())
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
_os.chdir(old_path)
|
||||
print('> cd', old_path)
|
||||
|
||||
@_contextmanager
def ScopedTempDir():
  """Context manager that runs its body inside a fresh temporary directory.

  The directory becomes the working directory for the body (via
  ScopedChangeDir) and is removed, with all its contents, on exit.
  """
  scratch = _tempfile.mkdtemp()
  try:
    with ScopedChangeDir(scratch):
      yield
  finally:
    _shutil.rmtree(scratch)
|
||||
|
||||
def CallProgram(path_parts, *args, **kwargs):
  '''Run the executable at os.path.join(*path_parts) with the given args.

  Any keyword arguments are passed to the child as environment variables, on
  top of a copy of the current environment. Raises CalledProcessError on a
  non-zero exit status.'''
  command = [_os.path.join(*path_parts)] + list(args)
  env = dict(_os.environ)
  env.update(kwargs)
  print('>', ' '.join(command))
  _subprocess.check_call(command, env=env)
|
||||
@@ -0,0 +1,27 @@
|
||||
# Copyright 2017 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
class SlotsMetaclass(type):
  """This metaclass requires all subclasses to define __slots__.

  Usage:
    class Foo(object):
      __metaclass__ = slots_metaclass.SlotsMetaclass
      __slots__ = '_property0', '_property1',

  __slots__ must be a tuple containing string names of all properties that the
  class contains. Defining __slots__ reduces memory usage, accelerates
  property access, and prevents dynamically adding unlisted properties.

  If you really need to dynamically add unlisted properties to a class with
  this metaclass, add '__dict__' to its __slots__.
  """

  def __new__(mcs, name, bases, attrs):
    # Enforce at class *definition* time, not instantiation time, so a
    # missing or malformed __slots__ fails fast and loudly.
    assert '__slots__' in attrs, 'Class "%s" must define __slots__' % name
    assert isinstance(attrs['__slots__'], tuple), '__slots__ must be a tuple'

    return super(SlotsMetaclass, mcs).__new__(mcs, name, bases, attrs)
|
||||
@@ -0,0 +1,47 @@
|
||||
# Copyright 2017 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
|
||||
import unittest
|
||||
|
||||
from py_utils import slots_metaclass
|
||||
import six
|
||||
|
||||
|
||||
class SlotsMetaclassUnittest(unittest.TestCase):
  """Checks that SlotsMetaclass enforces __slots__ on every subclass."""

  def testSlotsMetaclass(self):

    class NiceClass(six.with_metaclass(slots_metaclass.SlotsMetaclass, object)):
      __slots__ = '_nice',

      def __init__(self, nice):
        self._nice = nice

    NiceClass(42)

    with self.assertRaises(AssertionError):
      class NaughtyClass(NiceClass):
        def __init__(self, naughty):
          super(NaughtyClass, self).__init__(42)
          self._naughty = naughty

      # Metaclasses are called when the class is defined, so no need to
      # instantiate it.

    with self.assertRaises(AttributeError):
      class NaughtyClass2(NiceClass):
        __slots__ = ()

        def __init__(self, naughty):
          super(NaughtyClass2, self).__init__(42)
          self._naughty = naughty  # pylint: disable=assigning-non-slot

      # SlotsMetaclass is happy that __slots__ is defined, but python won't be
      # happy about assigning _naughty when the class is instantiated because it
      # isn't listed in __slots__, even if you disable the pylint error.
      NaughtyClass2(666)
|
||||
@@ -0,0 +1,59 @@
|
||||
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
|
||||
|
||||
@contextlib.contextmanager
def NamedTemporaryDirectory(suffix='', prefix='tmp', dir=None):
  """A context manager that manages a temporary directory.

  This is a context manager version of tempfile.mkdtemp; it takes exactly the
  same arguments and yields the path of the new directory. The directory and
  everything inside it are removed when the managed code exits.

  Useful for managing the lifetime of temporary state without holding an open
  file handle, e.g. when a child process creates files the parent then reads.
  """
  # This uses |dir| as a parameter name for consistency with mkdtemp.
  # pylint: disable=redefined-builtin
  temp_dir = tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=dir)
  try:
    yield temp_dir
  finally:
    shutil.rmtree(temp_dir)
|
||||
|
||||
|
||||
@contextlib.contextmanager
def NamedTemporaryFile(mode='w+b', suffix='', prefix='tmp'):
  """A context manager to hold a named temporary file.

  It's similar to Python's tempfile.NamedTemporaryFile except:
  - The file is _not_ deleted when you close the temporary file handle, so you
    can close it and then use the name of the file to re-open it later.
  - The file *is* always deleted when exiting the context managed code.
  """
  with NamedTemporaryDirectory() as temp_dir:
    handle = tempfile.NamedTemporaryFile(
        mode=mode, suffix=suffix, prefix=prefix, dir=temp_dir, delete=False)
    yield handle
|
||||
|
||||
|
||||
@contextlib.contextmanager
def TemporaryFileName(prefix='tmp', suffix=''):
  """A context manager to just get the path to a file that does not exist.

  The parent directory of the file is a newly created temporary directory,
  and the name of the file is just `prefix + suffix`. The file itself is not
  created; you are in fact guaranteed that it does not exist.

  The entire parent directory, possibly including the named temporary file and
  any sibling files, is entirely deleted when exiting the context managed
  code.
  """
  with NamedTemporaryDirectory() as temp_dir:
    yield os.path.join(temp_dir, prefix + suffix)
|
||||
@@ -0,0 +1,74 @@
|
||||
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import filecmp
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from py_utils import tempfile_ext
|
||||
from pyfakefs import fake_filesystem_unittest
|
||||
|
||||
|
||||
class NamedTemporaryDirectoryTest(fake_filesystem_unittest.TestCase):
  """Tests for tempfile_ext.NamedTemporaryDirectory on a fake filesystem."""

  def setUp(self):
    self.setUpPyfakefs()

  def tearDown(self):
    self.tearDownPyfakefs()

  def testBasic(self):
    with tempfile_ext.NamedTemporaryDirectory() as d:
      self.assertTrue(os.path.exists(d))
      self.assertTrue(os.path.isdir(d))
    # The directory must be gone once the context exits.
    self.assertFalse(os.path.exists(d))

  def testSuffix(self):
    test_suffix = 'foo'
    with tempfile_ext.NamedTemporaryDirectory(suffix=test_suffix) as d:
      self.assertTrue(os.path.basename(d).endswith(test_suffix))

  def testPrefix(self):
    test_prefix = 'bar'
    with tempfile_ext.NamedTemporaryDirectory(prefix=test_prefix) as d:
      self.assertTrue(os.path.basename(d).startswith(test_prefix))

  def testDir(self):
    test_dir = '/baz'
    self.fs.CreateDirectory(test_dir)
    with tempfile_ext.NamedTemporaryDirectory(dir=test_dir) as d:
      # assertEquals is a deprecated alias (removed in Python 3.12);
      # use assertEqual.
      self.assertEqual(test_dir, os.path.dirname(d))
|
||||
|
||||
|
||||
class TemporaryFilesTest(fake_filesystem_unittest.TestCase):
  """Tests for tempfile_ext temporary-file helpers on a fake filesystem."""

  def setUp(self):
    self.setUpPyfakefs()

  def tearDown(self):
    self.tearDownPyfakefs()

  def testNamedTemporaryFile(self):
    with tempfile_ext.NamedTemporaryFile() as f:
      self.assertTrue(os.path.isfile(f.name))
      # The default mode is 'w+b', so the payload must be bytes; writing a
      # str here raised TypeError under Python 3.
      f.write(b'<data>')
      f.close()
      self.assertTrue(os.path.exists(f.name))
      with open(f.name) as f2:
        self.assertEqual(f2.read(), '<data>')

    # The file (and its parent temp dir) are deleted on context exit.
    self.assertFalse(os.path.exists(f.name))

  def testTemporaryFileName(self):
    with tempfile_ext.TemporaryFileName('foo') as filepath:
      self.assertTrue(os.path.basename(filepath), 'foo')
      self.assertFalse(os.path.exists(filepath))

      with open(filepath, 'w') as f:
        f.write('<data>')
      self.assertTrue(os.path.exists(filepath))

      shutil.copyfile(filepath, filepath + '.bak')
      self.assertTrue(filecmp.cmp(filepath, filepath + '.bak'))

    # The whole parent directory, including sibling files, is removed.
    self.assertFalse(os.path.exists(filepath))
    self.assertFalse(os.path.exists(os.path.dirname(filepath)))
|
||||
@@ -0,0 +1,3 @@
|
||||
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
@@ -0,0 +1,33 @@
|
||||
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""More dummy exception subclasses used by core/discover.py's unit tests."""
|
||||
|
||||
# Import class instead of module explicitly so that inspect.getmembers() returns
|
||||
# two Exception subclasses in this current file.
|
||||
# Suppress complaints about unable to import class. The directory path is
|
||||
# added at runtime by telemetry test runner.
|
||||
#pylint: disable=import-error
|
||||
from discoverable_classes import discover_dummyclass
|
||||
|
||||
|
||||
class _PrivateDummyException(discover_dummyclass.DummyException):
  # Leading underscore: discovery should treat this class as private and skip
  # it, while still finding its public subclasses below.
  def __init__(self):
    super(_PrivateDummyException, self).__init__()
|
||||
|
||||
|
||||
class DummyExceptionImpl1(_PrivateDummyException):
  # Public subclass; discovery should report this one.
  def __init__(self):
    super(DummyExceptionImpl1, self).__init__()
|
||||
|
||||
|
||||
class DummyExceptionImpl2(_PrivateDummyException):
  # Second public subclass, so tests can check multiple results per file.
  def __init__(self):
    super(DummyExceptionImpl2, self).__init__()
|
||||
|
||||
|
||||
class DummyExceptionWithParameterImpl1(_PrivateDummyException):
  # Takes a constructor parameter, for tests that exercise discovery of
  # classes that cannot be instantiated with no arguments.
  def __init__(self, parameter):
    super(DummyExceptionWithParameterImpl1, self).__init__()
    del parameter
|
||||
@@ -0,0 +1,9 @@
|
||||
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""A dummy exception subclass used by core/discover.py's unit tests."""
|
||||
|
||||
class DummyException(Exception):
  # Base class that the discovery unit tests search for subclasses of.
  def __init__(self):
    super(DummyException, self).__init__()
|
||||
@@ -0,0 +1,11 @@
|
||||
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""A dummy exception subclass used by core/discover.py's unit tests."""
|
||||
from discoverable_classes import discover_dummyclass
|
||||
|
||||
class DummyExceptionWithParameterImpl2(discover_dummyclass.DummyException):
  # Takes two constructor parameters, for tests that exercise discovery of
  # classes that cannot be instantiated with no arguments.
  def __init__(self, parameter1, parameter2):
    super(DummyExceptionWithParameterImpl2, self).__init__()
    del parameter1, parameter2
|
||||
@@ -0,0 +1 @@
|
||||
This file is not executable.
|
||||
31
tools/adb/systrace/catapult/common/py_utils/py_utils/xvfb.py
Normal file
31
tools/adb/systrace/catapult/common/py_utils/py_utils/xvfb.py
Normal file
@@ -0,0 +1,31 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
import os
|
||||
import logging
|
||||
import subprocess
|
||||
import platform
|
||||
import time
|
||||
|
||||
|
||||
def ShouldStartXvfb():
  """Return True when a virtual X server should be started (i.e. on Linux)."""
  # TODO(crbug.com/973847): Note that you can locally change this to return
  # False to diagnose timeouts for dev server tests.
  return platform.system() == 'Linux'
|
||||
|
||||
|
||||
def StartXvfb():
  """Start an Xvfb server on display :99 and point $DISPLAY at it.

  Returns:
    The Xvfb subprocess.Popen handle on success, or None if the server
    exited before it could be used.
  """
  display = ':99'
  xvfb_command = ['Xvfb', display, '-screen', '0', '1024x769x24', '-ac']
  xvfb_process = subprocess.Popen(
      xvfb_command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
  # Give the server a moment to either come up or die.
  time.sleep(0.2)
  returncode = xvfb_process.poll()
  if returncode is not None:
    logging.error('Xvfb did not start, returncode: %s, stdout:\n%s',
                  returncode, xvfb_process.stdout.read())
    return None
  os.environ['DISPLAY'] = display
  return xvfb_process
|
||||
Reference in New Issue
Block a user