This commit is contained in:
2021-04-19 20:16:55 +02:00
commit a0ff94dca2
839 changed files with 198976 additions and 0 deletions

View File

@@ -0,0 +1,33 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit hook run at upload time; delegates to the shared checks."""
  return _CommonChecks(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit hook run at commit time; delegates to the shared checks."""
  return _CommonChecks(input_api, output_api)
def _CommonChecks(input_api, output_api):
  """Runs pylint over this project and returns any presubmit findings."""
  pylint_tests = input_api.canned_checks.GetPylint(
      input_api, output_api, extra_paths_list=_GetPathsToPrepend(input_api),
      pylintrc='pylintrc')
  findings = []
  findings += input_api.RunTests(pylint_tests)
  return findings
def _GetPathsToPrepend(input_api):
project_dir = input_api.PresubmitLocalPath()
catapult_dir = input_api.os_path.join(project_dir, '..')
return [
project_dir,
input_api.os_path.join(catapult_dir, 'common', 'py_utils'),
input_api.os_path.join(catapult_dir, 'third_party', 'mock'),
input_api.os_path.join(catapult_dir, 'third_party', 'pyfakefs'),
input_api.os_path.join(catapult_dir, 'third_party', 'zipfile'),
]

View File

@@ -0,0 +1,31 @@
#!/usr/bin/env python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs all Python unit tests in dependency_manager/."""
import os
import sys
# Absolute path to the catapult checkout root, computed relative to this file.
_CATAPULT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Make the vendored copy of mock importable before the tests run.
sys.path.append(os.path.join(_CATAPULT, 'third_party', 'mock'))
def main():
  """Installs git hooks (unless opted out) and runs the unit test suite.

  Returns:
    The exit code produced by the typ test runner.
  """
  sys.path.append(_CATAPULT)

  # Hook installation can be skipped with --no-install-hooks; the flag is
  # removed from argv so the test runner never sees it.
  from hooks import install
  if '--no-install-hooks' in sys.argv:
    sys.argv.remove('--no-install-hooks')
  else:
    install.InstallHooks()

  from catapult_build import run_with_typ
  return run_with_typ.Run(
      os.path.join(_CATAPULT, 'dependency_manager'), path=[_CATAPULT])


if __name__ == '__main__':
  sys.exit(main())

View File

@@ -0,0 +1,37 @@
#! /usr/bin/env python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import os
import sys
# Make the parent directory importable so `dependency_manager` resolves when
# this script is run directly.
sys.path.append(
    os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from dependency_manager import base_config
def UpdateDependency(dependency, platform, path, config):
  """Uploads |path| as the new |dependency| for |platform| in |config|.

  Opens the config writable and immediately executes the cloud storage
  update job (upload + config rewrite).
  """
  c = base_config.BaseConfig(config, writable=True)
  c.AddCloudStorageDependencyUpdateJob(
      dependency, platform, path, version=None, execute_job=True)
def main(raw_args):
  """Parses the command line and performs a single dependency update.

  Returns:
    0 on success.
  """
  arg_parser = argparse.ArgumentParser()
  # All four flags are mandatory; paths are canonicalized by argparse.
  option_specs = (
      ('--config', {'required': True, 'type': os.path.realpath,
                    'help': 'Path to the dependency configuration file.'}),
      ('--dependency', {'required': True,
                        'help': 'Dependency name.'}),
      ('--path', {'required': True, 'type': os.path.realpath,
                  'help': 'Path to the new dependency.'}),
      ('--platform', {'required': True,
                      'help': 'Platform to update.'}),
  )
  for flag, kwargs in option_specs:
    arg_parser.add_argument(flag, **kwargs)
  options = arg_parser.parse_args(raw_args)

  UpdateDependency(options.dependency, options.platform, options.path,
                   options.config)
  return 0


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))

View File

@@ -0,0 +1,43 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
# Root of the catapult checkout, derived from this file's location.
CATAPULT_PATH = os.path.dirname(os.path.dirname(os.path.dirname(
    os.path.abspath(__file__))))
# Vendored third-party packages live under third_party/.
CATAPULT_THIRD_PARTY_PATH = os.path.join(CATAPULT_PATH, 'third_party')
DEPENDENCY_MANAGER_PATH = os.path.join(CATAPULT_PATH, 'dependency_manager')
def _AddDirToPythonPath(*path_parts):
path = os.path.abspath(os.path.join(*path_parts))
if os.path.isdir(path) and path not in sys.path:
sys.path.append(path)
# Ensure py_utils and the vendored test dependencies are importable before
# the package's submodule imports below execute.
_AddDirToPythonPath(CATAPULT_PATH, 'common', 'py_utils')
_AddDirToPythonPath(CATAPULT_THIRD_PARTY_PATH, 'mock')
_AddDirToPythonPath(CATAPULT_THIRD_PARTY_PATH, 'pyfakefs')
_AddDirToPythonPath(CATAPULT_THIRD_PARTY_PATH, 'zipfile')
_AddDirToPythonPath(DEPENDENCY_MANAGER_PATH)
# pylint: disable=unused-import,wrong-import-position
from .archive_info import ArchiveInfo
from .base_config import BaseConfig
from .cloud_storage_info import CloudStorageInfo
from .dependency_info import DependencyInfo
from .exceptions import CloudStorageError
from .exceptions import CloudStorageUploadConflictError
from .exceptions import EmptyConfigError
from .exceptions import FileNotFoundError
from .exceptions import NoPathFoundError
from .exceptions import ReadWriteError
from .exceptions import UnsupportedConfigFormatError
from .local_path_info import LocalPathInfo
from .manager import DependencyManager
# pylint: enable=unused-import

View File

@@ -0,0 +1,79 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import glob
import os
import shutil
from dependency_manager import exceptions
from dependency_manager import dependency_manager_util
class ArchiveInfo(object):
  """Container for the information needed to unzip a downloaded archive."""

  def __init__(self, archive_file, unzip_path, path_within_archive,
               stale_unzip_path_glob=None):
    """Initializes the archive info.

    Args:
      archive_file: Path to the archive file.
      unzip_path: Path to unzip the archive into. Assumes that this path
          is unique for the archive.
      path_within_archive: Specify if and how to handle zip archives
          downloaded from cloud_storage. Expected values:
            None: Do not unzip the file downloaded from cloud_storage.
            '.': Unzip the file downloaded from cloud_storage. The
                unzipped file/folder is the expected dependency.
            file_path: Unzip the file downloaded from cloud_storage.
                |file_path| is the path to the expected dependency,
                relative to the unzipped archive path.
      stale_unzip_path_glob: Optional argument specifying a glob matching
          string which matches directories that should be removed before this
          archive is extracted (if it is extracted at all).

    Raises:
      ValueError: If the archive file, unzip path, or derived dependency
          path is missing.
    """
    self._archive_file = archive_file
    self._unzip_path = unzip_path
    self._path_within_archive = path_within_archive
    self._dependency_path = os.path.join(
        self._unzip_path, self._path_within_archive)
    self._stale_unzip_path_glob = stale_unzip_path_glob
    if not self._has_minimum_data:
      raise ValueError(
          'Not enough information specified to initialize an archive info.'
          ' %s' % self)

  def GetUnzippedPath(self):
    """Returns the dependency path, extracting the archive first if needed.

    Raises:
      ArchiveError: If the expected dependency path is still missing after
          extraction.
    """
    if self.ShouldUnzipArchive():
      # Remove stale unzip results so directories for old hashes do not
      # accumulate on disk.
      if self._stale_unzip_path_glob:
        for path in glob.glob(self._stale_unzip_path_glob):
          shutil.rmtree(path, ignore_errors=True)
      # TODO(aiolos): Replace UnzipFile with zipfile.extractall once python
      # version 2.7.4 or later can safely be assumed.
      dependency_manager_util.UnzipArchive(
          self._archive_file, self._unzip_path)
      if self.ShouldUnzipArchive():
        # Extraction did not produce the expected dependency path.
        raise exceptions.ArchiveError(
            "Expected path '%s' was not extracted from archive '%s'." %
            (self._dependency_path, self._archive_file))
    return self._dependency_path

  def ShouldUnzipArchive(self):
    """Returns True when the dependency path does not yet exist on disk."""
    if not self._has_minimum_data:
      raise exceptions.ArchiveError(
          'Missing needed info to unzip archive. Known data: %s' % self)
    return not os.path.exists(self._dependency_path)

  @property
  def _has_minimum_data(self):
    # _dependency_path is derived from _unzip_path and _path_within_archive,
    # so checking it also covers a missing path_within_archive.
    return all([self._archive_file, self._unzip_path,
                self._dependency_path])

  def __repr__(self):
    return (
        'ArchiveInfo(archive_file=%s, unzip_path=%s, path_within_archive=%s, '
        'dependency_path=%s)' % (
            self._archive_file, self._unzip_path, self._path_within_archive,
            self._dependency_path))

View File

@@ -0,0 +1,416 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import logging
import os
from py_utils import cloud_storage
from dependency_manager import archive_info
from dependency_manager import cloud_storage_info
from dependency_manager import dependency_info
from dependency_manager import exceptions
from dependency_manager import local_path_info
from dependency_manager import uploader
class BaseConfig(object):
  """A basic config class for use with the DependencyManager.

  Initiated with a json file in the following format:

            { "config_type": "BaseConfig",
              "dependencies": {
                "dep_name1": {
                  "cloud_storage_base_folder": "base_folder1",
                  "cloud_storage_bucket": "bucket1",
                  "file_info": {
                    "platform1": {
                        "cloud_storage_hash": "hash_for_platform1",
                        "download_path": "download_path111",
                        "version_in_cs": "1.11.1.11.",
                        "local_paths": ["local_path1110", "local_path1111"]
                    },
                    "platform2": {
                        "cloud_storage_hash": "hash_for_platform2",
                        "download_path": "download_path2",
                        "local_paths": ["local_path20", "local_path21"]
                    },
                    ...
                  }
                },
                "dependency_name_2": {
                    ...
                },
                ...
              }
            }

  Required fields: "dependencies" and "config_type".
  Note that config_type must be "BaseConfig".

  Assumptions:
      "cloud_storage_base_folder" is a top level folder in the given
        "cloud_storage_bucket" where all of the dependency files are stored
        at "dependency_name"_"cloud_storage_hash".

      "download_path" and all paths in "local_paths" are relative to the
        config file's location.

      All or none of the following cloud storage related fields must be
        included in each platform dictionary:
        "cloud_storage_hash", "download_path", "cs_remote_path"

      "version_in_cs" is an optional cloud storage field, but is dependent
        on the above cloud storage related fields.

  Also note that platform names are often of the form os_architecture.
  Ex: "win_AMD64"

  More information on the fields can be found in dependency_info.py
  """

  def __init__(self, file_path, writable=False):
    """Initializes a BaseConfig for the DependencyManager.

    Args:
      file_path: Path to a file containing a json dictionary in the expected
          json format for this config class. Base format expected:

          { "config_type": config_type,
            "dependencies": dependencies_dict }

          config_type: must match the return value of GetConfigType.
          dependencies: A dictionary with the information needed to
              create dependency_info instances for the given
              dependencies.

          See dependency_info.py for more information.
      writable: False: This config will be used to lookup information.
          True: This config will be used to update information.

    Raises:
      ValueError: If |file_path| is falsy, or an existing config file has an
          unexpected config_type.
      EmptyConfigError: If the config file is missing (read-only mode) or
          contains no data.
    """
    self._config_path = file_path
    self._writable = writable
    self._pending_uploads = []
    if not self._config_path:
      raise ValueError('Must supply config file path.')
    if not os.path.exists(self._config_path):
      if not writable:
        raise exceptions.EmptyConfigError(file_path)
      # A writable config may start from scratch. Persist an empty config
      # immediately so later reads (e.g. _IsDirty) find a file on disk.
      self._config_data = {}
      self._WriteConfigToFile(self._config_path, dependencies=self._config_data)
    else:
      with open(file_path, 'r') as f:
        config_data = json.load(f)
      if not config_data:
        raise exceptions.EmptyConfigError(file_path)
      config_type = config_data.pop('config_type', None)
      if config_type != self.GetConfigType():
        raise ValueError(
            'Supplied config_type (%s) is not the expected type (%s) in file '
            '%s' % (config_type, self.GetConfigType(), file_path))
      self._config_data = config_data.get('dependencies', {})

  def IterDependencyInfo(self):
    """Yields a DependencyInfo for each dependency/platform pair.

    Raises:
      ReadWriteError: If called when the config is writable.
      ValueError: If any of the dependencies contain partial information for
          downloading from cloud_storage. (See dependency_info.py)
      ConfigError: If a platform has a download path but no cloud storage
          hash.
    """
    if self._writable:
      raise exceptions.ReadWriteError(
          'Trying to read dependency info from a writable config. File for '
          'config: %s' % self._config_path)
    base_path = os.path.dirname(self._config_path)
    for dependency in self._config_data:
      dependency_dict = self._config_data.get(dependency)
      platforms_dict = dependency_dict.get('file_info', {})
      for platform in platforms_dict:
        platform_info = platforms_dict.get(platform)

        # Local paths are resolved relative to the config file's directory.
        local_info = None
        local_paths = platform_info.get('local_paths', [])
        if local_paths:
          paths = []
          for path in local_paths:
            path = self._FormatPath(path)
            paths.append(os.path.abspath(os.path.join(base_path, path)))
          local_info = local_path_info.LocalPathInfo(paths)

        cs_info = None
        cs_bucket = dependency_dict.get('cloud_storage_bucket')
        cs_base_folder = dependency_dict.get('cloud_storage_base_folder', '')
        download_path = platform_info.get('download_path')
        if download_path:
          download_path = self._FormatPath(download_path)
          download_path = os.path.abspath(
              os.path.join(base_path, download_path))

          cs_hash = platform_info.get('cloud_storage_hash')
          if not cs_hash:
            # Bug fix: the message must be %-formatted; passing the values as
            # extra constructor arguments left the placeholders unfilled.
            raise exceptions.ConfigError(
                'Dependency %s has cloud storage info on platform %s, but is '
                'missing a cloud storage hash.' % (dependency, platform))
          cs_remote_path = self._CloudStorageRemotePath(
              dependency, cs_hash, cs_base_folder)
          version_in_cs = platform_info.get('version_in_cs')

          # Archives are extracted next to the download into a directory
          # named after the hash; stale directories for older hashes match
          # the 40-hex-digit glob and are removed at extraction time.
          zip_info = None
          path_within_archive = platform_info.get('path_within_archive')
          if path_within_archive:
            unzip_path = os.path.abspath(
                os.path.join(os.path.dirname(download_path),
                             '%s_%s_%s' % (dependency, platform, cs_hash)))
            stale_unzip_path_glob = os.path.abspath(
                os.path.join(os.path.dirname(download_path),
                             '%s_%s_%s' % (dependency, platform,
                                           '[0-9a-f]' * 40)))
            zip_info = archive_info.ArchiveInfo(
                download_path, unzip_path, path_within_archive,
                stale_unzip_path_glob)

          cs_info = cloud_storage_info.CloudStorageInfo(
              cs_bucket, cs_hash, download_path, cs_remote_path,
              version_in_cs=version_in_cs, archive_info=zip_info)

        dep_info = dependency_info.DependencyInfo(
            dependency, platform, self._config_path,
            local_path_info=local_info, cloud_storage_info=cs_info)
        yield dep_info

  @classmethod
  def GetConfigType(cls):
    """Returns the config_type string this class expects in config files."""
    return 'BaseConfig'

  @property
  def config_path(self):
    return self._config_path

  def AddNewDependency(
      self, dependency, cloud_storage_base_folder, cloud_storage_bucket):
    """Adds a new dependency entry with empty platform file info.

    Raises:
      ReadWriteError: If the config is not writable.
      ValueError: If |dependency| already exists in the config.
    """
    self._ValidateIsConfigWritable()
    if dependency in self:
      raise ValueError('Config already contains dependency %s' % dependency)
    self._config_data[dependency] = {
        'cloud_storage_base_folder': cloud_storage_base_folder,
        'cloud_storage_bucket': cloud_storage_bucket,
        'file_info': {},
    }

  def SetDownloadPath(self, dependency, platform, download_path):
    """Sets the download path for |dependency| on |platform|.

    Raises:
      ReadWriteError: If the config is not writable.
      ValueError: If |dependency| is not in the config.
    """
    self._ValidateIsConfigWritable()
    if dependency not in self:
      raise ValueError('Config does not contain dependency %s' % dependency)
    platform_dicts = self._config_data[dependency]['file_info']
    if platform not in platform_dicts:
      platform_dicts[platform] = {}
    platform_dicts[platform]['download_path'] = download_path

  def AddCloudStorageDependencyUpdateJob(
      self, dependency, platform, dependency_path, version=None,
      execute_job=True):
    """Updates the file downloaded from cloud storage for a dependency/platform.

    Upload a new file to cloud storage for the given dependency and platform
    pair and update the cloud storage hash and the version for the given pair.

    Example usage:
      The following should update the default platform for 'dep_name':
          UpdateCloudStorageDependency('dep_name', 'default', 'path/to/file')

      The following should update both the mac and win platforms for
      'dep_name', or neither if either update fails:
          UpdateCloudStorageDependency(
              'dep_name', 'mac_x86_64', 'path/to/mac/file', execute_job=False)
          UpdateCloudStorageDependency(
              'dep_name', 'win_AMD64', 'path/to/win/file', execute_job=False)
          ExecuteUpdateJobs()

    Args:
      dependency: The dependency to update.
      platform: The platform to update the dependency info for.
      dependency_path: Path to the new dependency to be used.
      version: Version of the updated dependency, for checking future updates
          against.
      execute_job: True if the config should be written to disk and the file
          should be uploaded to cloud storage after the update. False if
          multiple updates should be performed atomically. Must call
          ExecuteUpdateJobs after all non-executed jobs are added to complete
          the update.

    Raises:
      ReadWriteError: If the config was not initialized as writable, or if
          |execute_job| is True but the config has update jobs still pending
          execution.
      ValueError: If no information exists in the config for |dependency| on
          |platform|.
    """
    self._ValidateIsConfigUpdatable(
        execute_job=execute_job, dependency=dependency, platform=platform)
    cs_hash = cloud_storage.CalculateHash(dependency_path)
    if version:
      self._SetPlatformData(dependency, platform, 'version_in_cs', version)
    self._SetPlatformData(dependency, platform, 'cloud_storage_hash', cs_hash)

    cs_base_folder = self._GetPlatformData(
        dependency, platform, 'cloud_storage_base_folder')
    cs_bucket = self._GetPlatformData(
        dependency, platform, 'cloud_storage_bucket')
    cs_remote_path = self._CloudStorageRemotePath(
        dependency, cs_hash, cs_base_folder)
    self._pending_uploads.append(uploader.CloudStorageUploader(
        cs_bucket, cs_remote_path, dependency_path))
    if execute_job:
      self.ExecuteUpdateJobs()

  def ExecuteUpdateJobs(self, force=False):
    """Writes all config changes to the config_path specified in __init__.

    Upload all files pending upload and then write the updated config to
    file. Attempt to remove all uploaded files on failure.

    Args:
      force: True if files should be uploaded to cloud storage even if a
          file already exists in the upload location.

    Returns:
      True: if the config was dirty and the upload succeeded.
      False: if the config was not dirty.

    Raises:
      CloudStorageUploadConflictError: If |force| is False and the potential
          upload location of a file already exists.
      CloudStorageError: If copying an existing file to the backup location
          or uploading a new file fails.
    """
    self._ValidateIsConfigUpdatable()
    if not self._IsDirty():
      logging.info('ExecuteUpdateJobs called on clean config')
      return False
    if not self._pending_uploads:
      logging.debug('No files needing upload.')
    else:
      try:
        for item_pending_upload in self._pending_uploads:
          item_pending_upload.Upload(force)
        self._WriteConfigToFile(self._config_path, self._config_data)
        self._pending_uploads = []
      except:
        # Attempt to rollback the update in any instance of failure, even user
        # interrupt via Ctrl+C; but don't consume the exception.
        logging.error('Update failed, attempting to roll it back.')
        for upload_item in reversed(self._pending_uploads):
          upload_item.Rollback()
        raise
    return True

  def GetVersion(self, dependency, platform):
    """Returns the version information for the given dependency/platform."""
    return self._GetPlatformData(
        dependency, platform, data_type='version_in_cs')

  def __contains__(self, dependency):
    """Returns whether this config contains |dependency|.

    Args:
      dependency: the string name of dependency
    """
    return dependency in self._config_data

  def _IsDirty(self):
    # The config is dirty when the in-memory dependency data differs from
    # what is currently stored on disk.
    with open(self._config_path, 'r') as fstream:
      curr_config_data = json.load(fstream)
    curr_config_data = curr_config_data.get('dependencies', {})
    return self._config_data != curr_config_data

  def _SetPlatformData(self, dependency, platform, data_type, data):
    self._ValidateIsConfigWritable()
    dependency_dict = self._config_data.get(dependency, {})
    platform_dict = dependency_dict.get('file_info', {}).get(platform)
    if not platform_dict:
      raise ValueError('No platform data for platform %s on dependency %s' %
                       (platform, dependency))
    # Bucket and base folder live at the dependency level, not per-platform.
    if (data_type == 'cloud_storage_bucket' or
        data_type == 'cloud_storage_base_folder'):
      self._config_data[dependency][data_type] = data
    else:
      self._config_data[dependency]['file_info'][platform][data_type] = data

  def _GetPlatformData(self, dependency, platform, data_type=None):
    dependency_dict = self._config_data.get(dependency, {})
    if not dependency_dict:
      raise ValueError('Dependency %s is not in config.' % dependency)
    platform_dict = dependency_dict.get('file_info', {}).get(platform)
    if not platform_dict:
      raise ValueError('No platform data for platform %s on dependency %s' %
                       (platform, dependency))
    if data_type:
      # Bucket and base folder live at the dependency level, not per-platform.
      if (data_type == 'cloud_storage_bucket' or
          data_type == 'cloud_storage_base_folder'):
        return dependency_dict.get(data_type)
      return platform_dict.get(data_type)
    return platform_dict

  def _ValidateIsConfigUpdatable(
      self, execute_job=False, dependency=None, platform=None):
    self._ValidateIsConfigWritable()
    if self._IsDirty() and execute_job:
      # Bug fix: a space was missing between the two string fragments
      # ("withoutusing").
      raise exceptions.ReadWriteError(
          'A change has already been made to this config. Either call without '
          'using the execute_job option or first call ExecuteUpdateJobs().')
    if dependency and not self._config_data.get(dependency):
      raise ValueError('Cannot update information because dependency %s does '
                       'not exist.' % dependency)
    if platform and not self._GetPlatformData(dependency, platform):
      raise ValueError('No dependency info is available for the given '
                       'dependency: %s' % dependency)

  def _ValidateIsConfigWritable(self):
    if not self._writable:
      raise exceptions.ReadWriteError(
          'Trying to update the information from a read-only config. '
          'File for config: %s' % self._config_path)

  @staticmethod
  def _CloudStorageRemotePath(dependency, cs_hash, cs_base_folder):
    # Remote files are named <dependency>_<hash>, optionally nested under
    # the dependency's base folder.
    cs_remote_file = '%s_%s' % (dependency, cs_hash)
    cs_remote_path = cs_remote_file if not cs_base_folder else (
        '%s/%s' % (cs_base_folder, cs_remote_file))
    return cs_remote_path

  @classmethod
  def _FormatPath(cls, file_path):
    """Formats |file_path| for the current file system.

    We may be downloading files for another platform, so paths must be
    downloadable on the current system.
    """
    if not file_path:
      return file_path
    if os.path.sep != '\\':
      return file_path.replace('\\', os.path.sep)
    elif os.path.sep != '/':
      return file_path.replace('/', os.path.sep)
    return file_path

  @classmethod
  def _WriteConfigToFile(cls, file_path, dependencies=None):
    json_dict = cls._GetJsonDict(dependencies)
    file_dir = os.path.dirname(file_path)
    if not os.path.exists(file_dir):
      os.makedirs(file_dir)
    with open(file_path, 'w') as outfile:
      json.dump(
          json_dict, outfile, indent=2, sort_keys=True, separators=(',', ': '))
    return json_dict

  @classmethod
  def _GetJsonDict(cls, dependencies=None):
    dependencies = dependencies or {}
    json_dict = {'config_type': cls.GetConfigType(),
                 'dependencies': dependencies}
    return json_dict

View File

@@ -0,0 +1,110 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import errno
import os
import stat
from py_utils import cloud_storage
from dependency_manager import exceptions
class CloudStorageInfo(object):
  """Describes how to fetch a single dependency file from cloud storage.

  Bundles the bucket/hash/remote-path triple identifying the file in cloud
  storage with the local download destination and optional archive handling.
  """

  def __init__(self, cs_bucket, cs_hash, download_path, cs_remote_path,
               version_in_cs=None, archive_info=None):
    """Initializes the cloud storage info.

    Args:
      cs_bucket: The cloud storage bucket the dependency is located in.
      cs_hash: The hash of the file stored in cloud storage.
      download_path: Where the file should be downloaded to.
      cs_remote_path: Where the file is stored in the cloud storage bucket.
      version_in_cs: The version of the file stored in cloud storage.
      archive_info: An instance of ArchiveInfo if this dependency is an
          archive. Else None.

    Raises:
      ValueError: If bucket, hash, download path, or remote path is missing;
          all four are required.
    """
    self._cs_bucket = cs_bucket
    self._cs_hash = cs_hash
    self._cs_remote_path = cs_remote_path
    self._download_path = download_path
    self._version_in_cs = version_in_cs
    self._archive_info = archive_info
    if not self._has_minimum_data:
      raise ValueError(
          'Not enough information specified to initialize a cloud storage info.'
          ' %s' % self)

  def DependencyExistsInCloudStorage(self):
    """Returns True if the remote file is present in the bucket."""
    return cloud_storage.Exists(self._cs_bucket, self._cs_remote_path)

  def GetRemotePath(self):
    """Gets the path to a downloaded version of the dependency.

    May not download the file if it has already been downloaded.
    Will unzip the downloaded file if a non-empty archive_info was passed in
    at init.

    Returns: A path to an executable that was stored in cloud_storage, or
        None if not found.

    Raises:
      CredentialsError: If cloud_storage credentials aren't configured.
      PermissionError: If cloud_storage credentials are configured, but not
          with an account that has permission to download the needed file.
      NotFoundError: If the needed file does not exist where expected in
          cloud_storage or the downloaded zip file.
      ServerError: If an internal server error is hit while downloading the
          needed file.
      CloudStorageError: If another error occured while downloading the
          remote path.
      FileNotFoundError: If the download was otherwise unsuccessful.
    """
    if not self._has_minimum_data:
      return None

    target_dir = os.path.dirname(self._download_path)
    try:
      os.makedirs(target_dir)
    except OSError as err:
      # The directory may already exist (possibly created concurrently by
      # another process); only that error is safe to ignore.
      if err.errno != errno.EEXIST:
        raise

    cloud_storage.GetIfHashChanged(
        self._cs_remote_path, self._download_path, self._cs_bucket,
        self._cs_hash)
    if not os.path.exists(self._download_path):
      raise exceptions.FileNotFoundError(self._download_path)

    if self.has_archive_info:
      result_path = self._archive_info.GetUnzippedPath()
    else:
      result_path = self._download_path
      # Downloaded binaries need to be executable by the owner.
      os.chmod(result_path, os.stat(result_path).st_mode | stat.S_IXUSR)
    return os.path.abspath(result_path)

  @property
  def version_in_cs(self):
    """Version string recorded for the file in cloud storage, or None."""
    return self._version_in_cs

  @property
  def _has_minimum_data(self):
    required = (self._cs_bucket, self._cs_remote_path, self._download_path,
                self._cs_hash)
    return all(required)

  @property
  def has_archive_info(self):
    """True when the download must be unzipped to obtain the dependency."""
    return bool(self._archive_info)

  def __repr__(self):
    return (
        'CloudStorageInfo(download_path=%s, cs_remote_path=%s, cs_bucket=%s, '
        'cs_hash=%s, version_in_cs=%s, archive_info=%s)' % (
            self._download_path, self._cs_remote_path, self._cs_bucket,
            self._cs_hash, self._version_in_cs, self._archive_info))

View File

@@ -0,0 +1,233 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import stat
import unittest
import mock
from pyfakefs import fake_filesystem_unittest
from py_utils import cloud_storage
from dependency_manager import archive_info
from dependency_manager import cloud_storage_info
from dependency_manager import exceptions
class CloudStorageInfoTest(unittest.TestCase):
  """Tests for CloudStorageInfo constructor validation and attributes."""

  def testInitCloudStorageInfoErrors(self):
    # Must specify cloud storage information atomically: every partial
    # combination of bucket/hash/download_path/remote_path must raise.
    self.assertRaises(ValueError, cloud_storage_info.CloudStorageInfo,
                      None, None, None, None)
    self.assertRaises(ValueError, cloud_storage_info.CloudStorageInfo,
                      'cs_bucket', None, None, None)
    self.assertRaises(ValueError, cloud_storage_info.CloudStorageInfo,
                      None, 'cs_hash', None, None)
    self.assertRaises(ValueError, cloud_storage_info.CloudStorageInfo,
                      None, None, 'download_path', None)
    self.assertRaises(ValueError, cloud_storage_info.CloudStorageInfo,
                      None, None, None, 'cs_remote_path')
    self.assertRaises(ValueError, cloud_storage_info.CloudStorageInfo,
                      None, 'cs_hash', 'download_path', 'cs_remote_path')
    self.assertRaises(ValueError, cloud_storage_info.CloudStorageInfo,
                      'cs_bucket', None, 'download_path', 'cs_remote_path')
    self.assertRaises(ValueError, cloud_storage_info.CloudStorageInfo,
                      'cs_bucket', 'cs_hash', None, 'cs_remote_path')
    self.assertRaises(ValueError, cloud_storage_info.CloudStorageInfo,
                      'cs_bucket', 'cs_hash', 'download_path', None)

  def testInitWithVersion(self):
    # A version alone does not satisfy the required cloud storage fields.
    self.assertRaises(
        ValueError, cloud_storage_info.CloudStorageInfo, None, None, None,
        'cs_remote_path', version_in_cs='version_in_cs')
    self.assertRaises(
        ValueError, cloud_storage_info.CloudStorageInfo, None, 'cs_hash',
        'download_path', 'cs_remote_path', version_in_cs='version_in_cs')

    # With all required fields present, attributes are stored as given.
    cs_info = cloud_storage_info.CloudStorageInfo(
        'cs_bucket', 'cs_hash', 'download_path', 'cs_remote_path',
        version_in_cs='version_in_cs')
    self.assertEqual('cs_hash', cs_info._cs_hash)
    self.assertEqual('cs_bucket', cs_info._cs_bucket)
    self.assertEqual('cs_remote_path', cs_info._cs_remote_path)
    self.assertEqual('download_path', cs_info._download_path)
    self.assertEqual('version_in_cs', cs_info._version_in_cs)

  def testInitWithArchiveInfoErrors(self):
    # Supplying an archive_info does not relax the required fields.
    zip_info = archive_info.ArchiveInfo(
        'download_path', 'unzip_location', 'path_within_archive')
    self.assertRaises(
        ValueError, cloud_storage_info.CloudStorageInfo, None, None, None, None,
        archive_info=zip_info)
    self.assertRaises(
        ValueError, cloud_storage_info.CloudStorageInfo, None, None, None,
        'cs_remote_path', archive_info=zip_info)
    self.assertRaises(
        ValueError, cloud_storage_info.CloudStorageInfo, 'cs_bucket', 'cs_hash',
        None, 'cs_remote_path', archive_info=zip_info)
    self.assertRaises(ValueError, cloud_storage_info.CloudStorageInfo,
                      'cs_bucket', 'cs_hash',
                      'cs_remote_path', None, version_in_cs='version',
                      archive_info=zip_info)

  def testInitWithArchiveInfo(self):
    zip_info = archive_info.ArchiveInfo(
        'download_path', 'unzip_location', 'path_within_archive')
    cs_info = cloud_storage_info.CloudStorageInfo(
        'cs_bucket', 'cs_hash', 'download_path', 'cs_remote_path',
        archive_info=zip_info)
    self.assertEqual('cs_hash', cs_info._cs_hash)
    self.assertEqual('cs_bucket', cs_info._cs_bucket)
    self.assertEqual('cs_remote_path', cs_info._cs_remote_path)
    self.assertEqual('download_path', cs_info._download_path)
    self.assertEqual(zip_info, cs_info._archive_info)
    self.assertFalse(cs_info._version_in_cs)

  def testInitWithVersionAndArchiveInfo(self):
    zip_info = archive_info.ArchiveInfo(
        'download_path', 'unzip_location', 'path_within_archive')
    cs_info = cloud_storage_info.CloudStorageInfo(
        'cs_bucket', 'cs_hash', 'download_path',
        'cs_remote_path', version_in_cs='version_in_cs',
        archive_info=zip_info)
    self.assertEqual('cs_hash', cs_info._cs_hash)
    self.assertEqual('cs_bucket', cs_info._cs_bucket)
    self.assertEqual('cs_remote_path', cs_info._cs_remote_path)
    self.assertEqual('download_path', cs_info._download_path)
    self.assertEqual(zip_info, cs_info._archive_info)
    self.assertEqual('version_in_cs', cs_info._version_in_cs)

  def testInitMinimumCloudStorageInfo(self):
    # Only the four required positional fields; optional ones stay unset.
    cs_info = cloud_storage_info.CloudStorageInfo(
        'cs_bucket',
        'cs_hash', 'download_path',
        'cs_remote_path')
    self.assertEqual('cs_hash', cs_info._cs_hash)
    self.assertEqual('cs_bucket', cs_info._cs_bucket)
    self.assertEqual('cs_remote_path', cs_info._cs_remote_path)
    self.assertEqual('download_path', cs_info._download_path)
    self.assertFalse(cs_info._version_in_cs)
    self.assertFalse(cs_info._archive_info)
class TestGetRemotePath(fake_filesystem_unittest.TestCase):
def setUp(self):
self.setUpPyfakefs()
self.config_path = '/test/dep_config.json'
self.fs.CreateFile(self.config_path, contents='{}')
self.download_path = '/foo/download_path'
self.fs.CreateFile(
self.download_path, contents='1010110', st_mode=stat.S_IWOTH)
self.cs_info = cloud_storage_info.CloudStorageInfo(
'cs_bucket', 'cs_hash', self.download_path, 'cs_remote_path',
version_in_cs='1.2.3.4',)
def tearDown(self):
self.tearDownPyfakefs()
@mock.patch(
'py_utils.cloud_storage.GetIfHashChanged')
def testGetRemotePathNoArchive(self, cs_get_mock):
def _GetIfHashChangedMock(cs_path, download_path, bucket, file_hash):
del cs_path, bucket, file_hash
if not os.path.exists(download_path):
self.fs.CreateFile(download_path, contents='1010001010101010110101')
cs_get_mock.side_effect = _GetIfHashChangedMock
# All of the needed information is given, and the downloaded path exists
# after calling cloud storage.
self.assertEqual(
os.path.abspath(self.download_path),
self.cs_info.GetRemotePath())
self.assertTrue(os.stat(self.download_path).st_mode & stat.S_IXUSR)
# All of the needed information is given, but the downloaded path doesn't
# exists after calling cloud storage.
self.fs.RemoveObject(self.download_path)
cs_get_mock.side_effect = [True] # pylint: disable=redefined-variable-type
self.assertRaises(
exceptions.FileNotFoundError, self.cs_info.GetRemotePath)
  @mock.patch(
      'dependency_manager.dependency_manager_util.UnzipArchive')
  @mock.patch(
      'dependency_manager.cloud_storage_info.cloud_storage.GetIfHashChanged') # pylint: disable=line-too-long
  def testGetRemotePathWithArchive(self, cs_get_mock, unzip_mock):
    """GetRemotePath with archive_info downloads, unzips, and caches.

    Verifies that stale unzip directories matching the glob are removed,
    the extracted dependency is made user read/write/executable, and a
    second call reuses the extracted files without unzipping again.
    """
    def _GetIfHashChangedMock(cs_path, download_path, bucket, file_hash):
      # Simulate a real download: create the archive only if it is missing.
      del cs_path, bucket, file_hash
      if not os.path.exists(download_path):
        self.fs.CreateFile(download_path, contents='1010001010101010110101')
    cs_get_mock.side_effect = _GetIfHashChangedMock
    unzip_path = os.path.join(
        os.path.dirname(self.download_path), 'unzip_dir')
    path_within_archive = os.path.join('path', 'within', 'archive')
    dep_path = os.path.join(unzip_path, path_within_archive)
    def _UnzipFileMock(archive_file, unzip_location, tmp_location=None):
      # Simulate extraction: the dependency plus some unrelated files.
      del archive_file, tmp_location
      self.fs.CreateFile(dep_path)
      self.fs.CreateFile(os.path.join(unzip_location, 'extra', 'path'))
      self.fs.CreateFile(os.path.join(unzip_location, 'another_extra_path'))
    unzip_mock.side_effect = _UnzipFileMock
    # Create a stale directory that's expected to get deleted
    stale_unzip_path_glob = os.path.join(
        os.path.dirname(self.download_path), 'unzip_dir_*')
    stale_path = os.path.join(
        os.path.dirname(self.download_path), 'unzip_dir_stale')
    self.fs.CreateDirectory(stale_path)
    self.fs.CreateFile(os.path.join(stale_path, 'some_file'))
    self.assertFalse(os.path.exists(dep_path))
    zip_info = archive_info.ArchiveInfo(
        self.download_path, unzip_path, path_within_archive,
        stale_unzip_path_glob)
    self.cs_info = cloud_storage_info.CloudStorageInfo(
        'cs_bucket', 'cs_hash', self.download_path, 'cs_remote_path',
        version_in_cs='1.2.3.4', archive_info=zip_info)
    # Nothing should be unzipped until GetRemotePath is actually called.
    self.assertFalse(unzip_mock.called)
    self.assertEqual(
        os.path.abspath(dep_path),
        self.cs_info.GetRemotePath())
    self.assertTrue(os.path.exists(dep_path))
    self.assertTrue(stat.S_IMODE(os.stat(os.path.abspath(dep_path)).st_mode) &
                    (stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR))
    unzip_mock.assert_called_once_with(self.download_path, unzip_path)
    # Stale directory should have been deleted
    self.assertFalse(os.path.exists(stale_path))
    # Should not need to unzip a second time, but should return the same path.
    unzip_mock.reset_mock()
    self.assertTrue(os.path.exists(dep_path))
    self.assertEqual(
        os.path.abspath(dep_path),
        self.cs_info.GetRemotePath())
    self.assertTrue(stat.S_IMODE(os.stat(os.path.abspath(dep_path)).st_mode) &
                    (stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR))
    self.assertFalse(unzip_mock.called)
@mock.patch(
'py_utils.cloud_storage.GetIfHashChanged')
def testGetRemotePathCloudStorageErrors(self, cs_get_mock):
cs_get_mock.side_effect = cloud_storage.CloudStorageError
self.assertRaises(cloud_storage.CloudStorageError,
self.cs_info.GetRemotePath)
cs_get_mock.side_effect = cloud_storage.ServerError
self.assertRaises(cloud_storage.ServerError,
self.cs_info.GetRemotePath)
cs_get_mock.side_effect = cloud_storage.NotFoundError
self.assertRaises(cloud_storage.NotFoundError,
self.cs_info.GetRemotePath)
cs_get_mock.side_effect = cloud_storage.PermissionError
self.assertRaises(cloud_storage.PermissionError,
self.cs_info.GetRemotePath)
cs_get_mock.side_effect = cloud_storage.CredentialsError
self.assertRaises(cloud_storage.CredentialsError,
self.cs_info.GetRemotePath)

View File

@@ -0,0 +1,128 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class DependencyInfo(object):
  """All dependency_manager data for one (dependency, platform) pair.

  Holds an optional LocalPathInfo and an optional CloudStorageInfo, plus the
  list of every config file the data came from (kept so conflicts can be
  reported with useful debugging context).
  """

  def __init__(self, dependency, platform, config_path, local_path_info=None,
               cloud_storage_info=None):
    """Initializes the container.

    Args:
      dependency: Name of the dependency. Required.
      platform: Name of the platform to be run on. Required.
      config_path: Path to the config this information came from. Used in
          error messages to improve debugging. Required.
      local_path_info: Optional LocalPathInfo instance.
      cloud_storage_info: Optional CloudStorageInfo instance.
    """
    # TODO(aiolos): update the docstring for A) the usage of zip files
    # and B) supporting lists of local_paths to be checked for most recently
    # changed files.
    if not dependency or not platform:
      raise ValueError(
          'Must supply both a dependency and platform to DependencyInfo')
    self._dependency = dependency
    self._platform = platform
    self._config_paths = [config_path]
    self._local_path_info = local_path_info
    self._cloud_storage_info = cloud_storage_info

  def Update(self, new_dep_info):
    """Merges the information from |new_dep_info| into this instance.

    Raises:
      ValueError: If |new_dep_info| describes a different dependency or
          platform, or if both instances carry cloud storage data.
    """
    self._config_paths.extend(new_dep_info.config_paths)
    same_pair = (self.dependency == new_dep_info.dependency and
                 self.platform == new_dep_info.platform)
    if not same_pair:
      raise ValueError(
          'Cannot update DependencyInfo with different dependency or platform.'
          'Existing dep: %s, existing platform: %s. New dep: %s, new platform:'
          '%s. Config_paths conflicting: %s' % (
              self.dependency, self.platform, new_dep_info.dependency,
              new_dep_info.platform, self.config_paths))
    if new_dep_info.has_cloud_storage_info:
      # Cloud storage data may be supplied by at most one config.
      if self.has_cloud_storage_info:
        raise ValueError(
            'Overriding cloud storage data is not allowed when updating a '
            'DependencyInfo. Conflict in dependency %s on platform %s in '
            'config_paths: %s.' % (self.dependency, self.platform,
                                   self.config_paths))
      self._cloud_storage_info = new_dep_info._cloud_storage_info
    # Local paths, in contrast, accumulate across configs.
    if self._local_path_info:
      self._local_path_info.Update(new_dep_info._local_path_info)
    else:
      self._local_path_info = new_dep_info._local_path_info

  def GetRemotePath(self):
    """Gets the path to a downloaded version of the dependency.

    May not download the file if it has already been downloaded, and will
    unzip the download when the config supplies archive information.

    Returns:
      A path to an executable that was stored in cloud_storage, or None if
      no cloud storage data is available for this dependency.

    Raises:
      CredentialsError: If cloud_storage credentials aren't configured.
      PermissionError: If cloud_storage credentials are configured, but not
          with an account that has permission to download the needed file.
      NotFoundError: If the needed file does not exist where expected in
          cloud_storage or the downloaded zip file.
      ServerError: If an internal server error is hit while downloading the
          needed file.
      CloudStorageError: If another error occured while downloading the
          remote path.
      FileNotFoundError: If the download was otherwise unsuccessful.
    """
    if not self.has_cloud_storage_info:
      return None
    return self._cloud_storage_info.GetRemotePath()

  def GetRemotePathVersion(self):
    """Returns the cloud storage version string, or None if unavailable."""
    if not self.has_cloud_storage_info:
      return None
    return self._cloud_storage_info.version_in_cs

  def GetLocalPath(self):
    """Returns a path to a local copy of the dependency, or None."""
    if not self.has_local_path_info:
      return None
    return self._local_path_info.GetLocalPath()

  @property
  def dependency(self):
    return self._dependency

  @property
  def platform(self):
    return self._platform

  @property
  def config_paths(self):
    return self._config_paths

  @property
  def local_path_info(self):
    return self._local_path_info

  @property
  def has_cloud_storage_info(self):
    return bool(self._cloud_storage_info)

  @property
  def has_local_path_info(self):
    return bool(self._local_path_info)

  @property
  def cloud_storage_info(self):
    return self._cloud_storage_info

View File

@@ -0,0 +1,234 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
import dependency_manager
class DependencyInfoTest(unittest.TestCase):
  """Unit tests for DependencyInfo construction and Update() merging."""

  def testInitRequiredInfo(self):
    """Constructor requires both dependency and platform names."""
    # Must have a dependency, platform and file_path.
    self.assertRaises(ValueError, dependency_manager.DependencyInfo,
                      None, None, None)
    self.assertRaises(ValueError, dependency_manager.DependencyInfo,
                      'dep', None, None)
    self.assertRaises(ValueError, dependency_manager.DependencyInfo,
                      None, 'plat', None)
    self.assertRaises(ValueError, dependency_manager.DependencyInfo,
                      None, None, 'config_path')
    # Empty DependencyInfo.
    empty_di = dependency_manager.DependencyInfo('dep', 'plat', 'config_path')
    self.assertEqual('dep', empty_di.dependency)
    self.assertEqual('plat', empty_di.platform)
    self.assertEqual(['config_path'], empty_di.config_paths)
    self.assertFalse(empty_di.has_local_path_info)
    self.assertFalse(empty_di.has_cloud_storage_info)

  def testInitLocalPaths(self):
    """Constructing with only local path data leaves cloud storage unset."""
    local_path_info = dependency_manager.LocalPathInfo(['path0', 'path1'])
    dep_info = dependency_manager.DependencyInfo(
        'dep', 'platform', 'config_path', local_path_info
    )
    self.assertEqual('dep', dep_info.dependency)
    self.assertEqual('platform', dep_info.platform)
    self.assertEqual(['config_path'], dep_info.config_paths)
    self.assertEqual(local_path_info, dep_info._local_path_info)
    self.assertFalse(dep_info.has_cloud_storage_info)

  def testInitCloudStorageInfo(self):
    """Constructing with only cloud storage data leaves local paths unset."""
    cs_info = dependency_manager.CloudStorageInfo(
        'cs_bucket', 'cs_hash', 'dowload_path', 'cs_remote_path')
    dep_info = dependency_manager.DependencyInfo(
        'dep', 'platform', 'config_path', cloud_storage_info=cs_info)
    self.assertEqual('dep', dep_info.dependency)
    self.assertEqual('platform', dep_info.platform)
    self.assertEqual(['config_path'], dep_info.config_paths)
    self.assertFalse(dep_info.has_local_path_info)
    self.assertTrue(dep_info.has_cloud_storage_info)
    self.assertEqual(cs_info, dep_info._cloud_storage_info)

  def testInitAllInfo(self):
    # NOTE(review): despite the name, no local_path_info is passed here, so
    # this duplicates testInitCloudStorageInfo — confirm whether a
    # LocalPathInfo argument was intended.
    cs_info = dependency_manager.CloudStorageInfo(
        'cs_bucket', 'cs_hash', 'dowload_path', 'cs_remote_path')
    dep_info = dependency_manager.DependencyInfo(
        'dep', 'platform', 'config_path', cloud_storage_info=cs_info)
    self.assertEqual('dep', dep_info.dependency)
    self.assertEqual('platform', dep_info.platform)
    self.assertEqual(['config_path'], dep_info.config_paths)
    self.assertFalse(dep_info.has_local_path_info)
    self.assertTrue(dep_info.has_cloud_storage_info)

  def testUpdateRequiredArgsConflicts(self):
    """Update() must fail whenever dependency or platform differ."""
    lp_info = dependency_manager.LocalPathInfo(['path0', 'path2'])
    dep_info1 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path1', local_path_info=lp_info)
    dep_info2 = dependency_manager.DependencyInfo(
        'dep1', 'platform2', 'config_path2', local_path_info=lp_info)
    dep_info3 = dependency_manager.DependencyInfo(
        'dep2', 'platform1', 'config_path3', local_path_info=lp_info)
    self.assertRaises(ValueError, dep_info1.Update, dep_info2)
    self.assertRaises(ValueError, dep_info1.Update, dep_info3)
    self.assertRaises(ValueError, dep_info3.Update, dep_info2)

  def testUpdateMinimumCloudStorageInfo(self):
    """Update() adopts cloud storage data once, then rejects overrides."""
    dep_info1 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path1')
    cs_info2 = dependency_manager.CloudStorageInfo(
        cs_bucket='cs_bucket2', cs_hash='cs_hash2',
        download_path='download_path2', cs_remote_path='cs_remote_path2')
    dep_info2 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path2', cloud_storage_info=cs_info2)
    dep_info3 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path3')
    cs_info4 = dependency_manager.CloudStorageInfo(
        cs_bucket='cs_bucket4', cs_hash='cs_hash4',
        download_path='download_path4', cs_remote_path='cs_remote_path4')
    dep_info4 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path4', cloud_storage_info=cs_info4)
    self.assertEqual('dep1', dep_info1.dependency)
    self.assertEqual('platform1', dep_info1.platform)
    self.assertEqual(['config_path1'], dep_info1.config_paths)
    # First update copies the cloud storage info across.
    dep_info1.Update(dep_info2)
    self.assertFalse(dep_info1.has_local_path_info)
    self.assertEqual('dep1', dep_info1.dependency)
    self.assertEqual('platform1', dep_info1.platform)
    self.assertEqual(['config_path1', 'config_path2'], dep_info1.config_paths)
    cs_info = dep_info1._cloud_storage_info
    self.assertEqual(cs_info, cs_info2)
    self.assertEqual('cs_bucket2', cs_info._cs_bucket)
    self.assertEqual('cs_hash2', cs_info._cs_hash)
    self.assertEqual('download_path2', cs_info._download_path)
    self.assertEqual('cs_remote_path2', cs_info._cs_remote_path)
    # Updating from an info with no cloud storage data keeps the old data.
    dep_info1.Update(dep_info3)
    self.assertEqual('dep1', dep_info1.dependency)
    self.assertEqual('platform1', dep_info1.platform)
    self.assertEqual(['config_path1', 'config_path2', 'config_path3'],
                     dep_info1.config_paths)
    self.assertFalse(dep_info1.has_local_path_info)
    cs_info = dep_info1._cloud_storage_info
    self.assertEqual(cs_info, cs_info2)
    self.assertEqual('cs_bucket2', cs_info._cs_bucket)
    self.assertEqual('cs_hash2', cs_info._cs_hash)
    self.assertEqual('download_path2', cs_info._download_path)
    self.assertEqual('cs_remote_path2', cs_info._cs_remote_path)
    # A second source of cloud storage data is a conflict.
    self.assertRaises(ValueError, dep_info1.Update, dep_info4)

  def testUpdateMaxCloudStorageInfo(self):
    """Same as the minimum case, but with version and archive info set."""
    dep_info1 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path1')
    zip_info2 = dependency_manager.ArchiveInfo(
        'archive_path2', 'unzip_path2', 'path_withing_archive2')
    cs_info2 = dependency_manager.CloudStorageInfo(
        'cs_bucket2', 'cs_hash2', 'download_path2', 'cs_remote_path2',
        version_in_cs='2.1.1', archive_info=zip_info2)
    dep_info2 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path2', cloud_storage_info=cs_info2)
    dep_info3 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path3')
    zip_info4 = dependency_manager.ArchiveInfo(
        'archive_path4', 'unzip_path4', 'path_withing_archive4')
    cs_info4 = dependency_manager.CloudStorageInfo(
        'cs_bucket4', 'cs_hash4', 'download_path4', 'cs_remote_path4',
        version_in_cs='4.2.1', archive_info=zip_info4)
    dep_info4 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path4', cloud_storage_info=cs_info4)
    self.assertEqual('dep1', dep_info1.dependency)
    self.assertEqual('platform1', dep_info1.platform)
    self.assertEqual(['config_path1'], dep_info1.config_paths)
    dep_info1.Update(dep_info2)
    self.assertFalse(dep_info1.has_local_path_info)
    self.assertEqual('dep1', dep_info1.dependency)
    self.assertEqual('platform1', dep_info1.platform)
    self.assertEqual(['config_path1', 'config_path2'], dep_info1.config_paths)
    cs_info = dep_info1._cloud_storage_info
    self.assertEqual(cs_info, cs_info2)
    self.assertEqual('cs_bucket2', cs_info._cs_bucket)
    self.assertEqual('cs_hash2', cs_info._cs_hash)
    self.assertEqual('download_path2', cs_info._download_path)
    self.assertEqual('cs_remote_path2', cs_info._cs_remote_path)
    # NOTE(review): the next assertion duplicates the previous one — kept
    # verbatim; confirm whether a different attribute was meant.
    self.assertEqual('cs_remote_path2', cs_info._cs_remote_path)
    dep_info1.Update(dep_info3)
    self.assertEqual('dep1', dep_info1.dependency)
    self.assertEqual('platform1', dep_info1.platform)
    self.assertEqual(['config_path1', 'config_path2', 'config_path3'],
                     dep_info1.config_paths)
    self.assertFalse(dep_info1.has_local_path_info)
    cs_info = dep_info1._cloud_storage_info
    self.assertEqual(cs_info, cs_info2)
    self.assertEqual('cs_bucket2', cs_info._cs_bucket)
    self.assertEqual('cs_hash2', cs_info._cs_hash)
    self.assertEqual('download_path2', cs_info._download_path)
    self.assertEqual('cs_remote_path2', cs_info._cs_remote_path)
    self.assertRaises(ValueError, dep_info1.Update, dep_info4)

  def testUpdateAllInfo(self):
    """Local paths accumulate across updates; cloud storage data does not."""
    lp_info1 = dependency_manager.LocalPathInfo(['path1'])
    dep_info1 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path1', local_path_info=lp_info1)
    cs_info2 = dependency_manager.CloudStorageInfo(
        cs_bucket='cs_bucket2', cs_hash='cs_hash2',
        download_path='download_path2', cs_remote_path='cs_remote_path2')
    lp_info2 = dependency_manager.LocalPathInfo(['path2'])
    dep_info2 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path2', local_path_info=lp_info2,
        cloud_storage_info=cs_info2)
    lp_info3 = dependency_manager.LocalPathInfo(['path3'])
    dep_info3 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path3', local_path_info=lp_info3)
    lp_info4 = dependency_manager.LocalPathInfo(['path4'])
    cs_info4 = dependency_manager.CloudStorageInfo(
        cs_bucket='cs_bucket4', cs_hash='cs_hash4',
        download_path='download_path4', cs_remote_path='cs_remote_path4')
    dep_info4 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path4', local_path_info=lp_info4,
        cloud_storage_info=cs_info4)
    self.assertTrue(dep_info1._local_path_info.IsPathInLocalPaths('path1'))
    self.assertFalse(dep_info1._local_path_info.IsPathInLocalPaths('path2'))
    self.assertFalse(dep_info1._local_path_info.IsPathInLocalPaths('path3'))
    self.assertFalse(dep_info1._local_path_info.IsPathInLocalPaths('path4'))
    dep_info1.Update(dep_info2)
    cs_info = dep_info1._cloud_storage_info
    self.assertEqual(cs_info, cs_info2)
    self.assertEqual('cs_bucket2', cs_info._cs_bucket)
    self.assertEqual('cs_hash2', cs_info._cs_hash)
    self.assertEqual('download_path2', cs_info._download_path)
    self.assertEqual('cs_remote_path2', cs_info._cs_remote_path)
    self.assertTrue(dep_info1._local_path_info.IsPathInLocalPaths('path1'))
    self.assertTrue(dep_info1._local_path_info.IsPathInLocalPaths('path2'))
    self.assertFalse(dep_info1._local_path_info.IsPathInLocalPaths('path3'))
    self.assertFalse(dep_info1._local_path_info.IsPathInLocalPaths('path4'))
    dep_info1.Update(dep_info3)
    cs_info = dep_info1._cloud_storage_info
    self.assertEqual(cs_info, cs_info2)
    self.assertEqual('cs_bucket2', cs_info._cs_bucket)
    self.assertEqual('cs_hash2', cs_info._cs_hash)
    self.assertEqual('download_path2', cs_info._download_path)
    self.assertEqual('cs_remote_path2', cs_info._cs_remote_path)
    self.assertTrue(dep_info1._local_path_info.IsPathInLocalPaths('path1'))
    self.assertTrue(dep_info1._local_path_info.IsPathInLocalPaths('path2'))
    self.assertTrue(dep_info1._local_path_info.IsPathInLocalPaths('path3'))
    self.assertFalse(dep_info1._local_path_info.IsPathInLocalPaths('path4'))
    # dep_info4 carries cloud storage data, which may not be overridden.
    self.assertRaises(ValueError, dep_info1.Update, dep_info4)

View File

@@ -0,0 +1,527 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=unused-argument
import mock
from pyfakefs import fake_filesystem_unittest
from py_utils import cloud_storage
import dependency_manager
from dependency_manager import exceptions
class DependencyManagerTest(fake_filesystem_unittest.TestCase):
  def setUp(self):
    """Builds a DependencyInfo with both local and cloud storage data.

    Also installs a fake filesystem so tests can create files freely.
    """
    self.lp_info012 = dependency_manager.LocalPathInfo(
        ['path0', 'path1', 'path2'])
    self.cloud_storage_info = dependency_manager.CloudStorageInfo(
        'cs_bucket', 'cs_hash', 'download_path', 'cs_remote_path')
    self.dep_info = dependency_manager.DependencyInfo(
        'dep', 'platform', 'config_file', local_path_info=self.lp_info012,
        cloud_storage_info=self.cloud_storage_info)
    self.setUpPyfakefs()
  def tearDown(self):
    """Removes the fake filesystem installed by setUp."""
    self.tearDownPyfakefs()
  # TODO(nednguyen): add a test that constructs a
  # dependency_manager.DependencyManager from a list of DependencyInfo objects.
def testErrorInit(self):
with self.assertRaises(ValueError):
dependency_manager.DependencyManager(None)
with self.assertRaises(ValueError):
dependency_manager.DependencyManager('config_file?')
  def testInitialUpdateDependencies(self):
    """_UpdateDependencies populates an empty lookup dict without merging.

    With no pre-existing entries, every DependencyInfo from the config is
    inserted as-is and none of their Update() methods are called.
    """
    dep_manager = dependency_manager.DependencyManager([])
    # Empty BaseConfig.
    dep_manager._lookup_dict = {}
    base_config_mock = mock.MagicMock(spec=dependency_manager.BaseConfig)
    base_config_mock.IterDependencyInfo.return_value = iter([])
    dep_manager._UpdateDependencies(base_config_mock)
    self.assertFalse(dep_manager._lookup_dict)
    # One dependency/platform in a BaseConfig.
    dep_manager._lookup_dict = {}
    base_config_mock = mock.MagicMock(spec=dependency_manager.BaseConfig)
    dep_info = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep = 'dependency'
    plat = 'platform'
    dep_info.dependency = dep
    dep_info.platform = plat
    base_config_mock.IterDependencyInfo.return_value = iter([dep_info])
    expected_lookup_dict = {dep: {plat: dep_info}}
    dep_manager._UpdateDependencies(base_config_mock)
    self.assertEqual(expected_lookup_dict, dep_manager._lookup_dict)
    self.assertFalse(dep_info.Update.called)
    # One dependency multiple platforms in a BaseConfig.
    dep_manager._lookup_dict = {}
    base_config_mock = mock.MagicMock(spec=dependency_manager.BaseConfig)
    dep = 'dependency'
    plat1 = 'platform1'
    plat2 = 'platform2'
    dep_info1 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info1.dependency = dep
    dep_info1.platform = plat1
    dep_info2 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info2.dependency = dep
    dep_info2.platform = plat2
    base_config_mock.IterDependencyInfo.return_value = iter([dep_info1,
                                                             dep_info2])
    expected_lookup_dict = {dep: {plat1: dep_info1,
                                  plat2: dep_info2}}
    dep_manager._UpdateDependencies(base_config_mock)
    self.assertEqual(expected_lookup_dict, dep_manager._lookup_dict)
    self.assertFalse(dep_info1.Update.called)
    self.assertFalse(dep_info2.Update.called)
    # Multiple dependencies, multiple platforms in a BaseConfig.
    dep_manager._lookup_dict = {}
    base_config_mock = mock.MagicMock(spec=dependency_manager.BaseConfig)
    dep1 = 'dependency1'
    dep2 = 'dependency2'
    plat1 = 'platform1'
    plat2 = 'platform2'
    dep_info1 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info1.dependency = dep1
    dep_info1.platform = plat1
    dep_info2 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info2.dependency = dep1
    dep_info2.platform = plat2
    dep_info3 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info3.dependency = dep2
    dep_info3.platform = plat2
    base_config_mock.IterDependencyInfo.return_value = iter(
        [dep_info1, dep_info2, dep_info3])
    expected_lookup_dict = {dep1: {plat1: dep_info1,
                                   plat2: dep_info2},
                            dep2: {plat2: dep_info3}}
    dep_manager._UpdateDependencies(base_config_mock)
    self.assertEqual(expected_lookup_dict, dep_manager._lookup_dict)
    self.assertFalse(dep_info1.Update.called)
    self.assertFalse(dep_info2.Update.called)
    self.assertFalse(dep_info3.Update.called)
  def testFollowupUpdateDependenciesNoOverlap(self):
    """_UpdateDependencies adds new entries without touching existing ones.

    None of the new configs collide with pre-existing (dependency, platform)
    pairs, so no Update() calls should be made on any DependencyInfo.
    """
    dep_manager = dependency_manager.DependencyManager([])
    dep = 'dependency'
    dep1 = 'dependency1'
    dep2 = 'dependency2'
    dep3 = 'dependency3'
    plat1 = 'platform1'
    plat2 = 'platform2'
    plat3 = 'platform3'
    dep_info_a = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info_a.dependency = dep1
    dep_info_a.platform = plat1
    dep_info_b = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info_b.dependency = dep1
    dep_info_b.platform = plat2
    dep_info_c = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info_c.dependency = dep
    dep_info_c.platform = plat1
    start_lookup_dict = {dep: {plat1: dep_info_a,
                               plat2: dep_info_b},
                         dep1: {plat1: dep_info_c}}
    base_config_mock = mock.MagicMock(spec=dependency_manager.BaseConfig)
    # Empty BaseConfig.
    dep_manager._lookup_dict = start_lookup_dict.copy()
    base_config_mock.IterDependencyInfo.return_value = iter([])
    dep_manager._UpdateDependencies(base_config_mock)
    self.assertEqual(start_lookup_dict, dep_manager._lookup_dict)
    # One dependency/platform in a BaseConfig.
    dep_manager._lookup_dict = start_lookup_dict.copy()
    dep_info = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info.dependency = dep3
    dep_info.platform = plat1
    base_config_mock.IterDependencyInfo.return_value = iter([dep_info])
    # NOTE(review): dep_info.platform is plat1 but the expected dict keys it
    # under plat3; assertItemsEqual below only compares top-level dict keys,
    # so the mismatch is never caught — confirm which platform was intended.
    expected_lookup_dict = {dep: {plat1: dep_info_a,
                                  plat2: dep_info_b},
                            dep1: {plat1: dep_info_c},
                            dep3: {plat3: dep_info}}
    dep_manager._UpdateDependencies(base_config_mock)
    self.assertItemsEqual(expected_lookup_dict, dep_manager._lookup_dict)
    self.assertFalse(dep_info.Update.called)
    self.assertFalse(dep_info_a.Update.called)
    self.assertFalse(dep_info_b.Update.called)
    self.assertFalse(dep_info_c.Update.called)
    # One dependency multiple platforms in a BaseConfig.
    dep_manager._lookup_dict = start_lookup_dict.copy()
    dep_info1 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info1.dependency = dep2
    dep_info1.platform = plat1
    dep_info2 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info2.dependency = dep2
    dep_info2.platform = plat2
    base_config_mock.IterDependencyInfo.return_value = iter([dep_info1,
                                                             dep_info2])
    expected_lookup_dict = {dep: {plat1: dep_info_a,
                                  plat2: dep_info_b},
                            dep1: {plat1: dep_info_c},
                            dep2: {plat1: dep_info1,
                                   plat2: dep_info2}}
    dep_manager._UpdateDependencies(base_config_mock)
    self.assertEqual(expected_lookup_dict, dep_manager._lookup_dict)
    self.assertFalse(dep_info1.Update.called)
    self.assertFalse(dep_info2.Update.called)
    self.assertFalse(dep_info_a.Update.called)
    self.assertFalse(dep_info_b.Update.called)
    self.assertFalse(dep_info_c.Update.called)
    # Multiple dependencies, multiple platforms in a BaseConfig.
    dep_manager._lookup_dict = start_lookup_dict.copy()
    dep1 = 'dependency1'
    plat1 = 'platform1'
    plat2 = 'platform2'
    dep_info1 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info1.dependency = dep2
    dep_info1.platform = plat1
    dep_info2 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info2.dependency = dep2
    dep_info2.platform = plat2
    dep_info3 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info3.dependency = dep3
    dep_info3.platform = plat2
    base_config_mock.IterDependencyInfo.return_value = iter(
        [dep_info1, dep_info2, dep_info3])
    expected_lookup_dict = {dep: {plat1: dep_info_a,
                                  plat2: dep_info_b},
                            dep1: {plat1: dep_info_c},
                            dep2: {plat1: dep_info1,
                                   plat2: dep_info2},
                            dep3: {plat2: dep_info3}}
    dep_manager._UpdateDependencies(base_config_mock)
    self.assertEqual(expected_lookup_dict, dep_manager._lookup_dict)
    self.assertFalse(dep_info1.Update.called)
    self.assertFalse(dep_info2.Update.called)
    self.assertFalse(dep_info3.Update.called)
    self.assertFalse(dep_info_a.Update.called)
    self.assertFalse(dep_info_b.Update.called)
    self.assertFalse(dep_info_c.Update.called)
    # Ensure the testing data wasn't corrupted.
    self.assertEqual(start_lookup_dict,
                     {dep: {plat1: dep_info_a,
                            plat2: dep_info_b},
                      dep1: {plat1: dep_info_c}})
  def testFollowupUpdateDependenciesWithCollisions(self):
    """_UpdateDependencies merges colliding entries via Update().

    When a new DependencyInfo matches an existing (dependency, platform)
    pair, the existing entry's Update() is called with the new info, and a
    ValueError from Update() must propagate out of _UpdateDependencies.
    """
    dep_manager = dependency_manager.DependencyManager([])
    dep = 'dependency'
    dep1 = 'dependency1'
    dep2 = 'dependency2'
    plat1 = 'platform1'
    plat2 = 'platform2'
    dep_info_a = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info_a.dependency = dep1
    dep_info_a.platform = plat1
    dep_info_b = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info_b.dependency = dep1
    dep_info_b.platform = plat2
    dep_info_c = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info_c.dependency = dep
    dep_info_c.platform = plat1
    start_lookup_dict = {dep: {plat1: dep_info_a,
                               plat2: dep_info_b},
                         dep1: {plat1: dep_info_c}}
    base_config_mock = mock.MagicMock(spec=dependency_manager.BaseConfig)
    # One dependency/platform.
    dep_manager._lookup_dict = start_lookup_dict.copy()
    dep_info = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info.dependency = dep
    dep_info.platform = plat1
    base_config_mock.IterDependencyInfo.return_value = iter([dep_info])
    expected_lookup_dict = {dep: {plat1: dep_info_a,
                                  plat2: dep_info_b},
                            dep1: {plat1: dep_info_c}}
    dep_manager._UpdateDependencies(base_config_mock)
    self.assertItemsEqual(expected_lookup_dict, dep_manager._lookup_dict)
    dep_info_a.Update.assert_called_once_with(dep_info)
    self.assertFalse(dep_info.Update.called)
    self.assertFalse(dep_info_b.Update.called)
    self.assertFalse(dep_info_c.Update.called)
    dep_info_a.reset_mock()
    dep_info_b.reset_mock()
    dep_info_c.reset_mock()
    # One dependency multiple platforms in a BaseConfig.
    dep_manager._lookup_dict = start_lookup_dict.copy()
    dep_info1 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info1.dependency = dep1
    dep_info1.platform = plat1
    dep_info2 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info2.dependency = dep2
    dep_info2.platform = plat2
    base_config_mock.IterDependencyInfo.return_value = iter([dep_info1,
                                                             dep_info2])
    expected_lookup_dict = {dep: {plat1: dep_info_a,
                                  plat2: dep_info_b},
                            dep1: {plat1: dep_info_c},
                            dep2: {plat2: dep_info2}}
    dep_manager._UpdateDependencies(base_config_mock)
    self.assertEqual(expected_lookup_dict, dep_manager._lookup_dict)
    self.assertFalse(dep_info1.Update.called)
    self.assertFalse(dep_info2.Update.called)
    self.assertFalse(dep_info_a.Update.called)
    self.assertFalse(dep_info_b.Update.called)
    dep_info_c.Update.assert_called_once_with(dep_info1)
    dep_info_a.reset_mock()
    dep_info_b.reset_mock()
    dep_info_c.reset_mock()
    # Multiple dependencies, multiple platforms in a BaseConfig.
    dep_manager._lookup_dict = start_lookup_dict.copy()
    dep1 = 'dependency1'
    plat1 = 'platform1'
    plat2 = 'platform2'
    dep_info1 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info1.dependency = dep
    dep_info1.platform = plat1
    dep_info2 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info2.dependency = dep1
    dep_info2.platform = plat1
    dep_info3 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info3.dependency = dep2
    dep_info3.platform = plat2
    base_config_mock.IterDependencyInfo.return_value = iter(
        [dep_info1, dep_info2, dep_info3])
    expected_lookup_dict = {dep: {plat1: dep_info_a,
                                  plat2: dep_info_b},
                            dep1: {plat1: dep_info_c},
                            dep2: {plat2: dep_info3}}
    dep_manager._UpdateDependencies(base_config_mock)
    self.assertEqual(expected_lookup_dict, dep_manager._lookup_dict)
    self.assertFalse(dep_info1.Update.called)
    self.assertFalse(dep_info2.Update.called)
    self.assertFalse(dep_info3.Update.called)
    self.assertFalse(dep_info_b.Update.called)
    dep_info_a.Update.assert_called_once_with(dep_info1)
    dep_info_c.Update.assert_called_once_with(dep_info2)
    # Collision error.
    dep_manager._lookup_dict = start_lookup_dict.copy()
    dep_info = mock.MagicMock(spec=dependency_manager.DependencyInfo)
    dep_info.dependency = dep
    dep_info.platform = plat1
    base_config_mock.IterDependencyInfo.return_value = iter([dep_info])
    dep_info_a.Update.side_effect = ValueError
    self.assertRaises(ValueError,
                      dep_manager._UpdateDependencies, base_config_mock)
    # Ensure the testing data wasn't corrupted.
    self.assertEqual(start_lookup_dict,
                     {dep: {plat1: dep_info_a,
                            plat2: dep_info_b},
                      dep1: {plat1: dep_info_c}})
  def testGetDependencyInfo(self):
    """_GetDependencyInfo prefers an exact platform, then 'default', else None."""
    dep_manager = dependency_manager.DependencyManager([])
    self.assertFalse(dep_manager._lookup_dict)
    # No dependencies in the dependency manager.
    self.assertEqual(None, dep_manager._GetDependencyInfo('missing_dep',
                                                          'missing_plat'))
    dep_manager._lookup_dict = {'dep1': {'plat1': 'dep_info11',
                                         'plat2': 'dep_info12',
                                         'plat3': 'dep_info13'},
                                'dep2': {'plat1': 'dep_info11',
                                         'plat2': 'dep_info21',
                                         'plat3': 'dep_info23',
                                         'default': 'dep_info2d'},
                                'dep3': {'plat1': 'dep_info31',
                                         'plat2': 'dep_info32',
                                         'default': 'dep_info3d'}}
    # Dependency not in the dependency manager.
    self.assertEqual(None, dep_manager._GetDependencyInfo(
        'missing_dep', 'missing_plat'))
    # Dependency in the dependency manager, but not the platform. No default.
    self.assertEqual(None, dep_manager._GetDependencyInfo(
        'dep1', 'missing_plat'))
    # Dependency in the dependency manager, but not the platform, but a default
    # exists.
    self.assertEqual('dep_info2d', dep_manager._GetDependencyInfo(
        'dep2', 'missing_plat'))
    # Dependency and platform in the dependency manager. A default exists.
    self.assertEqual('dep_info23', dep_manager._GetDependencyInfo(
        'dep2', 'plat3'))
    # Dependency and platform in the dependency manager. No default exists.
    self.assertEqual('dep_info12', dep_manager._GetDependencyInfo(
        'dep1', 'plat2'))
@mock.patch(
'dependency_manager.dependency_info.DependencyInfo.GetRemotePath') # pylint: disable=line-too-long
def testFetchPathUnititializedDependency(
self, cs_path_mock):
dep_manager = dependency_manager.DependencyManager([])
self.assertFalse(cs_path_mock.call_args)
cs_path = 'cs_path'
cs_path_mock.return_value = cs_path
# Empty lookup_dict
with self.assertRaises(exceptions.NoPathFoundError):
dep_manager.FetchPath('dep', 'plat_arch_x86')
# Non-empty lookup dict that doesn't contain the dependency we're looking
# for.
dep_manager._lookup_dict = {'dep1': mock.MagicMock(),
'dep2': mock.MagicMock()}
with self.assertRaises(exceptions.NoPathFoundError):
dep_manager.FetchPath('dep', 'plat_arch_x86')
  @mock.patch('os.path')
  @mock.patch(
      'dependency_manager.DependencyManager._GetDependencyInfo')
  @mock.patch(
      'dependency_manager.dependency_info.DependencyInfo.GetRemotePath') # pylint: disable=line-too-long
  def testFetchPathLocalFile(self, cs_path_mock, dep_info_mock, path_mock):
    """FetchPath returns an existing local path without hitting cloud storage."""
    # NOTE(review): path_mock (the whole os.path module) is patched but never
    # configured or asserted on here — presumably only to keep real path
    # checks out of the way; confirm it is still needed.
    dep_manager = dependency_manager.DependencyManager([])
    self.assertFalse(cs_path_mock.call_args)
    cs_path = 'cs_path'
    dep_info = self.dep_info
    cs_path_mock.return_value = cs_path
    # The DependencyInfo returned should be passed through to LocalPath.
    dep_info_mock.return_value = dep_info
    # Non-empty lookup dict that contains the dependency we're looking for.
    # Local path exists.
    dep_manager._lookup_dict = {'dep': {'platform' : self.dep_info},
                                'dep2': mock.MagicMock()}
    self.fs.CreateFile('path1')
    found_path = dep_manager.FetchPath('dep', 'platform')
    self.assertEqual('path1', found_path)
    # Cloud storage must never have been consulted.
    self.assertFalse(cs_path_mock.call_args)
  @mock.patch(
      'dependency_manager.dependency_info.DependencyInfo.GetRemotePath') # pylint: disable=line-too-long
  def testFetchPathRemoteFile(
      self, cs_path_mock):
    """FetchPath falls back to the cloud storage download when no local path exists."""
    dep_manager = dependency_manager.DependencyManager([])
    self.assertFalse(cs_path_mock.call_args)
    cs_path = 'cs_path'
    def FakeCSPath():
      # Simulate a download: materialize the file, then report its path.
      self.fs.CreateFile(cs_path)
      return cs_path
    cs_path_mock.side_effect = FakeCSPath
    # Non-empty lookup dict that contains the dependency we're looking for.
    # Local path doesn't exist, but cloud_storage_path is downloaded.
    dep_manager._lookup_dict = {'dep': {'platform' : self.dep_info,
                                        'plat1': mock.MagicMock()},
                                'dep2': {'plat2': mock.MagicMock()}}
    found_path = dep_manager.FetchPath('dep', 'platform')
    self.assertEqual(cs_path, found_path)
@mock.patch(
'dependency_manager.dependency_info.DependencyInfo.GetRemotePath') # pylint: disable=line-too-long
def testFetchPathError(
self, cs_path_mock):
dep_manager = dependency_manager.DependencyManager([])
self.assertFalse(cs_path_mock.call_args)
cs_path_mock.return_value = None
dep_manager._lookup_dict = {'dep': {'platform' : self.dep_info,
'plat1': mock.MagicMock()},
'dep2': {'plat2': mock.MagicMock()}}
# Non-empty lookup dict that contains the dependency we're looking for.
# Local path doesn't exist, and cloud_storage path wasn't successfully
# found.
self.assertRaises(exceptions.NoPathFoundError,
dep_manager.FetchPath, 'dep', 'platform')
cs_path_mock.side_effect = cloud_storage.CredentialsError
self.assertRaises(cloud_storage.CredentialsError,
dep_manager.FetchPath, 'dep', 'platform')
cs_path_mock.side_effect = cloud_storage.CloudStorageError
self.assertRaises(cloud_storage.CloudStorageError,
dep_manager.FetchPath, 'dep', 'platform')
cs_path_mock.side_effect = cloud_storage.PermissionError
self.assertRaises(cloud_storage.PermissionError,
dep_manager.FetchPath, 'dep', 'platform')
def testLocalPath(self):
dep_manager = dependency_manager.DependencyManager([])
# Empty lookup_dict
with self.assertRaises(exceptions.NoPathFoundError):
dep_manager.LocalPath('dep', 'plat')
def testLocalPathNoDependency(self):
# Non-empty lookup dict that doesn't contain the dependency we're looking
# for.
dep_manager = dependency_manager.DependencyManager([])
dep_manager._lookup_dict = {'dep1': mock.MagicMock(),
'dep2': mock.MagicMock()}
with self.assertRaises(exceptions.NoPathFoundError):
dep_manager.LocalPath('dep', 'plat')
def testLocalPathExists(self):
# Non-empty lookup dict that contains the dependency we're looking for.
# Local path exists.
dep_manager = dependency_manager.DependencyManager([])
dep_manager._lookup_dict = {'dependency' : {'platform': self.dep_info},
'dep1': mock.MagicMock(),
'dep2': mock.MagicMock()}
self.fs.CreateFile('path1')
found_path = dep_manager.LocalPath('dependency', 'platform')
self.assertEqual('path1', found_path)
def testLocalPathMissingPaths(self):
# Non-empty lookup dict that contains the dependency we're looking for.
# Local path is found but doesn't exist.
dep_manager = dependency_manager.DependencyManager([])
dep_manager._lookup_dict = {'dependency' : {'platform': self.dep_info},
'dep1': mock.MagicMock(),
'dep2': mock.MagicMock()}
self.assertRaises(exceptions.NoPathFoundError,
dep_manager.LocalPath, 'dependency', 'platform')
def testLocalPathNoPaths(self):
# Non-empty lookup dict that contains the dependency we're looking for.
# Local path isn't found.
dep_manager = dependency_manager.DependencyManager([])
dep_info = dependency_manager.DependencyInfo(
'dep', 'platform', 'config_file',
cloud_storage_info=self.cloud_storage_info)
dep_manager._lookup_dict = {'dependency' : {'platform': dep_info},
'dep1': mock.MagicMock(),
'dep2': mock.MagicMock()}
self.assertRaises(exceptions.NoPathFoundError,
dep_manager.LocalPath, 'dependency', 'platform')

View File

@@ -0,0 +1,113 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import stat
import subprocess
import sys
import zipfile_2_7_13 as zipfile
from dependency_manager import exceptions
def _WinReadOnlyHandler(func, path, execinfo):
  """shutil.rmtree onerror hook that retries after clearing read-only bits.

  Args:
    func: The function that raised (e.g. os.remove), retried once.
    path: The path that could not be removed.
    execinfo: The (type, value, traceback) tuple from sys.exc_info().
  """
  if not os.access(path, os.W_OK):
    # The failure was likely caused by the read-only attribute: make the
    # file writable and retry the original operation once.
    os.chmod(path, stat.S_IWRITE)
    func(path)
  else:
    # Re-raise the original exception with its traceback.
    # NOTE(review): three-argument raise is Python 2-only syntax; this module
    # will not parse under Python 3 until this line is ported (e.g. to
    # six.reraise or exc.with_traceback).
    raise execinfo[0], execinfo[1], execinfo[2]
def RemoveDir(dir_path):
  """Delete the directory at |dir_path| and all of its contents, if present.

  |dir_path| must be absolute. On Windows the extended-length path prefix is
  applied so paths beyond the legacy 260-character limit can be removed, and
  read-only files are retried via _WinReadOnlyHandler.
  """
  assert os.path.isabs(dir_path)
  if sys.platform.startswith('win'):
    # Extended-length path syntax bypasses the MAX_PATH limit.
    dir_path = u'\\\\?\\' + dir_path
  if not os.path.isdir(dir_path):
    return
  shutil.rmtree(dir_path, onerror=_WinReadOnlyHandler)
def VerifySafeArchive(archive):
  """Raise ArchiveError if any member of |archive| would extract outside cwd.

  Guards against "zip slip" style member names such as '../../foo' that
  escape the extraction root.

  Args:
    archive: A zipfile.ZipFile (or compatible) object opened for reading.

  Raises:
    ArchiveError: If a member resolves outside the current directory.
  """
  def _Canonical(path_name):
    return os.path.realpath(os.path.abspath(path_name))

  # The trailing separator avoids false positives:
  # /tmp/abc/bad_file.py starts with /tmp/a but not with /tmp/a/.
  safe_root = _Canonical(os.getcwd()) + os.path.sep
  for member_name in archive.namelist():
    resolved = _Canonical(os.path.join(safe_root, member_name))
    if not resolved.startswith(safe_root):
      raise exceptions.ArchiveError(
          'Archive %s contains a bad member: %s.' % (
              archive.filename, member_name))
def GetModeFromPath(file_path):
  """Return the permission bits (an int) of the file at |file_path|."""
  file_stat = os.stat(file_path)
  return stat.S_IMODE(file_stat.st_mode)
def GetModeFromZipInfo(zip_info):
  """Return the permission bits recorded in a zipfile.ZipInfo entry.

  The upper 16 bits of external_attr hold the archived file's mode.
  """
  external_attributes = zip_info.external_attr
  return external_attributes >> 16
def SetUnzippedDirPermissions(archive, unzipped_dir):
  """Set the file permissions in an unzipped archive.

  Designed to be called right after extractall() was called on |archive|.
  Noop on Win. Otherwise sets the executable bit on files where needed.

  Args:
    archive: A zipfile.ZipFile object opened for reading.
    unzipped_dir: A path to a directory containing the unzipped contents
        of |archive|.
  """
  if sys.platform.startswith('win'):
    # Windows has no executable bit, so leave the ACLs alone.
    return
  for zip_info in archive.infolist():
    if not GetModeFromZipInfo(zip_info) & stat.S_IXUSR:
      continue
    # Only the owner's executable permission is preserved.
    unzipped_path = os.path.abspath(
        os.path.join(unzipped_dir, zip_info.filename))
    current_mode = GetModeFromPath(unzipped_path)
    os.chmod(unzipped_path, current_mode | stat.S_IXUSR)
def UnzipArchive(archive_path, unzip_path):
  """Unzips a file if it is a zip file.

  Args:
    archive_path: The downloaded file to unzip.
    unzip_path: The destination directory to unzip to. Must be absolute;
        created if it does not already exist.

  Raises:
    ValueError: If |archive_path| is not a zipfile.
    ArchiveError: If the archive contains members that would extract outside
        the destination (see VerifySafeArchive).
  """
  # TODO(aiolos): Add tests once the refactor is completed. crbug.com/551158
  if not (archive_path and zipfile.is_zipfile(archive_path)):
    raise ValueError(
        'Attempting to unzip a non-archive file at %s' % archive_path)
  if not os.path.exists(unzip_path):
    os.makedirs(unzip_path)
  # The Python ZipFile does not support symbolic links, which makes it
  # unsuitable for Mac builds. so use ditto instead. crbug.com/700097.
  if sys.platform.startswith('darwin'):
    assert os.path.isabs(unzip_path)
    unzip_cmd = ['ditto', '-x', '-k', archive_path, unzip_path]
    proc = subprocess.Popen(unzip_cmd, bufsize=0, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    proc.communicate()
    return
  assert os.path.isabs(unzip_path)
  # Remember the unprefixed destination before entering the try block so the
  # cleanup below works even when opening the archive itself fails.
  # (Previously this was assigned inside the try, so a failure in
  # zipfile.ZipFile() raised a NameError here instead of the real error.)
  unzip_path_without_prefix = unzip_path
  if sys.platform.startswith('win'):
    # Extended-length path syntax bypasses the MAX_PATH limit on extraction.
    unzip_path = u'\\\\?\\' + unzip_path
  try:
    with zipfile.ZipFile(archive_path, 'r') as archive:
      VerifySafeArchive(archive)
      archive.extractall(path=unzip_path)
      SetUnzippedDirPermissions(archive, unzip_path)
  except:
    # Remove whatever was partially extracted before re-raising.
    # Hack necessary because isdir doesn't work with escaped paths on Windows.
    if unzip_path_without_prefix and os.path.isdir(unzip_path_without_prefix):
      RemoveDir(unzip_path_without_prefix)
    raise

View File

@@ -0,0 +1,196 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import stat
import sys
import tempfile
import unittest
import uuid
import zipfile
import mock
from dependency_manager import dependency_manager_util
from dependency_manager import exceptions
class DependencyManagerUtilTest(unittest.TestCase):
  # This class intentionally uses actual file I/O to test real system behavior.
  def setUp(self):
    """Build a temp tree with read-only/writable/executable files (plus the
    same trio inside a sub-directory), verify it, and zip it up."""
    self.tmp_dir = os.path.abspath(tempfile.mkdtemp(prefix='telemetry'))
    self.sub_dir = os.path.join(self.tmp_dir, 'sub_dir')
    os.mkdir(self.sub_dir)
    self.read_only_path = (os.path.join(self.tmp_dir, 'read_only'))
    with open(self.read_only_path, 'w+') as read_file:
      read_file.write('Read-only file')
    os.chmod(self.read_only_path, stat.S_IRUSR)
    self.writable_path = (os.path.join(self.tmp_dir, 'writable'))
    with open(self.writable_path, 'w+') as writable_file:
      writable_file.write('Writable file')
    os.chmod(self.writable_path, stat.S_IRUSR | stat.S_IWUSR)
    self.executable_path = (os.path.join(self.tmp_dir, 'executable'))
    with open(self.executable_path, 'w+') as executable_file:
      executable_file.write('Executable file')
    os.chmod(self.executable_path, stat.S_IRWXU)
    self.sub_read_only_path = (os.path.join(self.sub_dir, 'read_only'))
    with open(self.sub_read_only_path, 'w+') as read_file:
      read_file.write('Read-only sub file')
    os.chmod(self.sub_read_only_path, stat.S_IRUSR)
    self.sub_writable_path = (os.path.join(self.sub_dir, 'writable'))
    with open(self.sub_writable_path, 'w+') as writable_file:
      writable_file.write('Writable sub file')
    os.chmod(self.sub_writable_path, stat.S_IRUSR | stat.S_IWUSR)
    self.sub_executable_path = (os.path.join(self.sub_dir, 'executable'))
    with open(self.sub_executable_path, 'w+') as executable_file:
      executable_file.write('Executable sub file')
    os.chmod(self.sub_executable_path, stat.S_IRWXU)
    self.AssertExpectedDirFiles(self.tmp_dir)
    self.archive_path = self.CreateZipArchiveFromDir(self.tmp_dir)
  def tearDown(self):
    # Remove both the on-disk tree and the archive created in setUp.
    if os.path.isdir(self.tmp_dir):
      dependency_manager_util.RemoveDir(self.tmp_dir)
    if os.path.isfile(self.archive_path):
      os.remove(self.archive_path)
  def AssertExpectedDirFiles(self, top_dir):
    """Assert |top_dir| contains exactly the tree built by setUp with the
    expected permission bits (executable bits are not checked on Windows)."""
    sub_dir = os.path.join(top_dir, 'sub_dir')
    read_only_path = (os.path.join(top_dir, 'read_only'))
    writable_path = (os.path.join(top_dir, 'writable'))
    executable_path = (os.path.join(top_dir, 'executable'))
    sub_read_only_path = (os.path.join(sub_dir, 'read_only'))
    sub_writable_path = (os.path.join(sub_dir, 'writable'))
    sub_executable_path = (os.path.join(sub_dir, 'executable'))
    # assert contents as expected
    self.assertTrue(os.path.isdir(top_dir))
    self.assertTrue(os.path.isdir(sub_dir))
    self.assertTrue(os.path.isfile(read_only_path))
    self.assertTrue(os.path.isfile(writable_path))
    self.assertTrue(os.path.isfile(executable_path))
    self.assertTrue(os.path.isfile(sub_read_only_path))
    self.assertTrue(os.path.isfile(sub_writable_path))
    self.assertTrue(os.path.isfile(sub_executable_path))
    # assert permissions as expected
    self.assertTrue(
        stat.S_IRUSR & stat.S_IMODE(os.stat(read_only_path).st_mode))
    self.assertTrue(
        stat.S_IRUSR & stat.S_IMODE(os.stat(sub_read_only_path).st_mode))
    self.assertTrue(
        stat.S_IRUSR & stat.S_IMODE(os.stat(writable_path).st_mode))
    self.assertTrue(
        stat.S_IWUSR & stat.S_IMODE(os.stat(writable_path).st_mode))
    self.assertTrue(
        stat.S_IRUSR & stat.S_IMODE(os.stat(sub_writable_path).st_mode))
    self.assertTrue(
        stat.S_IWUSR & stat.S_IMODE(os.stat(sub_writable_path).st_mode))
    if not sys.platform.startswith('win'):
      # Windows has no Unix executable bit, so skip these checks there.
      self.assertEqual(
          stat.S_IRWXU,
          stat.S_IRWXU & stat.S_IMODE(os.stat(executable_path).st_mode))
      self.assertEqual(
          stat.S_IRWXU,
          stat.S_IRWXU & stat.S_IMODE(os.stat(sub_executable_path).st_mode))
  def CreateZipArchiveFromDir(self, dir_path):
    """Zip |dir_path| into a uniquely named archive and return its path."""
    # NOTE(review): if make_archive raises before |archive_path| is bound,
    # the isfile() check in the except clause raises NameError instead of
    # the original error -- confirm whether this is intended.
    try:
      base_path = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
      archive_path = shutil.make_archive(base_path, 'zip', dir_path)
      self.assertTrue(os.path.exists(archive_path))
      self.assertTrue(zipfile.is_zipfile(archive_path))
    except:
      if os.path.isfile(archive_path):
        os.remove(archive_path)
      raise
    return archive_path
  def testRemoveDirWithSubDir(self):
    # RemoveDir must delete the whole tree, including read-only entries.
    dependency_manager_util.RemoveDir(self.tmp_dir)
    self.assertFalse(os.path.exists(self.tmp_dir))
    self.assertFalse(os.path.exists(self.sub_dir))
    self.assertFalse(os.path.exists(self.read_only_path))
    self.assertFalse(os.path.exists(self.writable_path))
    self.assertFalse(os.path.isfile(self.executable_path))
    self.assertFalse(os.path.exists(self.sub_read_only_path))
    self.assertFalse(os.path.exists(self.sub_writable_path))
    self.assertFalse(os.path.isfile(self.sub_executable_path))
  def testUnzipFile(self):
    self.AssertExpectedDirFiles(self.tmp_dir)
    unzip_path = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
    dependency_manager_util.UnzipArchive(self.archive_path, unzip_path)
    # Unzipping must reproduce the tree without disturbing the original.
    self.AssertExpectedDirFiles(unzip_path)
    self.AssertExpectedDirFiles(self.tmp_dir)
    dependency_manager_util.RemoveDir(unzip_path)
  def testUnzipFileContainingLongPath(self):
    # Builds an archive whose member paths exceed the 260-char Windows limit.
    # NOTE(review): as in CreateZipArchiveFromDir, |archive_path| may be
    # unbound inside the except clause if make_archive fails -- confirm.
    try:
      dir_path = self.tmp_dir
      if sys.platform.startswith('win'):
        dir_path = u'\\\\?\\' + dir_path
      archive_suffix = ''
      # 260 is the Windows API path length limit.
      while len(archive_suffix) < 260:
        archive_suffix = os.path.join(archive_suffix, 'really')
      contents_dir_path = os.path.join(dir_path, archive_suffix)
      os.makedirs(contents_dir_path)
      filename = os.path.join(contents_dir_path, 'longpath.txt')
      open(filename, 'a').close()
      base_path = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
      archive_path = shutil.make_archive(base_path, 'zip', dir_path)
      self.assertTrue(os.path.exists(archive_path))
      self.assertTrue(zipfile.is_zipfile(archive_path))
    except:
      if os.path.isfile(archive_path):
        os.remove(archive_path)
      raise
    unzip_path = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
    dependency_manager_util.UnzipArchive(archive_path, unzip_path)
    dependency_manager_util.RemoveDir(unzip_path)
  def testUnzipFileFailure(self):
    # zipfile is not used on MacOS. See crbug.com/700097.
    if sys.platform.startswith('darwin'):
      return
    unzip_path = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
    self.assertFalse(os.path.exists(unzip_path))
    # Force extractall to fail; UnzipArchive must clean up and leave the
    # source archive's original tree intact.
    with mock.patch(
        'dependency_manager.dependency_manager_util.zipfile.ZipFile.extractall'  # pylint: disable=line-too-long
    ) as zipfile_mock:
      zipfile_mock.side_effect = IOError
      self.assertRaises(
          IOError, dependency_manager_util.UnzipArchive, self.archive_path,
          unzip_path)
    self.AssertExpectedDirFiles(self.tmp_dir)
    self.assertFalse(os.path.exists(unzip_path))
  def testVerifySafeArchivePasses(self):
    # The archive built in setUp only contains well-behaved member names.
    with zipfile.ZipFile(self.archive_path) as archive:
      dependency_manager_util.VerifySafeArchive(archive)
  def testVerifySafeArchiveFailsOnRelativePathWithPardir(self):
    # NOTE(review): the temporary file created here is never deleted, and
    # writing a str to a binary-mode NamedTemporaryFile is Python 2-only.
    tmp_file = tempfile.NamedTemporaryFile(delete=False)
    tmp_file_name = tmp_file.name
    tmp_file.write('Bad file!')
    tmp_file.close()
    # A member named '../../foo' must be rejected as a zip-slip attempt.
    with zipfile.ZipFile(self.archive_path, 'w') as archive:
      archive.write(tmp_file_name, '../../foo')
    self.assertRaises(
        exceptions.ArchiveError, dependency_manager_util.VerifySafeArchive,
        archive)

View File

@@ -0,0 +1,52 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from py_utils import cloud_storage
# Re-exported so callers can reference exceptions.CloudStorageError without
# importing py_utils.cloud_storage directly.
CloudStorageError = cloud_storage.CloudStorageError
class UnsupportedConfigFormatError(ValueError):
  """Raised for config files whose type the dependency manager cannot use."""
  def __init__(self, config_type, config_file):
    if config_type:
      message = ('The json file at %s has config type %s, which is unsupported '
                 'by the dependency manager.' % (config_file, config_type))
    else:
      message = ('The json file at %s is unsupported by the dependency_manager '
                 'due to no specified config type' % config_file)
    super(UnsupportedConfigFormatError, self).__init__(message)
class EmptyConfigError(ValueError):
  """Raised when a config file contains no data."""
  def __init__(self, file_path):
    message = 'Empty config at %s.' % file_path
    super(EmptyConfigError, self).__init__(message)
class FileNotFoundError(Exception):
  """Raised when an expected file is absent from the filesystem."""
  # NOTE: this shadows the Python 3 builtin of the same name inside this
  # module; callers should reference it as exceptions.FileNotFoundError.
  def __init__(self, file_path):
    message = 'No file found at %s' % file_path
    super(FileNotFoundError, self).__init__(message)
class NoPathFoundError(Exception):
  """Raised when a dependency is found neither locally nor in cloud storage."""
  def __init__(self, dependency, platform):
    message = (
        'No file could be found locally, and no file to download from cloud '
        'storage for %s on platform %s' % (dependency, platform))
    super(NoPathFoundError, self).__init__(message)
class ReadWriteError(Exception):
  """Generic error for failed read/write operations."""
class CloudStorageUploadConflictError(CloudStorageError):
  """Raised when the target upload location already exists in the bucket."""
  def __init__(self, bucket, path):
    message = 'File location %s already exists in bucket %s' % (path, bucket)
    super(CloudStorageUploadConflictError, self).__init__(message)
class ArchiveError(Exception):
  """Raised when an archive is malformed or contains unsafe member paths."""
  # No explicit __init__ is needed: Exception already accepts and stores a
  # message argument, so the previous pass-through override was redundant.

View File

@@ -0,0 +1,69 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
class LocalPathInfo(object):
  """Container for a set of local file paths where a given dependency
  can be stored.

  Organized as a list of groups, where each group is itself a file path list.
  See GetLocalPath() to understand how they are used.
  """

  def __init__(self, path_priority_groups):
    """Initialize the container.

    Args:
      path_priority_groups: Can be either None, or a list of file path
          strings (corresponding to a list of groups, where each group has
          a single file path), or a list of a list of file path strings
          (i.e. a list of groups).
    """
    self._path_priority_groups = self._ParseLocalPaths(path_priority_groups)

  def GetLocalPath(self):
    """Look for a local file, and return its path.

    Looks for the first group which has at least one existing file path. Then
    returns the most-recent of these files.

    Returns:
      Local file path, if found, or None otherwise.
    """
    for priority_group in self._path_priority_groups:
      existing = [p for p in priority_group if os.path.exists(p)]
      if not existing:
        continue
      # Within the first non-empty group, the newest file wins.
      return max(existing, key=lambda path: os.stat(path).st_mtime)
    return None

  def IsPathInLocalPaths(self, path):
    """Returns true if |path| is in one of this instance's file path lists."""
    return any(
        path in priority_group for priority_group in self._path_priority_groups)

  def Update(self, local_path_info):
    """Update this object from the content of another LocalPathInfo instance.

    Any file path from |local_path_info| that is not already contained in the
    current instance will be added into new groups to it.

    Args:
      local_path_info: Another LocalPathInfo instance, or None.
    """
    if not local_path_info:
      return
    for priority_group in local_path_info._path_priority_groups:
      new_paths = [p for p in priority_group
                   if not self.IsPathInLocalPaths(p)]
      if new_paths:
        self._path_priority_groups.append(new_paths)

  @staticmethod
  def _ParseLocalPaths(local_paths):
    """Normalize |local_paths| into a list of path-list groups."""
    if not local_paths:
      return []
    # |basestring| only exists on Python 2; fall back to |str| on Python 3 so
    # bare string entries are still wrapped into single-element groups
    # (previously this raised NameError under Python 3).
    try:
      string_types = basestring
    except NameError:
      string_types = str
    return [[e] if isinstance(e, string_types) else e for e in local_paths]

View File

@@ -0,0 +1,136 @@
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from pyfakefs import fake_filesystem_unittest
import dependency_manager
def _CreateFile(path):
"""Create file at specific |path|, with specific |content|."""
with open(path, 'wb') as f:
f.write('x')
def _ChangeFileTime(path, time0, days):
new_time = time0 + (days * 24 * 60 * 60)
os.utime(path, (new_time, new_time))
class LocalPathInfoTest(fake_filesystem_unittest.TestCase):
  # All file operations below hit pyfakefs's in-memory filesystem, so the
  # tests can freely create files with bare relative names.
  def setUp(self):
    self.setUpPyfakefs()
  def tearDown(self):
    self.tearDownPyfakefs()
  def testEmptyInstance(self):
    # A LocalPathInfo built from None knows no paths at all.
    path_info = dependency_manager.LocalPathInfo(None)
    self.assertIsNone(path_info.GetLocalPath())
    self.assertFalse(path_info.IsPathInLocalPaths('file.txt'))
  def testSimpleGroupWithOnePath(self):
    path_info = dependency_manager.LocalPathInfo(['file.txt'])
    self.assertTrue(path_info.IsPathInLocalPaths('file.txt'))
    self.assertFalse(path_info.IsPathInLocalPaths('other.txt'))
    # GetLocalPath returns None if the file doesn't exist.
    # Otherwise it will return the file path.
    self.assertIsNone(path_info.GetLocalPath())
    _CreateFile('file.txt')
    self.assertEqual('file.txt', path_info.GetLocalPath())
  def testSimpleGroupsWithMultiplePaths(self):
    # Within a single group, the most recently modified existing file wins.
    path_info = dependency_manager.LocalPathInfo(
        [['file1', 'file2', 'file3']])
    self.assertTrue(path_info.IsPathInLocalPaths('file1'))
    self.assertTrue(path_info.IsPathInLocalPaths('file2'))
    self.assertTrue(path_info.IsPathInLocalPaths('file3'))
    _CreateFile('file1')
    _CreateFile('file2')
    _CreateFile('file3')
    s = os.stat('file1')
    time0 = s.st_mtime
    _ChangeFileTime('file1', time0, 4)
    _ChangeFileTime('file2', time0, 2)
    _ChangeFileTime('file3', time0, 0)
    self.assertEqual('file1', path_info.GetLocalPath())
    _ChangeFileTime('file1', time0, 0)
    _ChangeFileTime('file2', time0, 4)
    _ChangeFileTime('file3', time0, 2)
    self.assertEqual('file2', path_info.GetLocalPath())
    _ChangeFileTime('file1', time0, 2)
    _ChangeFileTime('file2', time0, 0)
    _ChangeFileTime('file3', time0, 4)
    self.assertEqual('file3', path_info.GetLocalPath())
  def testMultipleGroupsWithSinglePaths(self):
    # Across groups, the earliest group that has an existing file wins,
    # regardless of modification time.
    path_info = dependency_manager.LocalPathInfo(
        ['file1', 'file2', 'file3'])
    self.assertTrue(path_info.IsPathInLocalPaths('file1'))
    self.assertTrue(path_info.IsPathInLocalPaths('file2'))
    self.assertTrue(path_info.IsPathInLocalPaths('file3'))
    self.assertIsNone(path_info.GetLocalPath())
    _CreateFile('file3')
    self.assertEqual('file3', path_info.GetLocalPath())
    _CreateFile('file2')
    self.assertEqual('file2', path_info.GetLocalPath())
    _CreateFile('file1')
    self.assertEqual('file1', path_info.GetLocalPath())
  def testMultipleGroupsWithMultiplePaths(self):
    path_info = dependency_manager.LocalPathInfo([
        ['file1', 'file2'],
        ['file3', 'file4']])
    self.assertTrue(path_info.IsPathInLocalPaths('file1'))
    self.assertTrue(path_info.IsPathInLocalPaths('file2'))
    self.assertTrue(path_info.IsPathInLocalPaths('file3'))
    self.assertTrue(path_info.IsPathInLocalPaths('file4'))
    _CreateFile('file1')
    _CreateFile('file3')
    s = os.stat('file1')
    time0 = s.st_mtime
    # Check that file1 is always returned, even if it is not the most recent
    # file, because it is part of the first group and exists.
    _ChangeFileTime('file1', time0, 2)
    _ChangeFileTime('file3', time0, 0)
    self.assertEqual('file1', path_info.GetLocalPath())
    _ChangeFileTime('file1', time0, 0)
    _ChangeFileTime('file3', time0, 2)
    self.assertEqual('file1', path_info.GetLocalPath())
  def testUpdate(self):
    path_info1 = dependency_manager.LocalPathInfo(
        [['file1', 'file2']])  # One group with two files.
    path_info2 = dependency_manager.LocalPathInfo(
        ['file1', 'file2', 'file3'])  # Three groups
    self.assertTrue(path_info1.IsPathInLocalPaths('file1'))
    self.assertTrue(path_info1.IsPathInLocalPaths('file2'))
    self.assertFalse(path_info1.IsPathInLocalPaths('file3'))
    _CreateFile('file3')
    self.assertIsNone(path_info1.GetLocalPath())
    # Merging path_info2 must add file3 (the only path not already known).
    path_info1.Update(path_info2)
    self.assertTrue(path_info1.IsPathInLocalPaths('file1'))
    self.assertTrue(path_info1.IsPathInLocalPaths('file2'))
    self.assertTrue(path_info1.IsPathInLocalPaths('file3'))
    self.assertEqual('file3', path_info1.GetLocalPath())
    _CreateFile('file1')
    time0 = os.stat('file1').st_mtime
    _ChangeFileTime('file3', time0, 2)  # Make file3 more recent.
    # Check that file3 is in a later group.
    self.assertEqual('file1', path_info1.GetLocalPath())

View File

@@ -0,0 +1,246 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
from dependency_manager import base_config
from dependency_manager import exceptions
# Platform key used as a fallback by _GetDependencyInfo when no
# platform-specific entry exists for a dependency.
DEFAULT_TYPE = 'default'
class DependencyManager(object):
  def __init__(self, configs, supported_config_types=None):
    """Manages file dependencies found locally or in cloud_storage.

    Args:
      configs: A list of instances of BaseConfig or it's subclasses, passed
          in decreasing order of precedence.
      supported_config_types: A list of whitelisted config_types.
          No restrictions if None is specified.

    Raises:
      ValueError: If |configs| is not a list of instances of BaseConfig or
          its subclasses.
      UnsupportedConfigFormatError: If supported_config_types is specified and
          configs contains a config not in the supported config_types.

    Example: DependencyManager([config1, config2, config3])
        No requirements on the type of Config, and any dependencies that have
        local files for the same platform will first look in those from
        config1, then those from config2, and finally those from config3.
    """
    if configs is None or not isinstance(configs, list):
      raise ValueError(
          'Must supply a list of config files to DependencyManager')
    # self._lookup_dict is a dictionary with the following format:
    # { dependency1: {platform1: dependency_info1,
    #                 platform2: dependency_info2}
    #   dependency2: {platform1: dependency_info3,
    #                  ...}
    #   ...}
    #
    # Where the dependencies and platforms are strings, and the
    # dependency_info's are DependencyInfo instances.
    self._lookup_dict = {}
    self.supported_configs = supported_config_types or []
    for config in configs:
      # Earlier configs take precedence: _UpdateDependencies only inserts a
      # dependency/platform pair if it is not already present, and otherwise
      # merges extra information into the existing entry.
      self._UpdateDependencies(config)
  def FetchPathWithVersion(self, dependency, platform):
    """Get a path to an executable for |dependency|, downloading as needed.

    A path to a default executable may be returned if a platform specific
    version is not specified in the config(s).

    Args:
      dependency: Name of the desired dependency, as given in the config(s)
          used in this DependencyManager.
      platform: Name of the platform the dependency will run on. Often of the
          form 'os_architecture'. Must match those specified in the config(s)
          used in this DependencyManager.

    Returns:
      <path>, <version> where:
          <path> is the path to an executable of |dependency| that will run
              on |platform|, downloading from cloud storage if needed.
          <version> is the version of the executable at <path> or None.

    Raises:
      NoPathFoundError: If a local copy of the executable cannot be found and
          a remote path could not be downloaded from cloud_storage.
      CredentialsError: If cloud_storage credentials aren't configured.
      PermissionError: If cloud_storage credentials are configured, but not
          with an account that has permission to download the remote file.
      NotFoundError: If the remote file does not exist where expected in
          cloud_storage.
      ServerError: If an internal server error is hit while downloading the
          remote file.
      CloudStorageError: If another error occured while downloading the remote
          path.
      FileNotFoundError: If an attempted download was otherwise unsuccessful.
    """
    dependency_info = self._GetDependencyInfo(dependency, platform)
    if not dependency_info:
      raise exceptions.NoPathFoundError(dependency, platform)
    path = dependency_info.GetLocalPath()
    version = None
    if not path or not os.path.exists(path):
      # No usable local copy: fall back to the remote path, which must exist
      # on disk after the download for the fetch to count as successful.
      path = dependency_info.GetRemotePath()
      if not path or not os.path.exists(path):
        raise exceptions.NoPathFoundError(dependency, platform)
      version = dependency_info.GetRemotePathVersion()
    return path, version
  def FetchPath(self, dependency, platform):
    """Get a path to an executable for |dependency|, downloading as needed.

    A path to a default executable may be returned if a platform specific
    version is not specified in the config(s).

    Args:
      dependency: Name of the desired dependency, as given in the config(s)
          used in this DependencyManager.
      platform: Name of the platform the dependency will run on. Often of the
          form 'os_architecture'. Must match those specified in the config(s)
          used in this DependencyManager.

    Returns:
      A path to an executable of |dependency| that will run on |platform|,
      downloading from cloud storage if needed.

    Raises:
      NoPathFoundError: If a local copy of the executable cannot be found and
          a remote path could not be downloaded from cloud_storage.
      CredentialsError: If cloud_storage credentials aren't configured.
      PermissionError: If cloud_storage credentials are configured, but not
          with an account that has permission to download the remote file.
      NotFoundError: If the remote file does not exist where expected in
          cloud_storage.
      ServerError: If an internal server error is hit while downloading the
          remote file.
      CloudStorageError: If another error occured while downloading the remote
          path.
      FileNotFoundError: If an attempted download was otherwise unsuccessful.
    """
    path, _ = self.FetchPathWithVersion(dependency, platform)
    return path
  def LocalPath(self, dependency, platform):
    """Get a path to a locally stored executable for |dependency|.

    A path to a default executable may be returned if a platform specific
    version is not specified in the config(s). Will not download the
    executable.

    Args:
      dependency: Name of the desired dependency, as given in the config(s)
          used in this DependencyManager.
      platform: Name of the platform the dependency will run on. Often of the
          form 'os_architecture'. Must match those specified in the config(s)
          used in this DependencyManager.

    Returns:
      A path to an executable for |dependency| that will run on |platform|.

    Raises:
      NoPathFoundError: If a local copy of the executable cannot be found.
    """
    dependency_info = self._GetDependencyInfo(dependency, platform)
    if not dependency_info:
      raise exceptions.NoPathFoundError(dependency, platform)
    local_path = dependency_info.GetLocalPath()
    if not local_path or not os.path.exists(local_path):
      raise exceptions.NoPathFoundError(dependency, platform)
    return local_path
  def PrefetchPaths(self, platform, dependencies=None, cloud_storage_retries=3):
    """Fetch several dependencies for |platform| ahead of time.

    Args:
      platform: Name of the platform the dependencies will run on.
      dependencies: Optional list of dependency names to prefetch; defaults
          to every dependency known to this manager.
      cloud_storage_retries: Number of extra cloud storage fetch attempts
          allowed per dependency.

    Returns:
      (found_deps, skipped_deps): lists of dependency names. Skipped entries
      are dependencies not configured for |platform| at all.

    Raises:
      NoPathFoundError: If any requested dependency could be neither found
          locally nor fetched from cloud storage.
    """
    if not dependencies:
      dependencies = self._lookup_dict.keys()
    skipped_deps = []
    found_deps = []
    missing_deps = []
    for dependency in dependencies:
      dependency_info = self._GetDependencyInfo(dependency, platform)
      if not dependency_info:
        # The dependency is only configured for other platforms.
        skipped_deps.append(dependency)
        continue
      local_path = dependency_info.GetLocalPath()
      if local_path:
        found_deps.append(dependency)
        continue
      fetched_path = None
      cloud_storage_error = None
      # NOTE(review): this loop exits on the first CloudStorageError but
      # never breaks after a successful fetch, so on success GetRemotePath
      # runs cloud_storage_retries + 1 times -- the retry semantics look
      # inverted; confirm the intended behavior.
      for _ in range(0, cloud_storage_retries + 1):
        try:
          fetched_path = dependency_info.GetRemotePath()
        except exceptions.CloudStorageError as e:
          cloud_storage_error = e
          break
      if fetched_path:
        found_deps.append(dependency)
      else:
        missing_deps.append(dependency)
        logging.error(
            'Dependency %s could not be found or fetched from cloud storage for'
            ' platform %s. Error: %s', dependency, platform,
            cloud_storage_error)
    if missing_deps:
      raise exceptions.NoPathFoundError(', '.join(missing_deps), platform)
    return (found_deps, skipped_deps)
  def _UpdateDependencies(self, config):
    """Add the dependency information stored in |config| to this instance.

    Args:
      config: An instances of BaseConfig or a subclasses.

    Raises:
      UnsupportedConfigFormatError: If supported_config_types was specified
          and config is not in the supported config_types.
    """
    if not isinstance(config, base_config.BaseConfig):
      raise ValueError('Must use a BaseConfig or subclass instance with the '
                       'DependencyManager.')
    if (self.supported_configs and
        config.GetConfigType() not in self.supported_configs):
      raise exceptions.UnsupportedConfigFormatError(config.GetConfigType(),
                                                    config.config_path)
    for dep_info in config.IterDependencyInfo():
      dependency = dep_info.dependency
      platform = dep_info.platform
      if dependency not in self._lookup_dict:
        self._lookup_dict[dependency] = {}
      if platform not in self._lookup_dict[dependency]:
        self._lookup_dict[dependency][platform] = dep_info
      else:
        # A higher-precedence config already claimed this dependency/platform
        # pair; merge in any additional information from the new entry.
        self._lookup_dict[dependency][platform].Update(dep_info)
  def _GetDependencyInfo(self, dependency, platform):
    """Get information for |dependency| on |platform|, or a default if needed.

    Args:
      dependency: Name of the desired dependency, as given in the config(s)
          used in this DependencyManager.
      platform: Name of the platform the dependency will run on. Often of the
          form 'os_architecture'. Must match those specified in the config(s)
          used in this DependencyManager.

    Returns: The dependency_info for |dependency| on |platform| if it exists.
        Or the default version of |dependency| if it exists, or None if neither
        exist.
    """
    if not self._lookup_dict or dependency not in self._lookup_dict:
      return None
    dependency_dict = self._lookup_dict[dependency]
    device_type = platform
    if not device_type in dependency_dict:
      # Fall back to the 'default' entry when there is no platform-specific
      # one; .get() below returns None if neither exists.
      device_type = DEFAULT_TYPE
    return dependency_dict.get(device_type)

View File

@@ -0,0 +1,108 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
from py_utils import cloud_storage
from dependency_manager import exceptions
# Suffix appended to a remote path to form the default backup copy's path
# (see CloudStorageUploader.__init__).
BACKUP_PATH_EXTENSION = 'old'
class CloudStorageUploader(object):
def __init__(self, bucket, remote_path, local_path, cs_backup_path=None):
if not bucket or not remote_path or not local_path:
raise ValueError(
'Attempted to partially initialize upload data with bucket %s, '
'remote_path %s, and local_path %s', bucket, remote_path, local_path)
if not os.path.exists(local_path):
raise ValueError('Attempting to initilize UploadInfo with missing '
'local path %s', local_path)
self._cs_bucket = bucket
self._cs_remote_path = remote_path
self._local_path = local_path
self._cs_backup_path = (cs_backup_path or
'%s.%s' % (self._cs_remote_path,
BACKUP_PATH_EXTENSION))
self._updated = False
self._backed_up = False
def Upload(self, force=False):
"""Upload all pending files and then write the updated config to disk.
Will attempt to copy files existing in the upload location to a backup
location in the same bucket in cloud storage if |force| is True.
Args:
force: True if files should be uploaded to cloud storage even if a
file already exists in the upload location.
Raises:
CloudStorageUploadConflictError: If |force| is False and the potential
upload location of a file already exists.
CloudStorageError: If copying an existing file to the backup location
or uploading the new file fails.
"""
if cloud_storage.Exists(self._cs_bucket, self._cs_remote_path):
if not force:
#pylint: disable=nonstandard-exception
raise exceptions.CloudStorageUploadConflictError(self._cs_bucket,
self._cs_remote_path)
#pylint: enable=nonstandard-exception
logging.debug('A file already exists at upload path %s in self.cs_bucket'
' %s', self._cs_remote_path, self._cs_bucket)
try:
cloud_storage.Copy(self._cs_bucket, self._cs_bucket,
self._cs_remote_path, self._cs_backup_path)
self._backed_up = True
except cloud_storage.CloudStorageError:
logging.error('Failed to copy existing file %s in cloud storage bucket '
'%s to backup location %s', self._cs_remote_path,
self._cs_bucket, self._cs_backup_path)
raise
try:
cloud_storage.Insert(
self._cs_bucket, self._cs_remote_path, self._local_path)
except cloud_storage.CloudStorageError:
logging.error('Failed to upload %s to %s in cloud_storage bucket %s',
self._local_path, self._cs_remote_path, self._cs_bucket)
raise
self._updated = True
def Rollback(self):
"""Attempt to undo the previous call to Upload.
Does nothing if no previous call to Upload was made, or if nothing was
successfully changed.
Returns:
True iff changes were successfully rolled back.
Raises:
CloudStorageError: If copying the backed up file to its original
location or removing the uploaded file fails.
"""
cloud_storage_changed = False
if self._backed_up:
cloud_storage.Copy(self._cs_bucket, self._cs_bucket, self._cs_backup_path,
self._cs_remote_path)
cloud_storage_changed = True
self._cs_backup_path = None
elif self._updated:
cloud_storage.Delete(self._cs_bucket, self._cs_remote_path)
cloud_storage_changed = True
self._updated = False
return cloud_storage_changed
def __eq__(self, other, msg=None):
if not isinstance(self, type(other)):
return False
return (self._local_path == other._local_path and
self._cs_remote_path == other._cs_remote_path and
self._cs_bucket == other._cs_bucket)

View File

@@ -0,0 +1,91 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from pyfakefs import fake_filesystem_unittest
from dependency_manager import uploader
class CloudStorageUploaderTest(fake_filesystem_unittest.TestCase):
  """Tests for CloudStorageUploader construction and equality semantics."""

  def setUp(self):
    self.setUpPyfakefs()
    self.bucket = 'cloud_storage_bucket'
    self.local_path = os.path.abspath(os.path.join('path', 'to', 'dependency'))
    self.fs.CreateFile(self.local_path)
    self.remote_path = 'config_folder/remote_path'

  def testCloudStorageUploaderMissingData(self):
    # Every constructor argument is required; None for any of them raises.
    bad_arg_sets = (
        (None, self.remote_path, self.local_path),
        (self.bucket, None, self.local_path),
        (self.bucket, self.remote_path, None),
    )
    for args in bad_arg_sets:
      with self.assertRaises(ValueError):
        uploader.CloudStorageUploader(*args)

  def testCloudStorageUploaderLocalFileMissing(self):
    self.fs.RemoveObject(self.local_path)
    with self.assertRaises(ValueError):
      uploader.CloudStorageUploader(
          self.bucket, self.remote_path, self.local_path)

  def testCloudStorageUploaderCreation(self):
    upload_data = uploader.CloudStorageUploader(
        self.bucket, self.remote_path, self.local_path)
    self.assertEqual(self.bucket, upload_data._cs_bucket)
    self.assertEqual(self.remote_path, upload_data._cs_remote_path)
    self.assertEqual(self.local_path, upload_data._local_path)
    # Backup path defaults to the remote path with an '.old' extension.
    self.assertEqual('%s.old' % self.remote_path, upload_data._cs_backup_path)

  def testCloudStorageUploaderEquality(self):
    upload_data = uploader.CloudStorageUploader(
        self.bucket, self.remote_path, self.local_path)
    upload_data_exact = uploader.CloudStorageUploader(
        self.bucket, self.remote_path, self.local_path)
    # Built from fresh-but-equal values rather than the setUp attributes.
    upload_data_equal = uploader.CloudStorageUploader(
        'cloud_storage_bucket',
        'config_folder/remote_path',
        os.path.abspath(os.path.join('path', 'to', 'dependency')))
    self.assertEqual(upload_data, upload_data)
    self.assertEqual(upload_data, upload_data_exact)
    self.assertEqual(upload_data_exact, upload_data)
    self.assertEqual(upload_data, upload_data_equal)
    self.assertEqual(upload_data_equal, upload_data)

  def testCloudStorageUploaderInequality(self):
    new_local_path = os.path.abspath(os.path.join('new', 'local', 'path'))
    self.fs.CreateFile(new_local_path)
    new_bucket = 'new_bucket'
    new_remote_path = 'new_remote/path'
    upload_data = uploader.CloudStorageUploader(
        self.bucket, self.remote_path, self.local_path)
    upload_data_all_different = uploader.CloudStorageUploader(
        new_bucket, new_remote_path, new_local_path)
    upload_data_different_bucket = uploader.CloudStorageUploader(
        new_bucket, self.remote_path, self.local_path)
    upload_data_different_remote_path = uploader.CloudStorageUploader(
        self.bucket, new_remote_path, self.local_path)
    upload_data_different_local_path = uploader.CloudStorageUploader(
        self.bucket, self.remote_path, new_local_path)
    # Non-uploader objects never compare equal.
    self.assertNotEqual(upload_data, 'a string!')
    self.assertNotEqual(upload_data, 0)
    self.assertNotEqual(upload_data, 2354)
    self.assertNotEqual(upload_data, None)
    # Any single differing field makes uploaders unequal, in both directions.
    self.assertNotEqual(upload_data, upload_data_all_different)
    self.assertNotEqual(upload_data_all_different, upload_data)
    self.assertNotEqual(upload_data, upload_data_different_bucket)
    self.assertNotEqual(upload_data_different_bucket, upload_data)
    self.assertNotEqual(upload_data, upload_data_different_remote_path)
    self.assertNotEqual(upload_data_different_remote_path, upload_data)
    self.assertNotEqual(upload_data, upload_data_different_local_path)
    self.assertNotEqual(upload_data_different_local_path, upload_data)

  # TODO: write unittests for Upload and Rollback.

View File

@@ -0,0 +1,68 @@
[MESSAGES CONTROL]
# Disable the message, report, category or checker with the given id(s).
# TODO: Shrink this list to as small as possible.
disable=
design,
similarities,
fixme,
locally-disabled,
locally-enabled,
missing-docstring,
no-member,
no-self-use,
protected-access,
star-args,
[REPORTS]
# Don't write out full reports, just messages.
reports=no
[BASIC]
# Regular expression which should only match correct function names.
function-rgx=^(?:(?P<exempt>setUp|tearDown|setUpModule|tearDownModule)|(?P<camel_case>_?[A-Z][a-zA-Z0-9]*))$
# Regular expression which should only match correct method names.
method-rgx=^(?:(?P<exempt>_[a-z0-9_]+__|get|post|runTest|setUp|tearDown|setUpTestCase|tearDownTestCase|setupSelf|tearDownClass|setUpClass)|(?P<camel_case>(_{0,2}|test|assert)[A-Z][a-zA-Z0-9_]*))$
# Regular expression which should only match correct argument names.
argument-rgx=^[a-z][a-z0-9_]*$
# Regular expression which should only match correct variable names.
variable-rgx=^[a-z][a-z0-9_]*$
# Good variable names which should always be accepted, separated by a comma.
good-names=main,_
# List of builtin function names that should not be used, separated by a comma.
bad-functions=apply,input,reduce
[VARIABLES]
# Tells whether we should check for unused imports in __init__ files.
init-import=no
# A regular expression matching names used for dummy variables (i.e. not used).
dummy-variables-rgx=^\*{0,2}(_$|unused_)
[TYPECHECK]
# Tells whether missing members accessed in a mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
[FORMAT]
# Maximum number of lines in a module.
max-module-lines=2000
# We use two spaces for indents, instead of the usual four spaces or tab.
indent-string=' '