Update sdk/platform-tools to version 26.0.0.
[android_tools.git] / sdk / platform-tools / systrace / catapult / common / py_utils / py_utils / cloud_storage_unittest.py
blob a513b262f40e09e7d7d093868551ff91e8803080
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os
import shutil
import sys
import tempfile
import unittest

import mock
from pyfakefs import fake_filesystem_unittest

import py_utils
from py_utils import cloud_storage
from py_utils import lock

_CLOUD_STORAGE_GLOBAL_LOCK_PATH = os.path.join(
    os.path.dirname(__file__), 'cloud_storage_global_lock.py')
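# _CLOUD_STORAGE_GLOBAL_LOCK_PATH is opened and locked by
# testGetGlobalLockUnavailableCausesTimeout below to simulate a contended
# global cloud storage lock.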


def _FakeReadHash(_):
  return 'hashthis!'


def _FakeCalulateHashMatchesRead(_):
  return 'hashthis!'


def _FakeCalulateHashNewHash(_):
  return 'omgnewhash'
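# The fakes above replace cloud_storage.ReadHash / cloud_storage.CalculateHash
# in testGetIfChanged: the "MatchesRead" variant makes the local file look
# up to date, the "NewHash" variant makes it look stale, so no real hashing
# or cloud storage I/O happens.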


class CloudStorageFakeFsUnitTest(fake_filesystem_unittest.TestCase):

  def setUp(self):
    self.original_environ = os.environ.copy()
    os.environ['DISABLE_CLOUD_STORAGE_IO'] = ''
    self.setUpPyfakefs()
    self.fs.CreateFile(
        os.path.join(py_utils.GetCatapultDir(),
                     'third_party', 'gsutil', 'gsutil'))

  def CreateFiles(self, file_paths):
    for f in file_paths:
      self.fs.CreateFile(f)

  def tearDown(self):
    self.tearDownPyfakefs()
    os.environ = self.original_environ

  def _FakeRunCommand(self, cmd):
    pass

  def _FakeGet(self, bucket, remote_path, local_path):
    pass
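  # The two no-op fakes above stand in for cloud_storage._RunCommand and
  # cloud_storage._GetLocked so the tests below never shell out to gsutil.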

  def _AssertRunCommandRaisesError(self, communicate_strs, error):
    with mock.patch('py_utils.cloud_storage.subprocess.Popen') as popen:
      p_mock = mock.Mock()
      popen.return_value = p_mock
      p_mock.returncode = 1
      for stderr in communicate_strs:
        p_mock.communicate.return_value = ('', stderr)
        self.assertRaises(error, cloud_storage._RunCommand, [])

  def testRunCommandCredentialsError(self):
    strs = ['You are attempting to access protected data with no configured',
            'Failure: No handler was ready to authenticate.']
    self._AssertRunCommandRaisesError(strs, cloud_storage.CredentialsError)

  def testRunCommandPermissionError(self):
    strs = ['status=403', 'status 403', '403 Forbidden']
    self._AssertRunCommandRaisesError(strs, cloud_storage.PermissionError)

  def testRunCommandNotFoundError(self):
    strs = ['InvalidUriError', 'No such object', 'No URLs matched',
            'One or more URLs matched no', 'InvalidUriError']
    self._AssertRunCommandRaisesError(strs, cloud_storage.NotFoundError)

  def testRunCommandServerError(self):
    strs = ['500 Internal Server Error']
    self._AssertRunCommandRaisesError(strs, cloud_storage.ServerError)

  def testRunCommandGenericError(self):
    strs = ['Random string']
    self._AssertRunCommandRaisesError(strs, cloud_storage.CloudStorageError)

  def testInsertCreatesValidCloudUrl(self):
    orig_run_command = cloud_storage._RunCommand
    try:
      cloud_storage._RunCommand = self._FakeRunCommand
      remote_path = 'test-remote-path.html'
      local_path = 'test-local-path.html'
      cloud_url = cloud_storage.Insert(cloud_storage.PUBLIC_BUCKET,
                                       remote_path, local_path)
      self.assertEqual('https://console.developers.google.com/m/cloudstorage'
                       '/b/chromium-telemetry/o/test-remote-path.html',
                       cloud_url)
    finally:
      cloud_storage._RunCommand = orig_run_command

  @mock.patch('py_utils.cloud_storage.subprocess')
  def testExistsReturnsFalse(self, subprocess_mock):
    p_mock = mock.Mock()
    subprocess_mock.Popen.return_value = p_mock
    p_mock.communicate.return_value = (
        '',
        'CommandException: One or more URLs matched no objects.\n')
    p_mock.returncode_result = 1
    self.assertFalse(cloud_storage.Exists('fake bucket',
                                          'fake remote path'))

  @mock.patch('py_utils.cloud_storage.CalculateHash')
  @mock.patch('py_utils.cloud_storage._GetLocked')
  @mock.patch('py_utils.cloud_storage._FileLock')
  @mock.patch('py_utils.cloud_storage.os.path')
  def testGetIfHashChanged(self, path_mock, unused_lock_mock, get_mock,
                           calc_hash_mock):
    path_mock.exists.side_effect = [False, True, True]
    calc_hash_mock.return_value = 'hash'

    # The file at |local_path| doesn't exist. We should download file from cs.
    ret = cloud_storage.GetIfHashChanged(
        'remote_path', 'local_path', 'cs_bucket', 'hash')
    self.assertTrue(ret)
    get_mock.assert_called_once_with('cs_bucket', 'remote_path', 'local_path')
    get_mock.reset_mock()
    self.assertFalse(calc_hash_mock.call_args)
    calc_hash_mock.reset_mock()

    # A local file exists at |local_path| but has the wrong hash.
    # We should download file from cs.
    ret = cloud_storage.GetIfHashChanged(
        'remote_path', 'local_path', 'cs_bucket', 'new_hash')
    self.assertTrue(ret)
    get_mock.assert_called_once_with('cs_bucket', 'remote_path', 'local_path')
    get_mock.reset_mock()
    calc_hash_mock.assert_called_once_with('local_path')
    calc_hash_mock.reset_mock()

    # Downloaded file exists locally and has the right hash. Don't download.
    ret = cloud_storage.GetIfHashChanged(
        'remote_path', 'local_path', 'cs_bucket', 'hash')
    self.assertFalse(get_mock.call_args)
    self.assertFalse(ret)
    calc_hash_mock.reset_mock()
    get_mock.reset_mock()

  @mock.patch('py_utils.cloud_storage._FileLock')
  def testGetIfChanged(self, unused_lock_mock):
    orig_get = cloud_storage._GetLocked
    orig_read_hash = cloud_storage.ReadHash
    orig_calculate_hash = cloud_storage.CalculateHash
    cloud_storage.ReadHash = _FakeReadHash
    cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead
    file_path = 'test-file-path.wpr'
    hash_path = file_path + '.sha1'
    try:
      cloud_storage._GetLocked = self._FakeGet
      # hash_path doesn't exist.
      self.assertFalse(cloud_storage.GetIfChanged(file_path,
                                                  cloud_storage.PUBLIC_BUCKET))
      # hash_path exists, but file_path doesn't.
      self.CreateFiles([hash_path])
      self.assertTrue(cloud_storage.GetIfChanged(file_path,
                                                 cloud_storage.PUBLIC_BUCKET))
      # hash_path and file_path exist, and have same hash.
      self.CreateFiles([file_path])
      self.assertFalse(cloud_storage.GetIfChanged(file_path,
                                                  cloud_storage.PUBLIC_BUCKET))
      # hash_path and file_path exist, and have different hashes.
      cloud_storage.CalculateHash = _FakeCalulateHashNewHash
      self.assertTrue(cloud_storage.GetIfChanged(file_path,
                                                 cloud_storage.PUBLIC_BUCKET))
    finally:
      cloud_storage._GetLocked = orig_get
      cloud_storage.CalculateHash = orig_calculate_hash
      cloud_storage.ReadHash = orig_read_hash

  @unittest.skipIf(sys.platform.startswith('win'),
                   'https://github.com/catapult-project/catapult/issues/1861')
  def testGetFilesInDirectoryIfChanged(self):
    self.CreateFiles([
        'real_dir_path/dir1/1file1.sha1',
        'real_dir_path/dir1/1file2.txt',
        'real_dir_path/dir1/1file3.sha1',
        'real_dir_path/dir2/2file.txt',
        'real_dir_path/dir3/3file1.sha1'])

    def IncrementFilesUpdated(*_):
      IncrementFilesUpdated.files_updated += 1
    IncrementFilesUpdated.files_updated = 0
    orig_get_if_changed = cloud_storage.GetIfChanged
    cloud_storage.GetIfChanged = IncrementFilesUpdated
    try:
      self.assertRaises(ValueError, cloud_storage.GetFilesInDirectoryIfChanged,
                        os.path.abspath(os.sep), cloud_storage.PUBLIC_BUCKET)
      self.assertEqual(0, IncrementFilesUpdated.files_updated)
      self.assertRaises(ValueError, cloud_storage.GetFilesInDirectoryIfChanged,
                        'fake_dir_path', cloud_storage.PUBLIC_BUCKET)
      self.assertEqual(0, IncrementFilesUpdated.files_updated)
      cloud_storage.GetFilesInDirectoryIfChanged('real_dir_path',
                                                 cloud_storage.PUBLIC_BUCKET)
      self.assertEqual(3, IncrementFilesUpdated.files_updated)
    finally:
      cloud_storage.GetIfChanged = orig_get_if_changed

  def testCopy(self):
    orig_run_command = cloud_storage._RunCommand

    def AssertCorrectRunCommandArgs(args):
      self.assertEqual(expected_args, args)
    cloud_storage._RunCommand = AssertCorrectRunCommandArgs
    expected_args = ['cp', 'gs://bucket1/remote_path1',
                     'gs://bucket2/remote_path2']
    try:
      cloud_storage.Copy('bucket1', 'bucket2', 'remote_path1', 'remote_path2')
    finally:
      cloud_storage._RunCommand = orig_run_command

  @mock.patch('py_utils.cloud_storage._FileLock')
  def testDisableCloudStorageIo(self, unused_lock_mock):
    os.environ['DISABLE_CLOUD_STORAGE_IO'] = '1'
    dir_path = 'real_dir_path'
    self.fs.CreateDirectory(dir_path)
    file_path = os.path.join(dir_path, 'file1')
    file_path_sha = file_path + '.sha1'
    self.CreateFiles([file_path, file_path_sha])
    with open(file_path_sha, 'w') as f:
      f.write('hash1234')
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.Copy('bucket1', 'bucket2', 'remote_path1', 'remote_path2')
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.Get('bucket', 'foo', file_path)
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.GetIfChanged(file_path, 'foo')
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.GetIfHashChanged('bar', file_path, 'bucket', 'hash1234')
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.Insert('bucket', 'foo', file_path)
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.GetFilesInDirectoryIfChanged(dir_path, 'bucket')

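# The tests below run against the real filesystem; LOCK_ACQUISITION_TIMEOUT is
# patched down to 5 ms so that a lock which is already held makes GetIfChanged
# time out quickly instead of blocking.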
class CloudStorageRealFsUnitTest(unittest.TestCase):

  def setUp(self):
    self.original_environ = os.environ.copy()
    os.environ['DISABLE_CLOUD_STORAGE_IO'] = ''

  def tearDown(self):
    os.environ = self.original_environ

  @mock.patch('py_utils.cloud_storage.LOCK_ACQUISITION_TIMEOUT', .005)
  def testGetPseudoLockUnavailableCausesTimeout(self):
    with tempfile.NamedTemporaryFile(suffix='.pseudo_lock') as pseudo_lock_fd:
      with lock.FileLock(pseudo_lock_fd, lock.LOCK_EX | lock.LOCK_NB):
        with self.assertRaises(py_utils.TimeoutException):
          file_path = pseudo_lock_fd.name.replace('.pseudo_lock', '')
          cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET)

  @mock.patch('py_utils.cloud_storage.LOCK_ACQUISITION_TIMEOUT', .005)
  def testGetGlobalLockUnavailableCausesTimeout(self):
    with open(_CLOUD_STORAGE_GLOBAL_LOCK_PATH) as global_lock_fd:
      with lock.FileLock(global_lock_fd, lock.LOCK_EX | lock.LOCK_NB):
        tmp_dir = tempfile.mkdtemp()
        try:
          file_path = os.path.join(tmp_dir, 'foo')
          with self.assertRaises(py_utils.TimeoutException):
            cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET)
        finally:
          shutil.rmtree(tmp_dir)

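# GetErrorObjectForCloudStorageStderr maps raw gsutil stderr text to the
# corresponding cloud_storage exception type; the checks below cover the
# 401 (credentials) and 403 (permission) cases.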
class CloudStorageErrorHandlingTest(unittest.TestCase):
  def runTest(self):
    self.assertIsInstance(cloud_storage.GetErrorObjectForCloudStorageStderr(
        'ServiceException: 401 Anonymous users does not have '
        'storage.objects.get access to object chrome-partner-telemetry'),
        cloud_storage.CredentialsError)
    self.assertIsInstance(cloud_storage.GetErrorObjectForCloudStorageStderr(
        '403 Caller does not have storage.objects.list access to bucket '
        'chrome-telemetry'), cloud_storage.PermissionError)
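
# Illustrative only, not exercised by these tests: typical production use of
# the API under test is to fetch a file from a bucket when its recorded .sha1
# hash no longer matches the local copy, e.g.
#   cloud_storage.GetIfChanged('/path/to/data.wpr', cloud_storage.PUBLIC_BUCKET)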