|
| 1 | +import re |
| 2 | + |
| 3 | +from smashbox.owncloudorg.locking import * |
| 4 | +from smashbox.utilities import * |
| 5 | +import os |
| 6 | +import signal |
| 7 | + |
__doc__ = """

Test locking enforcement
+------+------------------------------------+
| Step | User |
+------+------------------------------------+
| 2 | Enable QA testing app |
| 3 | Create dir/subdir/ |
| 4 | Populate locks |
| 5 | Try to upload dir/subdir/file2.dat |
| 6 | Remove locks |
| 7 | Upload dir/subdir/file2.dat |
+------+------------------------------------+

"""


# Remote folder layout used by every testset: uploads go into dir/subdir/.
DIR_NAME = 'dir'
SUBDIR_NAME = os.path.join(DIR_NAME, 'subdir')

# Each testset lists the locks to apply before the upload attempt (step 4)
# and whether the upload of dir/subdir/file2.dat is then expected to succeed.
testsets = [
    # Exclusive lock on the parent dir blocks the upload.
    {
        'locks': [
            {
                'lock': LockProvider.LOCK_EXCLUSIVE,
                'path': DIR_NAME
            }
        ],
        'can_upload': False
    },
    # Shared lock on the parent dir does not block the upload.
    {
        'locks': [
            {
                'lock': LockProvider.LOCK_SHARED,
                'path': DIR_NAME
            }
        ],
        'can_upload': True
    },
    # Exclusive lock on the target subdir blocks the upload.
    {
        'locks': [
            {
                'lock': LockProvider.LOCK_EXCLUSIVE,
                'path': SUBDIR_NAME
            }
        ],
        'can_upload': False
    },
    # Shared lock on the target subdir does not block the upload.
    {
        'locks': [
            {
                'lock': LockProvider.LOCK_SHARED,
                'path': SUBDIR_NAME
            }
        ],
        'can_upload': True
    },
    # Mixed locks: an exclusive lock anywhere on the path blocks the upload.
    {
        'locks': [
            {
                'lock': LockProvider.LOCK_EXCLUSIVE,
                'path': DIR_NAME
            },
            {
                'lock': LockProvider.LOCK_SHARED,
                'path': SUBDIR_NAME
            }
        ],
        'can_upload': False
    },
    {
        'locks': [
            {
                'lock': LockProvider.LOCK_SHARED,
                'path': DIR_NAME
            },
            {
                'lock': LockProvider.LOCK_EXCLUSIVE,
                'path': SUBDIR_NAME
            }
        ],
        'can_upload': False
    },
    # Shared locks on the whole path do not block the upload.
    {
        'locks': [
            {
                'lock': LockProvider.LOCK_SHARED,
                'path': DIR_NAME
            },
            {
                'lock': LockProvider.LOCK_SHARED,
                'path': SUBDIR_NAME
            }
        ],
        'can_upload': True
    }
]

# Active testset values; smashbox injects one testset via config, otherwise
# fall back to the first one.
use_locks = config.get('locks', testsets[0]['locks'])
can_upload = config.get('can_upload', testsets[0]['can_upload'])
# Remember the unmodified sync command so save_run_ocsync() can restore it.
original_cmd = config.oc_sync_cmd
| 109 | + |
| 110 | + |
@add_worker
def owner_worker(step):
    """Single-user locking-enforcement test.

    Creates dir/subdir/ on the server, applies the locks from the active
    testset, verifies that uploading dir/subdir/file2.dat is allowed or
    rejected as expected, then removes the locks and verifies the upload
    succeeds.
    """

    if compare_client_version('2.1.1', '<='):
        # The client has a bug with permissions of folders on the first sync before 2.1.2
        logger.warning('Skipping test, because the client version is known to behave incorrectly')
        return

    if compare_oc_version('9.0', '<='):
        # The server has no fake locking support
        logger.warning('Skipping test, because the server has no fake locking support')
        return

    # Locks are provisioned through the testing app, which needs admin rights.
    oc_api = get_oc_api()
    oc_api.login(config.oc_admin_user, config.oc_admin_password)
    lock_provider = LockProvider(oc_api)
    lock_provider.enable_testing_app()

    if not lock_provider.isUsingDBLocking():
        logger.warning('Skipping test, because DB Locking is not enabled or lock provisioning is not supported')
        return

    step(2, 'Create workdir')
    d = make_workdir()

    # A failing global unlock means the testing app endpoints are unavailable.
    from owncloud import OCSResponseError
    try:
        lock_provider.unlock()
    except OCSResponseError:
        fatal_check(False, 'Testing App seems to not be enabled')

    step(3, 'Create test folder')

    mkdir(os.path.join(d, DIR_NAME))
    mkdir(os.path.join(d, SUBDIR_NAME))
    createfile(os.path.join(d, DIR_NAME, 'file.dat'), '0', count=1000, bs=1)
    createfile(os.path.join(d, SUBDIR_NAME, 'sub_file.dat'), '0', count=1000, bs=1)

    run_ocsync(d)

    step(4, 'Lock items')

    for lock in use_locks:
        # Each lock must transition from unlocked to locked.
        fatal_check(
            not lock_provider.is_locked(lock['lock'], config.oc_account_name, lock['path']),
            'Resource is already locked'
        )

        lock_provider.lock(lock['lock'], config.oc_account_name, lock['path'])

        fatal_check(
            lock_provider.is_locked(lock['lock'], config.oc_account_name, lock['path']),
            'Resource should be locked'
        )

    step(5, 'Try to upload a file in locked item')

    createfile(os.path.join(d, SUBDIR_NAME, 'file2.dat'), '0', count=1000, bs=1)

    try:
        save_run_ocsync(d, seconds=10, max_sync_retries=1)
    except TimeoutError as err:
        if compare_client_version('2.1.0', '>='):
            # Max retries should terminate in time
            error_check(False, err.message)
        else:
            # Client does not terminate before 2.1: https://github.com/owncloud/client/issues/4037
            logger.warning(err.message)

    if can_upload:
        expect_webdav_exist(os.path.join(SUBDIR_NAME, 'file2.dat'))
    else:
        expect_webdav_does_not_exist(os.path.join(SUBDIR_NAME, 'file2.dat'))

    step(6, 'Unlock item and sync again')

    for lock in use_locks:
        # Each lock must transition from locked back to unlocked.
        # (Fixed: the two check messages below were copy-pasted from step 4
        # and described the opposite condition.)
        fatal_check(
            lock_provider.is_locked(lock['lock'], config.oc_account_name, lock['path']),
            'Resource should be locked'
        )

        lock_provider.unlock(lock['lock'], config.oc_account_name, lock['path'])

        fatal_check(
            not lock_provider.is_locked(lock['lock'], config.oc_account_name, lock['path']),
            'Resource should not be locked'
        )

    step(7, 'Upload a file in unlocked item')

    run_ocsync(d)

    expect_webdav_exist(os.path.join(SUBDIR_NAME, 'file2.dat'))

    step(8, 'Final - Unlock everything')

    lock_provider.unlock()
    lock_provider.disable_testing_app()
| 210 | + |
| 211 | + |
class TimeoutError(Exception):
    # Raised by the SIGALRM handler when the sync client does not terminate
    # within the allotted time.
    # NOTE(review): this shadows the Python 3 builtin TimeoutError; the file
    # uses ``err.message`` (Python 2 idiom), so presumably this targets
    # Python 2 where no such builtin exists — confirm before porting.
    pass
| 214 | + |
| 215 | + |
def handler(signum, frame):
    """SIGALRM handler: abort a hanging sync run.

    Restores the unmodified sync command first, because raising here unwinds
    out of save_run_ocsync() before its own cleanup line runs.

    :param signum: Signal number delivered (always SIGALRM here)
    :param frame: Current stack frame at delivery time
    :raises TimeoutError: always, to break out of the blocked run_ocsync()
    """
    config.oc_sync_cmd = original_cmd
    raise TimeoutError('Sync client did not terminate in time')
| 219 | + |
| 220 | + |
def save_run_ocsync(local_folder, seconds=10, max_sync_retries=1, remote_folder="", n=None, user_num=None):
    """
    A save variation of run_ocsync, that terminates after n seconds or x retries depending on the client version

    :param local_folder: The local folder to sync
    :param seconds: Number of seconds until the request should be terminated
    :param max_sync_retries: Number of retries for each sync
    :param remote_folder: The remote target folder to sync to
    :param n: Number of syncs
    :param user_num: User number
    :raises TimeoutError: when the client is still running after `seconds`
    """

    if compare_client_version('2.1.0', '>='):
        # Clients >= 2.1.0 honour --max-sync-retries; strip any previous
        # occurrence before appending so repeated calls don't accumulate flags.
        pattern = re.compile(r' \-\-max\-sync\-retries \d+')
        config.oc_sync_cmd = pattern.sub('', config.oc_sync_cmd)
        config.oc_sync_cmd += ' --max-sync-retries %i' % max_sync_retries

    # Arm a watchdog alarm; `handler` raises TimeoutError when it fires.
    previous_handler = signal.signal(signal.SIGALRM, handler)
    signal.alarm(seconds)

    try:
        # This run_ocsync() may hang indefinitely
        run_ocsync(local_folder, remote_folder, n, user_num)
    finally:
        # Fixed: cleanup previously only ran on the success path, leaving the
        # alarm armed and the sync command modified if run_ocsync() raised.
        signal.alarm(0)
        signal.signal(signal.SIGALRM, previous_handler)
        config.oc_sync_cmd = original_cmd
0 commit comments