Description
This happens in the very specific case when:
- the recording reads from a remote NWB file
- the sorting is run with Docker

The problem is that the Docker API receives a volume specification whose host path is built from the file URL, and it rejects it as invalid. It seems to be essentially a path-naming problem when reading from remote files.
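To make the failure mode concrete, here is a minimal standalone sketch (not SpikeInterface's actual code; `build_volume` and `build_volume_fixed` are hypothetical helpers) of how resolving an HTTPS URL as if it were a local path produces the malformed bind-mount path seen in the traceback below, and how a guard against remote URLs would avoid it:

```python
# Minimal sketch, NOT SpikeInterface's implementation: illustrates how treating
# an HTTPS URL as a local path yields an invalid Docker bind mount.
from pathlib import Path
from urllib.parse import urlparse


def build_volume(file_path: str) -> dict:
    # Path(...).resolve() collapses "https://" into "https:/" and prepends the
    # current working directory, which is exactly the malformed host path in the
    # "invalid volume specification" error below.
    host_path = str(Path(file_path).resolve())
    return {host_path: {"bind": host_path, "mode": "ro"}}


def build_volume_fixed(file_path: str) -> dict:
    # Hypothetical guard: a remote URL is streamed from inside the container,
    # so it should not be turned into a bind mount at all.
    if urlparse(file_path).scheme in ("http", "https"):
        return {}
    host_path = str(Path(file_path).resolve())
    return {host_path: {"bind": host_path, "mode": "ro"}}


url = "https://dandi-api-staging-dandisets.s3.amazonaws.com/blobs/1ed/41e/1ed41e35-8445-4608-b327-b30f74388bea"
print(build_volume(url))        # e.g. {'/cwd/https:/dandi-api-staging-...': {...}}
print(build_volume_fixed(url))  # {} -> no volume mounted for remote data
```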
Example:
```python
import spikeinterface.full as si
from spikeinterface.extractors import NwbRecordingExtractor

recording = NwbRecordingExtractor(
    file_path="https://dandi-api-staging-dandisets.s3.amazonaws.com/blobs/1ed/41e/1ed41e35-8445-4608-b327-b30f74388bea",
    electrical_series_name="ElectricalSeriesRaw",
    stream_mode="remfile"
)

si.get_default_sorter_params('kilosort2_5')
params_kilosort2_5 = {'do_correction': False}

sorting = si.run_sorter(
    'kilosort2_5',
    recording,
    output_folder='kilosort2.5_output',
    docker_image=True,
    verbose=True,
    **params_kilosort2_5
)
```
Error:
```
APIError Traceback (most recent call last)
Cell In[5], line 13
10 si.get_default_sorter_params('kilosort2_5')
11 params_kilosort2_5 = {'do_correction': False}
---> 13 sorting = si.run_sorter(
14 'kilosort2_5',
15 recording,
16 output_folder='kilosort2.5_output',
17 docker_image=True,
18 verbose=True,
19 **params_kilosort2_5
20 )
File /mnt/shared_storage/Github/spikeinterface/src/spikeinterface/sorters/runsorter.py:142, in run_sorter(sorter_name, recording, output_folder, remove_existing_folder, delete_output_folder, verbose, raise_error, docker_image, singularity_image, delete_container_files, with_output, **sorter_params)
140 else:
141 container_image = singularity_image
--> 142 return run_sorter_container(
143 container_image=container_image,
144 mode=mode,
145 **common_kwargs,
146 )
148 return run_sorter_local(**common_kwargs)
File /mnt/shared_storage/Github/spikeinterface/src/spikeinterface/sorters/runsorter.py:519, in run_sorter_container(sorter_name, recording, mode, container_image, output_folder, remove_existing_folder, delete_output_folder, verbose, raise_error, with_output, delete_container_files, extra_requirements, **sorter_params)
517 else:
518 si_source_folder = "/sources"
--> 519 container_client = ContainerClient(mode, container_image, volumes, py_user_base_unix, extra_kwargs)
520 if verbose:
521 print("Starting container")
File /mnt/shared_storage/Github/spikeinterface/src/spikeinterface/sorters/runsorter.py:270, in ContainerClient.__init__(self, mode, container_image, volumes, py_user_base, extra_kwargs)
267 print(f"Docker: pulling image {container_image}")
268 client.images.pull(container_image)
--> 270 self.docker_container = client.containers.create(container_image, tty=True, volumes=volumes, **extra_kwargs)
272 elif mode == "singularity":
273 assert self.py_user_base, "py_user_base folder must be set in singularity mode"
File ~/anaconda3/envs/env_dendro/lib/python3.10/site-packages/docker/models/containers.py:932, in ContainerCollection.create(self, image, command, **kwargs)
930 kwargs['version'] = self.client.api._version
931 create_kwargs = _create_container_args(kwargs)
--> 932 resp = self.client.api.create_container(**create_kwargs)
933 return self.get(resp['Id'])
File ~/anaconda3/envs/env_dendro/lib/python3.10/site-packages/docker/api/container.py:439, in ContainerApiMixin.create_container(self, image, command, hostname, user, detach, stdin_open, tty, ports, environment, volumes, network_disabled, name, entrypoint, working_dir, domainname, host_config, mac_address, labels, stop_signal, networking_config, healthcheck, stop_timeout, runtime, use_config_proxy, platform)
427 environment = self._proxy_configs.inject_proxy_environment(
428 environment
429 ) or None
431 config = self.create_container_config(
432 image, command, hostname, user, detach, stdin_open, tty,
433 ports, environment, volumes,
(...)
437 stop_timeout, runtime
438 )
--> 439 return self.create_container_from_config(config, name, platform)
File ~/anaconda3/envs/env_dendro/lib/python3.10/site-packages/docker/api/container.py:456, in ContainerApiMixin.create_container_from_config(self, config, name, platform)
454 params['platform'] = platform
455 res = self._post_json(u, data=config, params=params)
--> 456 return self._result(res, True)
File ~/anaconda3/envs/env_dendro/lib/python3.10/site-packages/docker/api/client.py:271, in APIClient._result(self, response, json, binary)
269 def _result(self, response, json=False, binary=False):
270 assert not (json and binary)
--> 271 self._raise_for_status(response)
273 if json:
274 return response.json()
File ~/anaconda3/envs/env_dendro/lib/python3.10/site-packages/docker/api/client.py:267, in APIClient._raise_for_status(self, response)
265 response.raise_for_status()
266 except requests.exceptions.HTTPError as e:
--> 267 raise create_api_error_from_http_exception(e) from e
File ~/anaconda3/envs/env_dendro/lib/python3.10/site-packages/docker/errors.py:39, in create_api_error_from_http_exception(e)
37 else:
38 cls = NotFound
---> 39 raise cls(e, response=response, explanation=explanation) from e
APIError: 500 Server Error for http+docker://localhost/v1.41/containers/create: Internal Server Error ("invalid volume specification: '/mnt/shared_storage/taufferconsulting/client_catalystneuro/project_jaz/https:/dandi-api-staging-dandisets.s3.amazonaws.com/blobs/1ed/41e:/mnt/shared_storage/taufferconsulting/client_catalystneuro/project_jaz/https:/dandi-api-staging-dandisets.s3.amazonaws.com/blobs/1ed/41e:ro'")
```
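For reference, a possible workaround sketch until this is fixed (not verified here; the cache folder name is arbitrary): save the remote recording to a local binary folder first, so that only local paths end up in the Docker volume specification.

```python
import spikeinterface.full as si
from spikeinterface.extractors import NwbRecordingExtractor

recording = NwbRecordingExtractor(
    file_path="https://dandi-api-staging-dandisets.s3.amazonaws.com/blobs/1ed/41e/1ed41e35-8445-4608-b327-b30f74388bea",
    electrical_series_name="ElectricalSeriesRaw",
    stream_mode="remfile",
)

# Cache the remote recording to a local binary folder; the cached recording
# only references local paths, so the bind-mount spec stays valid.
recording_local = recording.save(folder="recording_cache")

sorting = si.run_sorter(
    "kilosort2_5",
    recording_local,
    output_folder="kilosort2.5_output",
    docker_image=True,
    verbose=True,
    do_correction=False,
)
```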