-
Notifications
You must be signed in to change notification settings - Fork 299
[ADD] Forkserver as default multiprocessing strategy #223
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
9b60cd0
f43b0f2
8fefbbf
c6e3872
6355e47
7d0d17d
f94f4a2
e634adf
900f05c
3276529
138ac76
db73585
a83864e
1e86cd1
44d79c8
090dc55
8b78fe2
f9899d9
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,106 @@ | ||
import os | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. The series of changes you made should be achieved by writing the following in each
If there are any intentions behind the changes, let me know the reasons as a response here. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I wrote this in the PR description, but let me state it here again. The problem is that forkserver, when communicating the pre-loaded modules from the parent process to the forked server client (the entity that spawns new process) seems to require the Defining the choice in the init is not sufficient, as the problem is how python organized the loading of the modules. Yes, maybe we could do some intelligent sourcing the choices via relative paths, but the criteria that I follow was to be similar to auto-sklearn so that we share both the same code (and solution to problems). There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Thanks for the response and sorry for the late reply.
It is not good practice to put so much code in `__init__.py`; such modules should ideally stay close to empty.
Could you share the error message you got when you ran it? |
||
from collections import OrderedDict | ||
from typing import Any, Dict, List, Optional | ||
|
||
import ConfigSpace.hyperparameters as CSH | ||
from ConfigSpace.configuration_space import ConfigurationSpace | ||
|
||
from autoPyTorch.pipeline.components.base_choice import autoPyTorchChoice | ||
from autoPyTorch.pipeline.components.base_component import ( | ||
ThirdPartyComponents, | ||
autoPyTorchComponent, | ||
find_components, | ||
) | ||
from autoPyTorch.pipeline.components.preprocessing.image_preprocessing.normalise.base_normalizer import BaseNormalizer | ||
|
||
|
||
# Directory containing this package; scanned below for normalizer components.
normalise_directory = os.path.split(__file__)[0]
# Auto-discover every BaseNormalizer subclass shipped alongside this module.
_normalizers = find_components(__package__,
                               normalise_directory,
                               BaseNormalizer)

# Registry for user-supplied (third-party) normalizer components.
_addons = ThirdPartyComponents(BaseNormalizer)
|
||
|
||
def add_normalizer(normalizer: BaseNormalizer) -> None:
    """Register a third-party normalizer component.

    The component is added to the module-level ``_addons`` registry and
    thereby becomes selectable through ``NormalizerChoice``.

    Args:
        normalizer (BaseNormalizer): component class to register.
    """
    _addons.add_component(normalizer)
|
||
|
||
class NormalizerChoice(autoPyTorchChoice):
    """
    Allows for dynamically choosing normalizer component at runtime
    """

    def get_components(self) -> Dict[str, autoPyTorchComponent]:
        """Collect all normalizer components that can be chosen.

        Args:
            None

        Returns:
            Dict[str, autoPyTorchComponent]: all BaseNormalizer components
                available as choices; auto-discovered components first,
                followed by third-party add-ons.
        """
        # Built-in (discovered) components come first, add-ons after them.
        choices = OrderedDict()
        choices.update(_normalizers)
        choices.update(_addons.components)
        return choices

    def get_hyperparameter_search_space(self,
                                        dataset_properties: Optional[Dict[str, Any]] = None,
                                        default: Optional[str] = None,
                                        include: Optional[List[str]] = None,
                                        exclude: Optional[List[str]] = None) -> ConfigurationSpace:
        """Build the ConfigurationSpace for the normalizer choice.

        Args:
            dataset_properties (Optional[Dict[str, Any]]): extra properties
                merged on top of ``self.dataset_properties``.
            default (Optional[str]): preferred default component; if None,
                the first usable entry of a preference list is taken.
            include (Optional[List[str]]): restrict choices to these names.
            exclude (Optional[List[str]]): remove these names from choices.

        Returns:
            ConfigurationSpace: a '__choice__' hyperparameter plus the child
                search space of every selectable normalizer.

        Raises:
            ValueError: if no normalizer is available, or a search-space
                update requests choices that are not available.
        """
        cs = ConfigurationSpace()

        if dataset_properties is None:
            dataset_properties = dict()

        # Properties passed in override those stored on the instance.
        dataset_properties = {**self.dataset_properties, **dataset_properties}

        available = self.get_available_components(dataset_properties=dataset_properties,
                                                  include=include,
                                                  exclude=exclude)

        if not available:
            raise ValueError("no image normalizers found, please add an image normalizer")

        if default is None:
            # Pick the first preferred component that survives filtering.
            for candidate in ['ImageNormalizer', 'NoNormalizer']:
                if candidate not in available:
                    continue
                if include is not None and candidate not in include:
                    continue
                if exclude is not None and candidate in exclude:
                    continue
                default = candidate
                break

        updates = self._get_search_space_updates()
        if '__choice__' in updates:
            # An explicit update pins the choice values; validate them first.
            choice_update = updates['__choice__']
            if not set(choice_update.value_range).issubset(available):
                raise ValueError("Expected given update for {} to have "
                                 "choices in {} got {}".format(self.__class__.__name__,
                                                               available,
                                                               choice_update.value_range))
            preprocessor = CSH.CategoricalHyperparameter('__choice__',
                                                         choice_update.value_range,
                                                         default_value=choice_update.default_value)
        else:
            preprocessor = CSH.CategoricalHyperparameter('__choice__',
                                                         list(available.keys()),
                                                         default_value=default)
        cs.add_hyperparameter(preprocessor)

        # Attach each component's own search space, conditioned on it being chosen.
        for name in preprocessor.choices:
            child_space = available[name].get_hyperparameter_search_space(dataset_properties)
            cs.add_configuration_space(name, child_space,
                                       parent_hyperparameter={'parent': preprocessor, 'value': name})

        self.configuration_space = cs
        self.dataset_properties = dataset_properties
        return cs
Uh oh!
There was an error while loading. Please reload this page.