Summary of changes:
* [x] Update dependencies to ConfigSpace >= 1.0.
* [x] Update dependencies to SMAC3 >= 2.2.
* [x] Migrate to the new ConfigSpace API (see the sketch below).
* [x] Improve the definition of the ConfigSpace fixture in the unit tests.
* [x] Fix mypy and pylint issues triggered by the migration.
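
For orientation, a minimal sketch of the main API changes this migration applies (a toy space with made-up parameter names, not code from this repo; assumes ConfigSpace >= 1.0):

```python
import ConfigSpace as CS

space = CS.ConfigurationSpace(seed=1234)

# Old API: add_hyperparameter(), add_hyperparameters(), add_condition().
# New API: a single add() accepts a hyperparameter, a list of them, or a condition.
space.add(CS.UniformFloatHyperparameter(name="x", lower=0, upper=1))
space.add(
    [
        CS.UniformIntegerHyperparameter(name="y", lower=0, upper=10),
        CS.CategoricalHyperparameter(name="z", choices=["a", "b", "c"]),
    ]
)

# Old API: space.check_configuration(config) or config.is_valid_configuration().
# New API: config.check_valid_configuration() raises if the config is invalid.
config = CS.Configuration(space, values={"x": 0.5, "y": 5, "z": "a"})
config.check_valid_configuration()
```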

Notes:
* Side effect: **Works with Python 3.12!**
* ~Supersedes #799~
* ~Depends on an upcoming SMAC3 release with [SMAC3 #1124](https://github.com/automl/SMAC3/pull/1124) merged in.~
* Quantization will be added back later:
    * #803 
* ~Tests fail due to a LlamaTune issue:~
    * #805 

Closes #727

---------

Co-authored-by: Brian Kroth <bpkroth@users.noreply.github.com>
Co-authored-by: Brian Kroth <bpkroth@microsoft.com>
Sergiy Matusevych 2024-08-02 16:37:41 -07:00 committed by GitHub
Parent 1ad725a69b
Commit a5f36a85f2
No key found matching this signature
GPG key ID: B5690EEEBB952194
18 changed files with 184 additions and 145 deletions

View file

@@ -16,7 +16,8 @@ dependencies:
- jupyter
- ipykernel
- nb_conda_kernels
- matplotlib
- matplotlib<3.9
- matplotlib-base<3.9
- seaborn
- pandas
- pyarrow

View file

@@ -7,10 +7,12 @@ optimizers.
"""
import logging
from typing import Dict, List, Optional, Tuple, Union
from typing import Dict, Hashable, List, Optional, Tuple, Union
from ConfigSpace import (
Beta,
BetaFloatHyperparameter,
BetaIntegerHyperparameter,
CategoricalHyperparameter,
Configuration,
ConfigurationSpace,
@@ -18,12 +20,17 @@ from ConfigSpace import (
Float,
Integer,
Normal,
NormalFloatHyperparameter,
NormalIntegerHyperparameter,
Uniform,
UniformFloatHyperparameter,
UniformIntegerHyperparameter,
)
from ConfigSpace.types import NotSet
from mlos_bench.tunables.tunable import Tunable, TunableValue
from mlos_bench.tunables.tunable_groups import TunableGroups
from mlos_bench.util import nullable, try_parse_val
from mlos_bench.util import try_parse_val
_LOG = logging.getLogger(__name__)
@@ -70,7 +77,9 @@ def _tunable_to_configspace(
cs : ConfigurationSpace
A ConfigurationSpace object that corresponds to the Tunable.
"""
meta = {"group": group_name, "cost": cost} # {"scaling": ""}
meta: Dict[Hashable, TunableValue] = {"cost": cost}
if group_name is not None:
meta["group"] = group_name
if tunable.type == "categorical":
return ConfigurationSpace(
@@ -101,12 +110,20 @@ def _tunable_to_configspace(
elif tunable.distribution is not None:
raise TypeError(f"Invalid Distribution Type: {tunable.distribution}")
range_hp: Union[
BetaFloatHyperparameter,
BetaIntegerHyperparameter,
NormalFloatHyperparameter,
NormalIntegerHyperparameter,
UniformFloatHyperparameter,
UniformIntegerHyperparameter,
]
if tunable.type == "int":
range_hp = Integer(
name=tunable.name,
bounds=(int(tunable.range[0]), int(tunable.range[1])),
log=bool(tunable.is_log),
q=nullable(int, tunable.quantization),
# TODO: Restore quantization support (#803).
distribution=distribution,
default=(
int(tunable.default)
@@ -120,8 +137,8 @@ def _tunable_to_configspace(
name=tunable.name,
bounds=tunable.range,
log=bool(tunable.is_log),
q=tunable.quantization, # type: ignore[arg-type]
distribution=distribution, # type: ignore[arg-type]
# TODO: Restore quantization support (#803).
distribution=distribution,
default=(
float(tunable.default)
if tunable.in_range(tunable.default) and tunable.default is not None
@@ -152,7 +169,7 @@ def _tunable_to_configspace(
name=special_name,
choices=tunable.special,
weights=special_weights,
default_value=tunable.default if tunable.default in tunable.special else None,
default_value=tunable.default if tunable.default in tunable.special else NotSet,
meta=meta,
),
type_name: CategoricalHyperparameter(
@@ -163,10 +180,10 @@ def _tunable_to_configspace(
),
}
)
conf_space.add_condition(
conf_space.add(
EqualsCondition(conf_space[special_name], conf_space[type_name], TunableValueKind.SPECIAL)
)
conf_space.add_condition(
conf_space.add(
EqualsCondition(conf_space[tunable.name], conf_space[type_name], TunableValueKind.RANGE)
)

View file

@@ -9,6 +9,7 @@ from ConfigSpace import (
CategoricalHyperparameter,
ConfigurationSpace,
EqualsCondition,
Integer,
UniformFloatHyperparameter,
UniformIntegerHyperparameter,
)
@@ -40,45 +41,66 @@ def configuration_space() -> ConfigurationSpace:
special_param_names("kernel_sched_migration_cost_ns")
)
spaces = ConfigurationSpace(
space={
"vmSize": ["Standard_B2s", "Standard_B2ms", "Standard_B4ms"],
"idle": ["halt", "mwait", "noidle"],
"kernel_sched_migration_cost_ns": (0, 500000),
kernel_sched_migration_cost_ns_special: [-1, 0],
kernel_sched_migration_cost_ns_type: [
TunableValueKind.SPECIAL,
TunableValueKind.RANGE,
],
"kernel_sched_latency_ns": (0, 1000000000),
}
)
# TODO: Add quantization support tests (#803).
# NOTE: FLAML requires distribution to be uniform
spaces["vmSize"].default_value = "Standard_B4ms"
spaces["idle"].default_value = "halt"
spaces["kernel_sched_migration_cost_ns"].default_value = 250000
spaces[kernel_sched_migration_cost_ns_special].default_value = -1
spaces[kernel_sched_migration_cost_ns_special].probabilities = (0.5, 0.5)
spaces[kernel_sched_migration_cost_ns_type].default_value = TunableValueKind.SPECIAL
spaces[kernel_sched_migration_cost_ns_type].probabilities = (0.5, 0.5)
spaces["kernel_sched_latency_ns"].default_value = 2000000
spaces.add_condition(
spaces = ConfigurationSpace(
{
"vmSize": CategoricalHyperparameter(
name="vmSize",
choices=["Standard_B2s", "Standard_B2ms", "Standard_B4ms"],
default_value="Standard_B4ms",
meta={"group": "provision", "cost": 0},
),
"idle": CategoricalHyperparameter(
name="idle",
choices=["halt", "mwait", "noidle"],
default_value="halt",
meta={"group": "boot", "cost": 0},
),
"kernel_sched_latency_ns": Integer(
name="kernel_sched_latency_ns",
bounds=(0, 1000000000),
log=False,
default=2000000,
meta={"group": "kernel", "cost": 0},
),
"kernel_sched_migration_cost_ns": Integer(
name="kernel_sched_migration_cost_ns",
bounds=(0, 500000),
log=False,
default=250000,
meta={"group": "kernel", "cost": 0},
),
kernel_sched_migration_cost_ns_special: CategoricalHyperparameter(
name=kernel_sched_migration_cost_ns_special,
choices=[-1, 0],
weights=[0.5, 0.5],
default_value=-1,
meta={"group": "kernel", "cost": 0},
),
kernel_sched_migration_cost_ns_type: CategoricalHyperparameter(
name=kernel_sched_migration_cost_ns_type,
choices=[TunableValueKind.SPECIAL, TunableValueKind.RANGE],
weights=[0.5, 0.5],
default_value=TunableValueKind.SPECIAL,
),
}
)
spaces.add(
EqualsCondition(
spaces[kernel_sched_migration_cost_ns_special],
spaces[kernel_sched_migration_cost_ns_type],
TunableValueKind.SPECIAL,
)
)
spaces.add_condition(
spaces.add(
EqualsCondition(
spaces["kernel_sched_migration_cost_ns"],
spaces[kernel_sched_migration_cost_ns_type],
TunableValueKind.RANGE,
)
)
return spaces

View file

@@ -23,6 +23,7 @@ from mlos_core.optimizers.bayesian_optimizers.bayesian_optimizer import (
)
from mlos_core.spaces.adapters.adapter import BaseSpaceAdapter
from mlos_core.spaces.adapters.identity_adapter import IdentityAdapter
from mlos_core.util import drop_nulls
class SmacOptimizer(BaseBayesianOptimizer):
@@ -350,8 +351,11 @@ class SmacOptimizer(BaseBayesianOptimizer):
warn(f"Not Implemented: Ignoring context {list(context.columns)}", UserWarning)
trial: TrialInfo = self.base_optimizer.ask()
trial.config.is_valid_configuration()
self.optimizer_parameter_space.check_configuration(trial.config)
trial.config.check_valid_configuration()
ConfigSpace.Configuration(
self.optimizer_parameter_space,
values=trial.config,
).check_valid_configuration()
assert trial.config.config_space == self.optimizer_parameter_space
self.trial_info_map[trial.config] = trial
config_df = pd.DataFrame(
@@ -441,6 +445,11 @@ class SmacOptimizer(BaseBayesianOptimizer):
List of ConfigSpace configs.
"""
return [
ConfigSpace.Configuration(self.optimizer_parameter_space, values=config.to_dict())
ConfigSpace.Configuration(
self.optimizer_parameter_space,
# Remove None values for inactive parameters
values=drop_nulls(config.to_dict()),
allow_inactive_with_values=False,
)
for (_, config) in configs.astype("O").iterrows()
]

View file

@@ -13,7 +13,7 @@ import pandas as pd
from mlos_core.optimizers.optimizer import BaseOptimizer
from mlos_core.spaces.adapters.adapter import BaseSpaceAdapter
from mlos_core.util import normalize_config
from mlos_core.util import drop_nulls, normalize_config
class EvaluatedSample(NamedTuple):
@@ -124,13 +124,16 @@ class FlamlOptimizer(BaseOptimizer):
warn(f"Not Implemented: Ignoring metadata {list(metadata.columns)}", UserWarning)
for (_, config), (_, score) in zip(configs.astype("O").iterrows(), scores.iterrows()):
# Remove None values for inactive config parameters
config_dict = drop_nulls(config.to_dict())
cs_config: ConfigSpace.Configuration = ConfigSpace.Configuration(
self.optimizer_parameter_space, values=config.to_dict()
self.optimizer_parameter_space,
values=config_dict,
)
if cs_config in self.evaluated_samples:
warn(f"Configuration {config} was already registered", UserWarning)
self.evaluated_samples[cs_config] = EvaluatedSample(
config=config.to_dict(),
config=config_dict,
score=float(np.average(score.astype(float), weights=self._objective_weights)),
)

View file

@@ -4,7 +4,7 @@
#
"""Implementation of LlamaTune space adapter."""
import os
from typing import Dict, Optional
from typing import Dict, List, Optional, Union
from warnings import warn
import ConfigSpace
@@ -16,7 +16,7 @@ from ConfigSpace.hyperparameters import NumericalHyperparameter
from sklearn.preprocessing import MinMaxScaler
from mlos_core.spaces.adapters.adapter import BaseSpaceAdapter
from mlos_core.util import normalize_config
from mlos_core.util import drop_nulls, normalize_config
class LlamaTuneAdapter(BaseSpaceAdapter): # pylint: disable=too-many-instance-attributes
@@ -102,7 +102,7 @@ class LlamaTuneAdapter(BaseSpaceAdapter): # pylint: disable=too-many-instance-a
for _, config in configurations.astype("O").iterrows():
configuration = ConfigSpace.Configuration(
self.orig_parameter_space,
values=config.to_dict(),
values=drop_nulls(config.to_dict()),
)
target_config = self._suggested_configs.get(configuration, None)
@@ -222,7 +222,10 @@ class LlamaTuneAdapter(BaseSpaceAdapter): # pylint: disable=too-many-instance-a
# But the inverse mapping should at least be valid in the target space.
try:
self.target_parameter_space.check_configuration(target_config)
ConfigSpace.Configuration(
self.target_parameter_space,
values=target_config,
).check_valid_configuration()
except ConfigSpace.exceptions.IllegalValueError as e:
raise ValueError(
f"Invalid configuration {target_config} generated by "
@@ -249,7 +252,10 @@ class LlamaTuneAdapter(BaseSpaceAdapter): # pylint: disable=too-many-instance-a
# Validate that the configuration is in the original space.
try:
self.orig_parameter_space.check_configuration(orig_configuration)
ConfigSpace.Configuration(
self.orig_parameter_space,
values=orig_configuration,
).check_valid_configuration()
except ConfigSpace.exceptions.IllegalValueError as e:
raise ValueError(
f"Invalid configuration {orig_configuration} generated by "
@@ -282,6 +288,9 @@ class LlamaTuneAdapter(BaseSpaceAdapter): # pylint: disable=too-many-instance-a
"""
# Define target space parameters
q_scaler = None
hyperparameters: List[
Union[ConfigSpace.UniformFloatHyperparameter, ConfigSpace.UniformIntegerHyperparameter]
]
if max_unique_values_per_param is None:
hyperparameters = [
ConfigSpace.UniformFloatHyperparameter(name=f"dim_{idx}", lower=-1, upper=1)
@@ -316,7 +325,7 @@ class LlamaTuneAdapter(BaseSpaceAdapter): # pylint: disable=too-many-instance-a
config_space = ConfigSpace.ConfigurationSpace(name=self.orig_parameter_space.name)
# use same random state as in original parameter space
config_space.random = self._random_state
config_space.add_hyperparameters(hyperparameters)
config_space.add(hyperparameters)
self._target_config_space = config_space
def _transform(self, configuration: dict) -> dict:
@@ -366,7 +375,7 @@ class LlamaTuneAdapter(BaseSpaceAdapter): # pylint: disable=too-many-instance-a
if param.name in self._special_param_values_dict:
value = self._special_param_value_scaler(param, value)
orig_value = param._transform(value) # pylint: disable=protected-access
orig_value = param.to_value(value)
orig_value = np.clip(orig_value, param.lower, param.upper)
else:
raise NotImplementedError(
@@ -379,7 +388,7 @@ class LlamaTuneAdapter(BaseSpaceAdapter): # pylint: disable=too-many-instance-a
def _special_param_value_scaler(
self,
param: ConfigSpace.UniformIntegerHyperparameter,
param: NumericalHyperparameter,
input_value: float,
) -> float:
"""
@@ -388,7 +397,7 @@ class LlamaTuneAdapter(BaseSpaceAdapter): # pylint: disable=too-many-instance-a
Parameters
----------
param: ConfigSpace.UniformIntegerHyperparameter
param: NumericalHyperparameter
Parameter of the original parameter space.
input_value: float
@@ -403,19 +412,13 @@ class LlamaTuneAdapter(BaseSpaceAdapter): # pylint: disable=too-many-instance-a
# Check if input value corresponds to some special value
perc_sum = 0.0
ret: float
for special_value, biasing_perc in special_values_list:
perc_sum += biasing_perc
if input_value < perc_sum:
ret = param._inverse_transform(special_value) # pylint: disable=protected-access
return ret
return float(param.to_vector(special_value))
# Scale input value uniformly to non-special values
# pylint: disable=protected-access
ret = param._inverse_transform(
param._transform_scalar((input_value - perc_sum) / (1 - perc_sum))
)
return ret
return float(param.to_vector((input_value - perc_sum) / (1 - perc_sum)))
# pylint: disable=too-complex,too-many-branches
def _validate_special_param_values(self, special_param_values_dict: dict) -> None:

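For reference, a hedged sketch of the public vector-space API that replaces the private `_transform`/`_inverse_transform` calls above (assumes ConfigSpace >= 1.0; toy parameter, not from this repo):

```python
from ConfigSpace import UniformIntegerHyperparameter

param = UniformIntegerHyperparameter("p", lower=0, upper=100)

# to_vector(): value space -> normalized [0, 1] vector space.
vec = float(param.to_vector(50))
assert 0.0 <= vec <= 1.0

# to_value(): vector space -> value space; round-trips the original value.
assert param.to_value(vec) == 50
```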
View file

@@ -14,9 +14,9 @@ def configuration_space() -> CS.ConfigurationSpace:
# Start defining a ConfigurationSpace for the Optimizer to search.
space = CS.ConfigurationSpace(seed=1234)
# Add a continuous input dimension between 0 and 1.
space.add_hyperparameter(CS.UniformFloatHyperparameter(name="x", lower=0, upper=1))
space.add(CS.UniformFloatHyperparameter(name="x", lower=0, upper=1))
# Add a categorical hyperparameter with 3 possible values.
space.add_hyperparameter(CS.CategoricalHyperparameter(name="y", choices=["a", "b", "c"]))
space.add(CS.CategoricalHyperparameter(name="y", choices=["a", "b", "c"]))
# Add a discrete input dimension between 0 and 10.
space.add_hyperparameter(CS.UniformIntegerHyperparameter(name="z", lower=0, upper=10))
space.add(CS.UniformIntegerHyperparameter(name="z", lower=0, upper=10))
return space

View file

@@ -76,8 +76,8 @@ def test_multi_target_opt(
input_space = CS.ConfigurationSpace(seed=SEED)
# add a mix of numeric datatypes
input_space.add_hyperparameter(CS.UniformIntegerHyperparameter(name="x", lower=0, upper=5))
input_space.add_hyperparameter(CS.UniformFloatHyperparameter(name="y", lower=0.0, upper=5.0))
input_space.add(CS.UniformIntegerHyperparameter(name="x", lower=0, upper=5))
input_space.add(CS.UniformFloatHyperparameter(name="y", lower=0.0, upper=5.0))
optimizer = optimizer_class(
parameter_space=input_space,
@@ -105,7 +105,7 @@ def test_multi_target_opt(
optimizer.parameter_space, suggestion.astype("O").iloc[0].to_dict()
)
# Raises an error if outside of configuration space
test_configuration.is_valid_configuration()
test_configuration.check_valid_configuration()
# Test registering the suggested configuration with a score.
observation = objective(suggestion)
assert isinstance(observation, pd.DataFrame)

View file

@@ -112,7 +112,7 @@ def test_basic_interface_toy_problem(
# check that suggestion is in the space
configuration = CS.Configuration(optimizer.parameter_space, suggestion.iloc[0].to_dict())
# Raises an error if outside of configuration space
configuration.is_valid_configuration()
configuration.check_valid_configuration()
observation = objective(suggestion["x"])
assert isinstance(observation, pd.DataFrame)
optimizer.register(configs=suggestion, scores=observation, metadata=metadata)
@@ -234,8 +234,8 @@ def test_optimizer_with_llamatune(optimizer_type: OptimizerType, kwargs: Optiona
input_space = CS.ConfigurationSpace(seed=1234)
# Add two continuous inputs
input_space.add_hyperparameter(CS.UniformFloatHyperparameter(name="x", lower=0, upper=3))
input_space.add_hyperparameter(CS.UniformFloatHyperparameter(name="y", lower=0, upper=3))
input_space.add(CS.UniformFloatHyperparameter(name="x", lower=0, upper=3))
input_space.add(CS.UniformFloatHyperparameter(name="y", lower=0, upper=3))
# Initialize an optimizer that uses LlamaTune space adapter
space_adapter_kwargs = {
@@ -387,8 +387,8 @@ def test_mixed_numerics_type_input_space_types(
input_space = CS.ConfigurationSpace(seed=SEED)
# add a mix of numeric datatypes
input_space.add_hyperparameter(CS.UniformIntegerHyperparameter(name="x", lower=0, upper=5))
input_space.add_hyperparameter(CS.UniformFloatHyperparameter(name="y", lower=0.0, upper=5.0))
input_space.add(CS.UniformIntegerHyperparameter(name="x", lower=0, upper=5))
input_space.add(CS.UniformFloatHyperparameter(name="y", lower=0.0, upper=5.0))
if optimizer_type is None:
optimizer = OptimizerFactory.create(
@@ -422,7 +422,7 @@ def test_mixed_numerics_type_input_space_types(
optimizer.parameter_space, suggestion.astype("O").iloc[0].to_dict()
)
# Raises an error if outside of configuration space
test_configuration.is_valid_configuration()
test_configuration.check_valid_configuration()
# Test registering the suggested configuration with a score.
observation = objective(suggestion)
assert isinstance(observation, pd.DataFrame)

View file

@@ -15,15 +15,9 @@ from mlos_core.spaces.adapters import IdentityAdapter
def test_identity_adapter() -> None:
"""Tests identity adapter."""
input_space = CS.ConfigurationSpace(seed=1234)
input_space.add_hyperparameter(
CS.UniformIntegerHyperparameter(name="int_1", lower=0, upper=100)
)
input_space.add_hyperparameter(
CS.UniformFloatHyperparameter(name="float_1", lower=0, upper=100)
)
input_space.add_hyperparameter(
CS.CategoricalHyperparameter(name="str_1", choices=["on", "off"])
)
input_space.add(CS.UniformIntegerHyperparameter(name="int_1", lower=0, upper=100))
input_space.add(CS.UniformFloatHyperparameter(name="float_1", lower=0, upper=100))
input_space.add(CS.CategoricalHyperparameter(name="str_1", choices=["on", "off"]))
adapter = IdentityAdapter(orig_parameter_space=input_space)

View file

@@ -25,15 +25,11 @@ def construct_parameter_space(
input_space = CS.ConfigurationSpace(seed=seed)
for idx in range(n_continuous_params):
input_space.add_hyperparameter(
CS.UniformFloatHyperparameter(name=f"cont_{idx}", lower=0, upper=64)
)
input_space.add(CS.UniformFloatHyperparameter(name=f"cont_{idx}", lower=0, upper=64))
for idx in range(n_integer_params):
input_space.add_hyperparameter(
CS.UniformIntegerHyperparameter(name=f"int_{idx}", lower=-1, upper=256)
)
input_space.add(CS.UniformIntegerHyperparameter(name=f"int_{idx}", lower=-1, upper=256))
for idx in range(n_categorical_params):
input_space.add_hyperparameter(
input_space.add(
CS.CategoricalHyperparameter(
name=f"str_{idx}", choices=[f"option_{idx}" for idx in range(5)]
)
@@ -94,7 +90,7 @@ def test_num_low_dims(
# High-dim (i.e., original) config should be valid
orig_config = CS.Configuration(input_space, values=orig_config_df.iloc[0].to_dict())
input_space.check_configuration(orig_config)
orig_config.check_valid_configuration()
# Transform high-dim config back to low-dim
target_config_df = adapter.inverse_transform(orig_config_df)
@@ -130,11 +126,11 @@ def test_special_parameter_values_validation() -> None:
dictionary.
"""
input_space = CS.ConfigurationSpace(seed=1234)
input_space.add_hyperparameter(
input_space.add(
CS.CategoricalHyperparameter(name="str", choices=[f"choice_{idx}" for idx in range(5)])
)
input_space.add_hyperparameter(CS.UniformFloatHyperparameter(name="cont", lower=-1, upper=100))
input_space.add_hyperparameter(CS.UniformIntegerHyperparameter(name="int", lower=0, upper=100))
input_space.add(CS.UniformFloatHyperparameter(name="cont", lower=-1, upper=100))
input_space.add(CS.UniformIntegerHyperparameter(name="int", lower=0, upper=100))
# Only UniformIntegerHyperparameters are currently supported
with pytest.raises(NotImplementedError):
@@ -224,12 +220,8 @@ def gen_random_configs(adapter: LlamaTuneAdapter, num_configs: int) -> Iterator[
def test_special_parameter_values_biasing() -> None: # pylint: disable=too-complex
"""Tests LlamaTune's special parameter values biasing methodology."""
input_space = CS.ConfigurationSpace(seed=1234)
input_space.add_hyperparameter(
CS.UniformIntegerHyperparameter(name="int_1", lower=0, upper=100)
)
input_space.add_hyperparameter(
CS.UniformIntegerHyperparameter(name="int_2", lower=0, upper=100)
)
input_space.add(CS.UniformIntegerHyperparameter(name="int_1", lower=0, upper=100))
input_space.add(CS.UniformIntegerHyperparameter(name="int_2", lower=0, upper=100))
num_configs = 400
bias_percentage = LlamaTuneAdapter.DEFAULT_SPECIAL_PARAM_VALUE_BIASING_PERCENTAGE
@@ -323,22 +315,14 @@ def test_max_unique_values_per_param() -> None:
"""Tests LlamaTune's parameter values discretization implementation."""
# Define config space with a mix of different parameter types
input_space = CS.ConfigurationSpace(seed=1234)
input_space.add_hyperparameter(
input_space.add(
CS.UniformFloatHyperparameter(name="cont_1", lower=0, upper=5),
)
input_space.add_hyperparameter(
CS.UniformFloatHyperparameter(name="cont_2", lower=1, upper=100)
)
input_space.add_hyperparameter(
CS.UniformIntegerHyperparameter(name="int_1", lower=1, upper=10)
)
input_space.add_hyperparameter(
CS.UniformIntegerHyperparameter(name="int_2", lower=0, upper=2048)
)
input_space.add_hyperparameter(
CS.CategoricalHyperparameter(name="str_1", choices=["on", "off"])
)
input_space.add_hyperparameter(
input_space.add(CS.UniformFloatHyperparameter(name="cont_2", lower=1, upper=100))
input_space.add(CS.UniformIntegerHyperparameter(name="int_1", lower=1, upper=10))
input_space.add(CS.UniformIntegerHyperparameter(name="int_2", lower=0, upper=2048))
input_space.add(CS.CategoricalHyperparameter(name="str_1", choices=["on", "off"]))
input_space.add(
CS.CategoricalHyperparameter(name="str_2", choices=[f"choice_{idx}" for idx in range(10)])
)
@@ -430,7 +414,7 @@ def test_approx_inverse_mapping(
adapter.target_parameter_space,
values=target_config_df.iloc[0].to_dict(),
)
adapter.target_parameter_space.check_configuration(target_config)
target_config.check_valid_configuration()
# Test inverse transform with 100 random configs
for _ in range(100):
@@ -444,7 +428,7 @@ def test_approx_inverse_mapping(
adapter.target_parameter_space,
values=target_config_df.iloc[0].to_dict(),
)
adapter.target_parameter_space.check_configuration(target_config)
target_config.check_valid_configuration()
@pytest.mark.parametrize(
@@ -503,7 +487,7 @@ def test_llamatune_pipeline(
orig_config_df = adapter.transform(sampled_config_df)
# High-dim (i.e., original) config should be valid
orig_config = CS.Configuration(input_space, values=orig_config_df.iloc[0].to_dict())
input_space.check_configuration(orig_config)
orig_config.check_valid_configuration()
# Transform high-dim config back to low-dim
target_config_df = adapter.inverse_transform(orig_config_df)

View file

@@ -52,9 +52,9 @@ def test_create_space_adapter_with_factory_method(
input_space = CS.ConfigurationSpace(seed=1234)
# Add a single continuous input dimension between 0 and 1.
input_space.add_hyperparameter(CS.UniformFloatHyperparameter(name="x", lower=0, upper=1))
input_space.add(CS.UniformFloatHyperparameter(name="x", lower=0, upper=1))
# Add a single continuous input dimension between 0 and 1.
input_space.add_hyperparameter(CS.UniformFloatHyperparameter(name="y", lower=0, upper=1))
input_space.add(CS.UniformFloatHyperparameter(name="y", lower=0, upper=1))
# Adjust some kwargs for specific space adapters
if space_adapter_type is SpaceAdapterType.LLAMATUNE:

View file

@@ -75,7 +75,7 @@ class BaseConversion(metaclass=ABCMeta):
conversion_function: Callable[..., OptimizerSpace] = invalid_conversion_function
@abstractmethod
def sample(self, config_space: OptimizerSpace, n_samples: int = 1) -> OptimizerParam:
def sample(self, config_space: OptimizerSpace, n_samples: int = 1) -> npt.NDArray:
"""
Sample from the given configuration space.
@@ -115,14 +115,14 @@ class BaseConversion(metaclass=ABCMeta):
def test_unsupported_hyperparameter(self) -> None:
input_space = CS.ConfigurationSpace()
input_space.add_hyperparameter(NormalIntegerHyperparameter("a", 2, 1))
input_space.add(NormalIntegerHyperparameter("a", mu=50, sigma=5, lower=0, upper=99))
with pytest.raises(ValueError, match="NormalIntegerHyperparameter"):
self.conversion_function(input_space)
def test_continuous_bounds(self) -> None:
input_space = CS.ConfigurationSpace()
input_space.add_hyperparameter(CS.UniformFloatHyperparameter("a", lower=100, upper=200))
input_space.add_hyperparameter(CS.UniformIntegerHyperparameter("b", lower=-10, upper=-5))
input_space.add(CS.UniformFloatHyperparameter("a", lower=100, upper=200))
input_space.add(CS.UniformIntegerHyperparameter("b", lower=-10, upper=-5))
converted_space = self.conversion_function(input_space)
assert self.get_parameter_names(converted_space) == [ # pylint: disable=unreachable
@@ -134,9 +134,8 @@ class BaseConversion(metaclass=ABCMeta):
assert -10 <= point[1] <= -5
def test_uniform_samples(self) -> None:
input_space = CS.ConfigurationSpace()
input_space.add_hyperparameter(CS.UniformFloatHyperparameter("a", lower=1, upper=5))
input_space.add_hyperparameter(CS.UniformIntegerHyperparameter("c", lower=1, upper=20))
c = CS.UniformIntegerHyperparameter("c", lower=1, upper=20)
input_space = CS.ConfigurationSpace({"a": (1.0, 5.0), "c": c})
converted_space = self.conversion_function(input_space)
np.random.seed(42) # pylint: disable=unreachable
@@ -146,14 +145,14 @@ class BaseConversion(metaclass=ABCMeta):
assert_is_uniform(uniform)
# Check that we get both ends of the sampled range returned to us.
assert input_space["c"].lower in integer_uniform
assert input_space["c"].upper in integer_uniform
assert c.upper in integer_uniform
assert c.lower in integer_uniform
# integer uniform
assert_is_uniform(integer_uniform)
def test_uniform_categorical(self) -> None:
input_space = CS.ConfigurationSpace()
input_space.add_hyperparameter(CS.CategoricalHyperparameter("c", choices=["foo", "bar"]))
input_space.add(CS.CategoricalHyperparameter("c", choices=["foo", "bar"]))
converted_space = self.conversion_function(input_space)
points = self.sample(converted_space, n_samples=100) # pylint: disable=unreachable
counts = self.categorical_counts(points)
@@ -199,16 +198,16 @@ class TestFlamlConversion(BaseConversion):
def test_dimensionality(self) -> None:
input_space = CS.ConfigurationSpace()
input_space.add_hyperparameter(CS.UniformIntegerHyperparameter("a", lower=1, upper=10))
input_space.add_hyperparameter(CS.CategoricalHyperparameter("b", choices=["bof", "bum"]))
input_space.add_hyperparameter(CS.CategoricalHyperparameter("c", choices=["foo", "bar"]))
input_space.add(CS.UniformIntegerHyperparameter("a", lower=1, upper=10))
input_space.add(CS.CategoricalHyperparameter("b", choices=["bof", "bum"]))
input_space.add(CS.CategoricalHyperparameter("c", choices=["foo", "bar"]))
output_space = configspace_to_flaml_space(input_space)
assert len(output_space) == 3
def test_weighted_categorical(self) -> None:
np.random.seed(42)
input_space = CS.ConfigurationSpace()
input_space.add_hyperparameter(
input_space.add(
CS.CategoricalHyperparameter("c", choices=["foo", "bar"], weights=[0.9, 0.1])
)
with pytest.raises(ValueError, match="non-uniform"):
@@ -219,9 +218,7 @@ class TestFlamlConversion(BaseConversion):
np.random.seed(42)
# integer is supported
input_space = CS.ConfigurationSpace()
input_space.add_hyperparameter(
CS.UniformIntegerHyperparameter("d", lower=1, upper=20, log=True)
)
input_space.add(CS.UniformIntegerHyperparameter("d", lower=1, upper=20, log=True))
converted_space = configspace_to_flaml_space(input_space)
# test log integer sampling
@@ -239,9 +236,7 @@ class TestFlamlConversion(BaseConversion):
# continuous is supported
input_space = CS.ConfigurationSpace()
input_space.add_hyperparameter(
CS.UniformFloatHyperparameter("b", lower=1, upper=5, log=True)
)
input_space.add(CS.UniformFloatHyperparameter("b", lower=1, upper=5, log=True))
converted_space = configspace_to_flaml_space(input_space)
# test log integer sampling

View file

@@ -27,6 +27,23 @@ def config_to_dataframe(config: Configuration) -> pd.DataFrame:
return pd.DataFrame([dict(config)])
def drop_nulls(d: dict) -> dict:
"""
Remove all key-value pairs where the value is None.
Parameters
----------
d : dict
The dictionary to clean.
Returns
-------
dict
The cleaned dictionary.
"""
return {k: v for k, v in d.items() if v is not None}
def normalize_config(
config_space: ConfigurationSpace,
config: Union[Configuration, dict],

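To illustrate the `drop_nulls` pattern above (a toy sketch, not code from this diff): pandas rows represent inactive conditional parameters as `None`, which ConfigSpace rejects when constructing a `Configuration`, so such entries are stripped first.

```python
def drop_nulls(d: dict) -> dict:
    """Remove all key-value pairs where the value is None."""
    return {k: v for k, v in d.items() if v is not None}

# A row for a config where a conditional "special" parameter is inactive:
row = {"kernel_sched_migration_cost_ns": 250000, "special": None}
assert drop_nulls(row) == {"kernel_sched_migration_cost_ns": 250000}
```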
View file

@@ -93,7 +93,7 @@
"input_space = CS.ConfigurationSpace(seed=1234)\n",
"\n",
"# Add a single continuous input dimension between 0 and 1.\n",
"input_space.add_hyperparameter(CS.UniformFloatHyperparameter(name='x', lower=0, upper=1))"
"input_space.add(CS.UniformFloatHyperparameter(name='x', lower=0, upper=1))"
]
},
{

View file

@@ -69,7 +69,8 @@ def _get_long_desc_from_readme(base_url: str) -> dict:
extra_requires: Dict[str, List[str]] = { # pylint: disable=consider-using-namedtuple-or-dataclass
"flaml": ["flaml[blendsearch]"],
"smac": ["smac>=2.0.0"], # NOTE: Major refactoring on SMAC starting from v2.0.0
# NOTE: Major refactoring on SMAC and ConfigSpace v1.0 starting from v2.2
"smac": ["smac>=2.2.0"],
}
# construct special 'full' extra that adds requirements for all built-in
@@ -97,7 +98,7 @@ setup(
'pandas >= 2.2.0;python_version>="3.9"',
'Bottleneck > 1.3.5;python_version>="3.9"',
'pandas >= 1.0.3;python_version<"3.9"',
"ConfigSpace==0.7.1", # Temporarily restrict ConfigSpace version.
"ConfigSpace>=1.0",
],
extras_require=extra_requires,
**_get_long_desc_from_readme("https://github.com/microsoft/MLOS/tree/main/mlos_core"),

View file

@@ -30,9 +30,6 @@ fail-on = [
"unused-import",
]
# Ignore pylint complaints about an upstream dependency.
ignored-modules = ["ConfigSpace.hyperparameters"]
# Help inform pylint where to find the project's source code without needing to rely on PYTHONPATH.
#init-hook="from pylint.config import find_pylintrc; import os, sys; sys.path.append(os.path.dirname(find_pylintrc())); from logging import warning; warning(sys.path)"
init-hook = "from logging import warning; warning(sys.path)"

View file

@@ -84,10 +84,6 @@ hide_error_codes = False
# We also skip several vendor files that currently throw errors.
exclude = (mlos_(core|bench|viz)/setup.py)|(doc/)|(/build/)|(-packages/_pytest/)
# https://github.com/automl/ConfigSpace/issues/293
[mypy-ConfigSpace.*]
ignore_missing_imports = True
[mypy-fasteners.*]
ignore_missing_imports = True