diff --git a/README.md b/README.md index beae6c8..51d235a 100644 --- a/README.md +++ b/README.md @@ -105,7 +105,6 @@ modules: type: evonet dim: [4, 6, 2] activation: [linear, tanh, tanh] - initializer: normal_evonet mutation: strategy: constant probability: 1.0 diff --git a/docs/config_guide.md b/docs/config_guide.md index 12feb1b..e2e2444 100644 --- a/docs/config_guide.md +++ b/docs/config_guide.md @@ -145,7 +145,6 @@ modules: type: evonet dim: [2, 0, 0, 1] # hidden layers start empty activation: [linear, tanh, tanh, sigmoid] - initializer: normal_evonet weights: initializer: normal std: 0.5 @@ -161,7 +160,7 @@ modules: # If `recurrent` is not enabled, this block has no effect. delay: initializer: uniform # uniform | fixed - bounds: [1, 8] # only for random + bounds: [1, 8] # only for uniform # value: 3 # only for fixed mutation: diff --git a/docs/config_parameter.md b/docs/config_parameter.md index e862e4a..64af800 100644 --- a/docs/config_parameter.md +++ b/docs/config_parameter.md @@ -288,7 +288,7 @@ modules: |-------------------|---------------------|---------|-------------| | `dim` | list[int] | — | Layer sizes, e.g. `[4, 0, 0, 2]`. Hidden layers can start empty (0) and grow through structural mutation. | | `activation` | str \| list[str] | — | If list: activation per layer. If str: used for non-input layers; input layer is treated as linear. | -| `initializer` | str | — | Network initialization method (e.g. `normal_evonet`, `unconnected_evonet`). | +| `initializer` | str | default_evonet | Topology preset (e.g. `default_evonet`, `unconnected_evonet`, `identity_evonet`). Parameter initialization is configured via `weights`, `bias`, and `delay`. | | `weights` | dict | — | Weight init and bounds configuration (initializer, bounds, optional params). | | `bias` | dict | — | Bias init and bounds configuration (initializer, bounds, optional params). | | `neuron_dynamics` | list[dict] \| null | null | Optional per-layer neuron dynamics specification. Must match `len(dim)`. 
| @@ -299,30 +299,34 @@ modules: |Parameter | Type | Default | Explanation | |------------|-------|-----------|-------------| |initializer | str | "normal" | Weight initializer preset (normal, uniform, zero, …). | -|bounds | list[float] | [-1.0, 1.0] | Hard clipping bounds [min_w, max_w]. | +|bounds | list[float] | [-0.5, 0.5] | Hard clipping bounds. | -|std | float | null |null | Std-dev for normal (if used). | +|std | float | null | Std-dev for normal (if used). | ##### bias block |Parameter | Type | Default | Explanation | |------------|-------|-----------|-------------| -|initializer | str | "normal" | Weight initializer preset (normal, uniform, zero, …). | -|bounds | list[float] | [-1.0, 1.0] | Hard clipping bounds [min_w, max_w]. | -|std | float | null |null | Std-dev for normal (if used). | +|initializer | str | "normal" | Bias initializer preset (normal, uniform, zero, …). | +|bounds | list[float] | [-1.0, 1.0] | Hard clipping bounds. | +|std | float | null | Std-dev for normal (if used). | --- -#### EvoNet Initializer +#### EvoNet Initializer (Topology Presets) + +`initializer` selects a **topology preset**. 
-The EvoNet module uses: +Parameter initialization is configured via: +- `weights` +- `bias` +- `delay` (recurrent connections only) -| Initializer | Weights | Biases | Notes | -|---------------------|------------------------------------|----------------------------------|---------------------------------------------| -| `normal_evonet` | Normal(0, 0.5) | Normal(0, 0.5) | Default initializer for general use | -| `unconnected_evonet`| None | 0 | For pure structural growth; empty topology | -| `random_evonet` | Random | Uniform(bias bounds) | For broader stochastic exploration | -| `zero_evonet` | 0 | 0 | Deterministic baseline; debugging | -| `identity_evonet` | Small random | Small random | Designed for stable recurrent memory | +Allowed presets: +| Initializer | Meaning (topology only) | +|------------------------|-------------------------| +| `default_evonet` | Standard EvoNet topology preset (uses `connection_scope`, `connection_density`, and `recurrent`). | +| `unconnected_evonet` | Creates neurons/layers but starts with **no connections** (use structural mutation to grow). | +| `identity_evonet` | Special preset intended for stable recurrent memory (may override parameters internally; see notes below). | --- @@ -348,7 +352,6 @@ modules: type: evonet dim: [1, 16, 1] activation: [linear, tanh, sigmoid] - initializer: normal_evonet neuron_dynamics: - name: standard params: {} @@ -378,7 +381,7 @@ modules: ... 
 delay: initializer: uniform # uniform | fixed - bounds: [1, 8] # only for random + bounds: [1, 8] # only for uniform # value: 3 # only for fixed ``` diff --git a/evolib/config/evonet_component_config.py b/evolib/config/evonet_component_config.py index 9b6b32a..75891d3 100644 --- a/evolib/config/evonet_component_config.py +++ b/evolib/config/evonet_component_config.py @@ -189,7 +189,6 @@ class EvoNetComponentConfig(BaseModel): type: evonet dim: [4, 6, 2] # input, hidden, output activation: [linear, relu, sigmoid] # single activation or list per layer - initializer: normal_evonet # weight/bias initializer weights: initializer: normal @@ -228,7 +227,9 @@ class EvoNetComponentConfig(BaseModel): recurrent: Optional[Literal["none", "direct", "local", "all"]] = "none" # Name of the initializer function (resolved via initializer registry) - initializer: str = Field(..., description="Name of the initializer to use") + initializer: str = Field( + default="default_evonet", description="Name of the initializer to use" + ) # Connection topology for initialization connection_scope: Literal["adjacent", "crosslayer"] = Field( @@ -266,6 +267,29 @@ class EvoNetComponentConfig(BaseModel): structural: Optional[StructuralMutationConfig] = None # Validators + @field_validator("initializer") + @classmethod + def validate_initializer(cls, name: str) -> str: + """ + Validate that the initializer is one of the allowed topology presets. + + Parameter initialization is handled exclusively via weights/bias/delay blocks. + """ + allowed = { + "default_evonet", + "unconnected_evonet", + "identity_evonet", + } + + if name not in allowed: + raise ValueError( + f"Unknown EvoNet initializer '{name}'. " + f"Allowed values: {sorted(allowed)}. " + "Parameter initialization is configured via " + "'weights', 'bias', and 'delay'." 
+ ) + + return name @field_validator("neuron_dynamics") @classmethod diff --git a/evolib/initializers/evonet_initializers.py b/evolib/initializers/evonet_initializers.py index 051e44d..b435544 100644 --- a/evolib/initializers/evonet_initializers.py +++ b/evolib/initializers/evonet_initializers.py @@ -173,28 +173,7 @@ def _build_architecture( ) -def initializer_unconnected_evonet(config: FullConfig, module: str) -> EvoNet: - """ - Initializes an EvoNet without connections. - - Args: - config (FullConfig): Full experiment configuration - module (str): Module name (e.g. "brain") - - Returns: - EvoNet: Initialized EvoNet representation - """ - para = EvoNet() - cfg = config.modules[module].model_copy(deep=True) - para.apply_config(cfg) - - _build_architecture(para, cfg, connection_init="none") - _apply_bias_init(para, cfg) - - return para - - -def initializer_normal_evonet(config: FullConfig, module: str) -> EvoNet: +def initializer_default_evonet(config: FullConfig, module: str) -> EvoNet: """ Build a standard EvoNet architecture and initialize parameters according to the explicit configuration blocks. @@ -203,9 +182,6 @@ def initializer_normal_evonet(config: FullConfig, module: str) -> EvoNet: - Weights are initialized using `cfg.weights`. - Biases are initialized using `cfg.bias`. - Delay (if configured) is initialized using `cfg.delay`. - - No implicit parameter initialization is performed here. - All parameter distributions are controlled explicitly via the config. """ para = EvoNet() @@ -219,36 +195,23 @@ def initializer_normal_evonet(config: FullConfig, module: str) -> EvoNet: return para -def initializer_random_evonet(config: FullConfig, module: str) -> EvoNet: - """ - Backward-compatible alias for the standard EvoNet initializer. Will be removed. - - Parameter initialization is controlled by `cfg.weights`, `cfg.bias`, - and `cfg.delay`. 
- """ - return initializer_normal_evonet(config, module) - - -def initializer_zero_evonet(config: FullConfig, module: str) -> EvoNet: +def initializer_unconnected_evonet(config: FullConfig, module: str) -> EvoNet: """ - Build a standard EvoNet architecture and initialize all parameters to zero. + Initializes an EvoNet without connections. - - All connection weights are set to 0. - - All biases are set to 0. - - Delay initialization follows `cfg.delay` if applicable. + Args: + config (FullConfig): Full experiment configuration + module (str): Module name (e.g. "brain") - This initializer ignores `cfg.weights` and `cfg.bias` distributions. + Returns: + EvoNet: Initialized EvoNet representation """ - para = EvoNet() cfg = config.modules[module].model_copy(deep=True) para.apply_config(cfg) - _build_architecture(para, cfg, connection_init="zero") - _apply_delay_init(para, cfg) - - para.net.set_weights(np.zeros(para.net.num_weights)) - para.net.set_biases(np.zeros(para.net.num_biases)) + _build_architecture(para, cfg, connection_init="none") + _apply_bias_init(para, cfg) return para diff --git a/evolib/initializers/registry.py b/evolib/initializers/registry.py index 77d3281..f7e5765 100644 --- a/evolib/initializers/registry.py +++ b/evolib/initializers/registry.py @@ -17,11 +17,9 @@ # EvoNet-based initializer from evolib.initializers.evonet_initializers import ( + initializer_default_evonet, initializer_identity_evonet, - initializer_normal_evonet, - initializer_random_evonet, initializer_unconnected_evonet, - initializer_zero_evonet, ) # NetVector-based initializer @@ -50,9 +48,7 @@ "fixed_vector": initializer_fixed_vector, "adaptive_vector": initializer_adaptive_vector, "normal_net": initializer_normal_net, - "normal_evonet": initializer_normal_evonet, - "random_evonet": initializer_random_evonet, - "zero_evonet": initializer_zero_evonet, + "default_evonet": initializer_default_evonet, "identity_evonet": initializer_identity_evonet, "unconnected_evonet": 
initializer_unconnected_evonet, } diff --git a/examples/07_evonet/configs/01_sine_approximation.yaml b/examples/07_evonet/configs/01_sine_approximation.yaml index 4a2bac3..2cfa103 100644 --- a/examples/07_evonet/configs/01_sine_approximation.yaml +++ b/examples/07_evonet/configs/01_sine_approximation.yaml @@ -14,7 +14,6 @@ modules: type: evonet dim: [1, 6, 6, 1] activation: ["tanh", "tanh", "tanh", "tanh"] - initializer: normal_evonet weights: initializer: normal diff --git a/examples/07_evonet/configs/02_sine_delay.yaml b/examples/07_evonet/configs/02_sine_delay.yaml index 03934fa..1bbf466 100644 --- a/examples/07_evonet/configs/02_sine_delay.yaml +++ b/examples/07_evonet/configs/02_sine_delay.yaml @@ -16,7 +16,6 @@ modules: type: evonet dim: [1, 1, 1] activation: ["linear", "linear", "linear"] - initializer: normal_evonet weights: initializer: normal diff --git a/examples/07_evonet/configs/03_delay_bitseq_echo.yaml b/examples/07_evonet/configs/03_delay_bitseq_echo.yaml index 025d4bd..d9820b5 100644 --- a/examples/07_evonet/configs/03_delay_bitseq_echo.yaml +++ b/examples/07_evonet/configs/03_delay_bitseq_echo.yaml @@ -16,7 +16,6 @@ modules: type: evonet dim: [1, 3, 1] activation: ["linear", "tanh", "sigmoid"] - initializer: normal_evonet weights: initializer: normal diff --git a/examples/07_evonet/configs/04_temporal_smoothing_leaky.yaml b/examples/07_evonet/configs/04_temporal_smoothing_leaky.yaml index 939e4e3..bcb8b74 100644 --- a/examples/07_evonet/configs/04_temporal_smoothing_leaky.yaml +++ b/examples/07_evonet/configs/04_temporal_smoothing_leaky.yaml @@ -14,7 +14,6 @@ modules: type: evonet dim: [1, 6, 1] activation: [linear, tanh, sigmoid] - initializer: random_evonet weights: initializer: uniform diff --git a/examples/07_evonet/configs/04_temporal_smoothing_standard.yaml b/examples/07_evonet/configs/04_temporal_smoothing_standard.yaml index bc7c1bd..4461fb2 100644 --- a/examples/07_evonet/configs/04_temporal_smoothing_standard.yaml +++ 
b/examples/07_evonet/configs/04_temporal_smoothing_standard.yaml @@ -14,7 +14,6 @@ modules: type: evonet dim: [1, 6, 1] activation: [linear, tanh, sigmoid] - initializer: random_evonet weights: initializer: uniform diff --git a/examples/07_evonet/configs/05_image_approximation.yaml b/examples/07_evonet/configs/05_image_approximation.yaml index 9c50ff7..a4efa8e 100644 --- a/examples/07_evonet/configs/05_image_approximation.yaml +++ b/examples/07_evonet/configs/05_image_approximation.yaml @@ -17,7 +17,6 @@ modules: type: evonet dim: [2, 16, 8, 1] activation: ["linear", "tanh", "tanh", "sigmoid"] - initializer: normal_evonet weights: initializer: normal diff --git a/examples/07_evonet/configs/07_recurrent_bit_prediction.yaml b/examples/07_evonet/configs/07_recurrent_bit_prediction.yaml index c20ee5c..a2060d0 100644 --- a/examples/07_evonet/configs/07_recurrent_bit_prediction.yaml +++ b/examples/07_evonet/configs/07_recurrent_bit_prediction.yaml @@ -17,7 +17,6 @@ modules: dim: [1, 8, 1] activation: [linear, tanh, sigmoid] recurrent: "local" - initializer: zero_evonet weights: initializer: zero diff --git a/examples/08_gym/configs/05_bipedal_walker.yaml b/examples/08_gym/configs/05_bipedal_walker.yaml index 408731e..8dbc069 100644 --- a/examples/08_gym/configs/05_bipedal_walker.yaml +++ b/examples/08_gym/configs/05_bipedal_walker.yaml @@ -3,6 +3,10 @@ offspring_pool_size: 250 max_generations: 200 num_elites: 5 +parallel: + backend: ray + address: ray://10.17.5.10:10001 + evolution: strategy: mu_plus_lambda @@ -13,16 +17,15 @@ modules: dim: [24, 8, 4] # 24 inputs (state), 4 outputs (joint torques) activation: [linear, tanh, linear] recurrent: direct - initializer: normal_evonet weights: initializer: normal - std: 0.2 + std: 0.5 bounds: [-1.0, 1.0] bias: initializer: normal - std: 0.1 + std: 0.5 bounds: [-0.5, 0.5] mutation: diff --git a/tests/operators/test_structural_mutation.py b/tests/operators/test_structural_mutation.py index cdb028f..983c6cc 100644 --- 
a/tests/operators/test_structural_mutation.py +++ b/tests/operators/test_structural_mutation.py @@ -7,7 +7,7 @@ StructuralTopology, ) from evolib.config.schema import FullConfig -from evolib.initializers.evonet_initializers import initializer_normal_evonet +from evolib.initializers.evonet_initializers import initializer_default_evonet from evolib.operators.evonet_structural_mutation import ( mutate_structure, ) @@ -28,7 +28,6 @@ def make_minimal_evonet() -> EvoNet: "type": "evonet", "dim": [2, 3, 1], "activation": ["linear", "tanh", "sigmoid"], - "initializer": "normal_evonet", "weights": { "initializer": "normal", "std": 0.5, @@ -48,7 +47,7 @@ def make_minimal_evonet() -> EvoNet: }, } full_config = FullConfig.model_validate(config_dict) - return initializer_normal_evonet(full_config, module="brain") + return initializer_default_evonet(full_config, module="brain") def test_add_connection() -> None: diff --git a/tests/test_evonet_neuron_dynamics.py b/tests/test_evonet_neuron_dynamics.py index bf0f918..ee428bf 100644 --- a/tests/test_evonet_neuron_dynamics.py +++ b/tests/test_evonet_neuron_dynamics.py @@ -4,7 +4,7 @@ from evolib.config.evonet_component_config import EvoNetComponentConfig from evolib.config.schema import FullConfig -from evolib.initializers.evonet_initializers import initializer_zero_evonet +from evolib.initializers.evonet_initializers import initializer_default_evonet def test_neuron_dynamics_config_length_matches_dim() -> None: @@ -16,7 +16,6 @@ def test_neuron_dynamics_config_length_matches_dim() -> None: type="evonet", dim=[1, 2, 1], activation=["linear", "tanh", "tanh"], - initializer="zero_evonet", weights={ "initializer": "normal", "std": 0.5, @@ -48,7 +47,7 @@ def test_neuron_dynamics_applied_to_neurons() -> None: "type": "evonet", "dim": [1, 3, 1], "activation": ["linear", "tanh", "tanh"], - "initializer": "zero_evonet", + "initializer": "default_evonet", "recurrent": "local", "connection_scope": "adjacent", "connection_density": 1.0, @@ 
-82,7 +81,7 @@ def test_neuron_dynamics_applied_to_neurons() -> None: parallel=None, ) - para = initializer_zero_evonet(cfg, "brain") + para = initializer_default_evonet(cfg, "brain") # Layer 0 (input): standard for n in para.net.layers[0].neurons: diff --git a/tests/test_initializer_evonet.py b/tests/test_initializer_evonet.py index 5d37812..7057220 100644 --- a/tests/test_initializer_evonet.py +++ b/tests/test_initializer_evonet.py @@ -3,7 +3,7 @@ from evolib.representation.evonet import EvoNet -def test_normal_initializer_evonet_builds_expected_structure() -> None: +def test_default_initializer_evonet_builds_expected_structure() -> None: config = FullConfig( parent_pool_size=1, offspring_pool_size=1, @@ -15,7 +15,6 @@ def test_normal_initializer_evonet_builds_expected_structure() -> None: "type": "evonet", "dim": [2, 3, 1], "activation": "linear", - "initializer": "normal_evonet", "weights": { "initializer": "normal", "std": 0.5, @@ -35,7 +34,7 @@ def test_normal_initializer_evonet_builds_expected_structure() -> None: }, ) - init_fn = get_initializer("normal_evonet") + init_fn = get_initializer("default_evonet") para = init_fn(config, "brain") assert isinstance(para, EvoNet) net = para.net