From e2ac400fa32e72f6e125c2ec645e4bfbfd30cf9f Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 26 Aug 2023 14:59:18 +0200 Subject: [PATCH 01/13] improve tests for early-stop in opt. strat. --- .../test_early_stopping.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/tests/test_optimization_strategies/test_early_stopping.py b/tests/test_optimization_strategies/test_early_stopping.py index 7d5d6e2a..c7d0b636 100644 --- a/tests/test_optimization_strategies/test_early_stopping.py +++ b/tests/test_optimization_strategies/test_early_stopping.py @@ -9,8 +9,19 @@ from ._parametrize import optimizers +n_iter_no_change_parametr = ( + "n_iter_no_change", + [ + (5), + (10), + (15), + ], +) + + +@pytest.mark.parametrize(*n_iter_no_change_parametr) @pytest.mark.parametrize(*optimizers) -def test_strategy_early_stopping_0(Optimizer): +def test_strategy_early_stopping_0(Optimizer, n_iter_no_change): def objective_function(para): score = -para["x1"] * para["x1"] return score @@ -19,7 +30,7 @@ def objective_function(para): "x1": list(np.arange(0, 100, 0.1)), } - n_iter_no_change = 5 + # n_iter_no_change = 5 early_stopping = { "n_iter_no_change": n_iter_no_change, } @@ -28,8 +39,8 @@ def objective_function(para): optimizer2 = RandomSearchOptimizer() opt_strat = CustomOptimizationStrategy() - opt_strat.add_optimizer(optimizer1, duration=0.5, early_stopping=early_stopping) - opt_strat.add_optimizer(optimizer2, duration=0.5) + opt_strat.add_optimizer(optimizer1, duration=0.9, early_stopping=early_stopping) + opt_strat.add_optimizer(optimizer2, duration=0.1) n_iter = 30 From 2de2be3a417af79c07ec7344ebd75f73cd17c4a2 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sun, 27 Aug 2023 08:03:45 +0200 Subject: [PATCH 02/13] add tests to check if output is empty if verbosity=False --- tests/test_empty_output/__init__.py | 0 tests/test_empty_output/non_verbose.py | 26 ++++++++++++++++++++ tests/test_empty_output/test_empty_output.py | 23 +++++++++++++++++ tests/test_empty_output/verbose.py | 26 ++++++++++++++++++++ 4 files changed, 75 insertions(+) create mode 100644 tests/test_empty_output/__init__.py create mode 100644 tests/test_empty_output/non_verbose.py create mode 100644 tests/test_empty_output/test_empty_output.py create mode 100644 tests/test_empty_output/verbose.py diff --git a/tests/test_empty_output/__init__.py b/tests/test_empty_output/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/test_empty_output/non_verbose.py b/tests/test_empty_output/non_verbose.py new file mode 100644 index 00000000..5eaae417 --- /dev/null +++ b/tests/test_empty_output/non_verbose.py @@ -0,0 +1,26 @@ +import numpy as np +from hyperactive import Hyperactive + + +def ackley_function(para): + x, y = para["x"], para["y"] + + loss = ( + -20 * np.exp(-0.2 * np.sqrt(0.5 * (x * x + y * y))) + - np.exp(0.5 * (np.cos(2 * np.pi * x) + np.cos(2 * np.pi * y))) + + np.exp(1) + + 20 + ) + + return -loss + + +search_space = { + "x": list(np.arange(-10, 10, 0.01)), + "y": list(np.arange(-10, 10, 0.01)), +} + + +hyper = Hyperactive(verbosity=False) +hyper.add_search(ackley_function, search_space, n_iter=30) +hyper.run() diff --git a/tests/test_empty_output/test_empty_output.py b/tests/test_empty_output/test_empty_output.py new file mode 100644 index 00000000..5796e13c --- /dev/null +++ b/tests/test_empty_output/test_empty_output.py @@ -0,0 +1,23 @@ +import os +import subprocess + +here = os.path.dirname(os.path.abspath(__file__)) + +verbose_file = os.path.join(here, 
"verbose.py") +non_verbose_file = os.path.join(here, "non_verbose.py") + + +def test_empty_output(): + output_verbose = subprocess.run(["python", verbose_file], stdout=subprocess.PIPE) + output_non_verbose = subprocess.run( + ["python", non_verbose_file], stdout=subprocess.PIPE + ) + + verbose_str = output_verbose.stdout.decode() + non_verbose_str = output_non_verbose.stdout.decode() + + print("\n verbose_str \n", verbose_str, "\n") + print("\n non_verbose_str \n", non_verbose_str, "\n") + + assert verbose_str + assert not non_verbose_str diff --git a/tests/test_empty_output/verbose.py b/tests/test_empty_output/verbose.py new file mode 100644 index 00000000..05eecb83 --- /dev/null +++ b/tests/test_empty_output/verbose.py @@ -0,0 +1,26 @@ +import numpy as np +from hyperactive import Hyperactive + + +def ackley_function(para): + x, y = para["x"], para["y"] + + loss = ( + -20 * np.exp(-0.2 * np.sqrt(0.5 * (x * x + y * y))) + - np.exp(0.5 * (np.cos(2 * np.pi * x) + np.cos(2 * np.pi * y))) + + np.exp(1) + + 20 + ) + + return -loss + + +search_space = { + "x": list(np.arange(-10, 10, 0.01)), + "y": list(np.arange(-10, 10, 0.01)), +} + + +hyper = Hyperactive() +hyper.add_search(ackley_function, search_space, n_iter=30) +hyper.run() From 58e3be0a6fb3f6a79c5c2019f23367728df167b4 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sun, 27 Aug 2023 12:49:25 +0200 Subject: [PATCH 03/13] v4.5.0 --- hyperactive/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hyperactive/__init__.py b/hyperactive/__init__.py index 42b61685..5d50281c 100644 --- a/hyperactive/__init__.py +++ b/hyperactive/__init__.py @@ -2,7 +2,7 @@ # Email: simon.blanke@yahoo.com # License: MIT License -__version__ = "4.4.3" +__version__ = "4.5.0" __license__ = "MIT" From a93cf61ec837d4482da4b60123c5dfe1e7773ee4 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sun, 27 Aug 2023 13:02:43 +0200 Subject: [PATCH 04/13] update roadmap + what's new section --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 00a99b52..883760e9 100644 --- a/README.md +++ b/README.md @@ -76,9 +76,9 @@ As its name suggests Hyperactive started as a hyperparameter optimization packag
 ## What's new?
+  - ### 27.08.2023 v4.5.0 add early-stopping for Optimization Strategies
   - ### 01.03.2023 v4.4.0 add new feature: "Optimization Strategies"
   - ### 18.11.2022 v4.3.0 with three new optimization algorithms (Spiral Optimization, Lipschitz Optimizer, DIRECT Optimizer)
-  - ### 04.05.2022 v4.2.0 with support of handling Exceptions and Callbacks
@@ -939,7 +939,7 @@ Each of the following optimizer classes can be initialized and passed to the "ad
-v4.5.0 +v4.5.0 :heavy_check_mark: - [x] add early stopping feature to custom optimization strategies - [x] display additional outputs from objective-function in results in command-line From 9c163ec031410714039a1ca0d8530930bb2b7ea5 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sun, 10 Sep 2023 20:14:35 +0200 Subject: [PATCH 05/13] add spiral-opt to examples --- .../spiral_optimization.py | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 examples/optimization_techniques/spiral_optimization.py diff --git a/examples/optimization_techniques/spiral_optimization.py b/examples/optimization_techniques/spiral_optimization.py new file mode 100644 index 00000000..b5504843 --- /dev/null +++ b/examples/optimization_techniques/spiral_optimization.py @@ -0,0 +1,26 @@ +import numpy as np + +from hyperactive import Hyperactive +from hyperactive.optimizers import SpiralOptimization + + +def sphere_function(para): + x = para["x"] + y = para["y"] + + return -(x * x + y * y) + + +search_space = { + "x": list(np.arange(-25, 10, 0.1)), + "y": list(np.arange(-10, 15, 0.1)), +} + +opt = SpiralOptimization( + population=15, + decay_rate=0.99, +) + +hyper = Hyperactive() +hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt) +hyper.run() From cae141f138518abe45e546ce3438e8f7e0e713ab Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Thu, 21 Sep 2023 15:50:30 +0200 Subject: [PATCH 06/13] add pattern-search example --- .../optimization_techniques/pattern_search.py | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 examples/optimization_techniques/pattern_search.py diff --git a/examples/optimization_techniques/pattern_search.py b/examples/optimization_techniques/pattern_search.py new file mode 100644 index 00000000..f7bc4846 --- /dev/null +++ b/examples/optimization_techniques/pattern_search.py @@ -0,0 +1,28 @@ +import numpy as np + +from hyperactive import Hyperactive +from hyperactive.optimizers import PatternSearch + + +def sphere_function(para): + x = para["x"] + y = para["y"] + + return -(x * x + y * y) + + +search_space = { + "x": list(np.arange(-10, 10, 0.1)), + "y": list(np.arange(-10, 10, 0.1)), +} + +opt = PatternSearch( + n_positions=2, + pattern_size=0.5, + reduction=0.99, +) + + +hyper = Hyperactive() +hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt) +hyper.run() From df25faf9ac53c2652863dd7bba93b78cb6e41d2d Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 23 Sep 2023 16:34:24 +0200 Subject: [PATCH 07/13] add powell's-method example --- .../optimization_techniques/powells_method.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 examples/optimization_techniques/powells_method.py diff --git a/examples/optimization_techniques/powells_method.py b/examples/optimization_techniques/powells_method.py new file mode 100644 index 00000000..c2f36edd --- /dev/null +++ b/examples/optimization_techniques/powells_method.py @@ -0,0 +1,25 @@ +import numpy as np + +from hyperactive import Hyperactive +from hyperactive.optimizers import PowellsMethod + + +def sphere_function(para): + x = para["x"] + y = para["y"] + + return -(x * x + y * y) + + +search_space = { + "x": list(np.arange(-10, 10, 0.1)), + "y": list(np.arange(-10, 10, 0.1)), +} + +opt = PowellsMethod( + iters_p_dim=20, +) + +hyper = Hyperactive() +hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt) +hyper.run() From 080e6d8fd7e164c09ed8dcab918b9a8abfba1048 Mon Sep 17 00:00:00 2001 From: Simon Blanke 
Date: Thu, 28 Sep 2023 20:05:49 +0200 Subject: [PATCH 08/13] add downhill-simplex example --- .../downhill_simplex.py | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 examples/optimization_techniques/downhill_simplex.py diff --git a/examples/optimization_techniques/downhill_simplex.py b/examples/optimization_techniques/downhill_simplex.py new file mode 100644 index 00000000..afa8253d --- /dev/null +++ b/examples/optimization_techniques/downhill_simplex.py @@ -0,0 +1,29 @@ +import numpy as np + +from hyperactive import Hyperactive +from hyperactive.optimizers import DownhillSimplexOptimizer + + +def sphere_function(para): + x = para["x"] + y = para["y"] + + return -(x * x + y * y) + + +search_space = { + "x": list(np.arange(-10, 10, 0.1)), + "y": list(np.arange(-10, 10, 0.1)), +} + +opt = DownhillSimplexOptimizer( + alpha=1.2, + gamma=1.1, + beta=0.8, + sigma=1, +) + + +hyper = Hyperactive() +hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt) +hyper.run() From bf1b06ea8a0949e6c72445b596e406e76a0b3171 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Fri, 6 Oct 2023 20:17:46 +0200 Subject: [PATCH 09/13] add example for direct algorithm --- .../direct_algorithm.py | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 examples/optimization_techniques/direct_algorithm.py diff --git a/examples/optimization_techniques/direct_algorithm.py b/examples/optimization_techniques/direct_algorithm.py new file mode 100644 index 00000000..2965e270 --- /dev/null +++ b/examples/optimization_techniques/direct_algorithm.py @@ -0,0 +1,24 @@ +import numpy as np + +from hyperactive import Hyperactive +from hyperactive.optimizers import DirectAlgorithm + + +def sphere_function(para): + x = para["x"] + y = para["y"] + + return -(x * x + y * y) + + +search_space = { + "x": list(np.arange(-10, 10, 0.1)), + "y": list(np.arange(-10, 10, 0.1)), +} + +opt = DirectAlgorithm() + + +hyper = Hyperactive() +hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt) +hyper.run() From 60019e666e0f755be38b701b4f2b60adbe4bad52 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sun, 8 Oct 2023 17:15:36 +0200 Subject: [PATCH 10/13] add stochastic hill climbing example --- .../stochastic_hill_climbing.py | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 examples/optimization_techniques/stochastic_hill_climbing.py diff --git a/examples/optimization_techniques/stochastic_hill_climbing.py b/examples/optimization_techniques/stochastic_hill_climbing.py new file mode 100644 index 00000000..66e44e23 --- /dev/null +++ b/examples/optimization_techniques/stochastic_hill_climbing.py @@ -0,0 +1,28 @@ +import numpy as np + +from hyperactive import Hyperactive +from hyperactive.optimizers import StochasticHillClimbingOptimizer + + +def sphere_function(para): + x = para["x"] + y = para["y"] + + return -(x * x + y * y) + + +search_space = { + "x": list(np.arange(-10, 10, 0.1)), + "y": list(np.arange(-10, 10, 0.1)), +} + +opt = StochasticHillClimbingOptimizer( + epsilon=0.01, + n_neighbours=5, + distribution="laplace", + p_accept=0.05, +) + +hyper = Hyperactive() +hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt) +hyper.run() From 56e2bfab9ad91902a943754521ad1f9a795fa293 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sun, 8 Oct 2023 17:15:40 +0200 Subject: [PATCH 11/13] add lipschitz optimization example --- .../lipschitz_optimization.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create 
mode 100644 examples/optimization_techniques/lipschitz_optimization.py diff --git a/examples/optimization_techniques/lipschitz_optimization.py b/examples/optimization_techniques/lipschitz_optimization.py new file mode 100644 index 00000000..fbbd5acc --- /dev/null +++ b/examples/optimization_techniques/lipschitz_optimization.py @@ -0,0 +1,25 @@ +import numpy as np + +from hyperactive import Hyperactive +from hyperactive.optimizers import LipschitzOptimizer + + +def sphere_function(para): + x = para["x"] + y = para["y"] + + return -(x * x + y * y) + + +search_space = { + "x": list(np.arange(-10, 10, 0.1)), + "y": list(np.arange(-10, 10, 0.1)), +} + +opt = LipschitzOptimizer( + sampling={"random": 100000}, +) + +hyper = Hyperactive() +hyper.add_search(sphere_function, search_space, n_iter=100, optimizer=opt) +hyper.run() From 5da99684b12058158868048f0238ee09b741d870 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sun, 8 Oct 2023 17:21:09 +0200 Subject: [PATCH 12/13] add grid search example --- .../optimization_techniques/grid_search.py | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 examples/optimization_techniques/grid_search.py diff --git a/examples/optimization_techniques/grid_search.py b/examples/optimization_techniques/grid_search.py new file mode 100644 index 00000000..c50e68ce --- /dev/null +++ b/examples/optimization_techniques/grid_search.py @@ -0,0 +1,26 @@ +import numpy as np + +from hyperactive import Hyperactive +from hyperactive.optimizers import GridSearchOptimizer + + +def sphere_function(para): + x = para["x"] + y = para["y"] + + return -(x * x + y * y) + + +search_space = { + "x": list(np.arange(-10, 10, 0.1)), + "y": list(np.arange(-10, 10, 0.1)), +} + +opt = GridSearchOptimizer( + step_size=3, +) + + +hyper = Hyperactive() +hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt) +hyper.run() From 4fe153ef893e90288432ae4c79697cba2942fa4e Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sun, 8 Oct 2023 17:21:36 +0200 Subject: [PATCH 13/13] add links to example to README --- README.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 883760e9..66209485 100644 --- a/README.md +++ b/README.md @@ -115,29 +115,29 @@ Hyperactive features a collection of optimization algorithms that can be used fo
   - Hill Climbing
   - Repulsing Hill Climbing
   - Simulated Annealing
-  - Downhill Simplex Optimizer
+  - [Downhill Simplex Optimizer](./examples/optimization_techniques/downhill_simplex.py)
 - Global Search:
 - Population Methods:
 - Sequential Methods:
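
Taken together, the patches above ship the v4.5.0 early-stopping feature for optimization strategies (PATCH 01) and one standalone example script per optimizer (PATCHES 05-12). Below is a minimal end-to-end sketch of the early-stopping feature, written in the same style as those example scripts. The early_stopping dict, the duration argument, and CustomOptimizationStrategy are taken from the test in PATCH 01; the import path hyperactive.optimizers.strategies and passing the strategy to add_search via optimizer= are assumptions, since the test's imports and run call are truncated in the diff above.

import numpy as np

from hyperactive import Hyperactive
from hyperactive.optimizers import HillClimbingOptimizer, RandomSearchOptimizer

# assumed import path for the strategy class; the test's import block is truncated above
from hyperactive.optimizers.strategies import CustomOptimizationStrategy


def sphere_function(para):
    x = para["x"]
    y = para["y"]

    return -(x * x + y * y)


search_space = {
    "x": list(np.arange(-10, 10, 0.1)),
    "y": list(np.arange(-10, 10, 0.1)),
}

# stop the first optimizer early once the best score has not improved
# for `n_iter_no_change` consecutive iterations (new in v4.5.0, PATCH 01)
early_stopping = {"n_iter_no_change": 10}

opt_strat = CustomOptimizationStrategy()
opt_strat.add_optimizer(
    HillClimbingOptimizer(), duration=0.9, early_stopping=early_stopping
)
opt_strat.add_optimizer(RandomSearchOptimizer(), duration=0.1)

hyper = Hyperactive()
hyper.add_search(sphere_function, search_space, n_iter=100, optimizer=opt_strat)
hyper.run()

As in the updated test, the duration fractions split the n_iter budget: 90% goes to the hill climber (which may hand over earlier if early stopping triggers) and the remaining 10% to random search.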