diff --git a/README.md b/README.md
index 00a99b52..66209485 100644
--- a/README.md
+++ b/README.md
@@ -76,9 +76,9 @@ As its name suggests Hyperactive started as a hyperparameter optimization packag
## What's new?
+ - ### 27.08.2023 v4.5.0 add early stopping for "Optimization Strategies"
- ### 01.03.2023 v4.4.0 add new feature: "Optimization Strategies"
- ### 18.11.2022 v4.3.0 with three new optimization algorithms (Spiral Optimization, Lipschitz Optimizer, DIRECT Optimizer)
- - ### 04.05.2022 v4.2.0 with support of handling Exceptions and Callbacks
@@ -115,29 +115,29 @@ Hyperactive features a collection of optimization algorithms that can be used fo
Hill Climbing
Repulsing Hill Climbing
Simulated Annealing
- Downhill Simplex Optimizer
+ Downhill Simplex Optimizer
Global Search:
Population Methods:
Sequential Methods:
- Bayesian Optimization
- - Lipschitz Optimization
- - Direct Algorithm
+ - Lipschitz Optimization
+ - Direct Algorithm
- Tree of Parzen Estimators
- Forest Optimizer
[dto]
@@ -939,7 +939,7 @@ Each of the following optimizer classes can be initialized and passed to the "ad
-v4.5.0
+v4.5.0 :heavy_check_mark:
- [x] add early stopping feature to custom optimization strategies
- [x] display additional outputs from objective-function in results in command-line
diff --git a/examples/optimization_techniques/direct_algorithm.py b/examples/optimization_techniques/direct_algorithm.py
new file mode 100644
index 00000000..2965e270
--- /dev/null
+++ b/examples/optimization_techniques/direct_algorithm.py
@@ -0,0 +1,27 @@
+import numpy as np
+
+from hyperactive import Hyperactive
+from hyperactive.optimizers import DirectAlgorithm
+
+
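+# simple convex test function: Hyperactive maximizes the score, so the sphere is negated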
+def sphere_function(para):
+ x = para["x"]
+ y = para["y"]
+
+ return -(x * x + y * y)
+
+
+search_space = {
+ "x": list(np.arange(-10, 10, 0.1)),
+ "y": list(np.arange(-10, 10, 0.1)),
+}
+
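+# DIRECT (DIviding RECTangles) recursively splits the search space into
+# hyper-rectangles and samples the most promising ones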
+opt = DirectAlgorithm()
+
+
+hyper = Hyperactive()
+hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
+hyper.run()
diff --git a/examples/optimization_techniques/downhill_simplex.py b/examples/optimization_techniques/downhill_simplex.py
new file mode 100644
index 00000000..afa8253d
--- /dev/null
+++ b/examples/optimization_techniques/downhill_simplex.py
@@ -0,0 +1,31 @@
+import numpy as np
+
+from hyperactive import Hyperactive
+from hyperactive.optimizers import DownhillSimplexOptimizer
+
+
+def sphere_function(para):
+ x = para["x"]
+ y = para["y"]
+
+ return -(x * x + y * y)
+
+
+search_space = {
+ "x": list(np.arange(-10, 10, 0.1)),
+ "y": list(np.arange(-10, 10, 0.1)),
+}
+
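+# simplex coefficients, assumed to follow the usual Nelder-Mead convention:
+# alpha=reflection, gamma=expansion, beta=contraction, sigma=shrink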
+opt = DownhillSimplexOptimizer(
+ alpha=1.2,
+ gamma=1.1,
+ beta=0.8,
+ sigma=1,
+)
+
+
+hyper = Hyperactive()
+hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
+hyper.run()
diff --git a/examples/optimization_techniques/grid_search.py b/examples/optimization_techniques/grid_search.py
new file mode 100644
index 00000000..c50e68ce
--- /dev/null
+++ b/examples/optimization_techniques/grid_search.py
@@ -0,0 +1,28 @@
+import numpy as np
+
+from hyperactive import Hyperactive
+from hyperactive.optimizers import GridSearchOptimizer
+
+
+def sphere_function(para):
+ x = para["x"]
+ y = para["y"]
+
+ return -(x * x + y * y)
+
+
+search_space = {
+ "x": list(np.arange(-10, 10, 0.1)),
+ "y": list(np.arange(-10, 10, 0.1)),
+}
+
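+# step_size: assumed to set the spacing between evaluated grid positions;
+# larger values give a coarser grid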
+opt = GridSearchOptimizer(
+ step_size=3,
+)
+
+
+hyper = Hyperactive()
+hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
+hyper.run()
diff --git a/examples/optimization_techniques/lipschitz_optimization.py b/examples/optimization_techniques/lipschitz_optimization.py
new file mode 100644
index 00000000..fbbd5acc
--- /dev/null
+++ b/examples/optimization_techniques/lipschitz_optimization.py
@@ -0,0 +1,27 @@
+import numpy as np
+
+from hyperactive import Hyperactive
+from hyperactive.optimizers import LipschitzOptimizer
+
+
+def sphere_function(para):
+ x = para["x"]
+ y = para["y"]
+
+ return -(x * x + y * y)
+
+
+search_space = {
+ "x": list(np.arange(-10, 10, 0.1)),
+ "y": list(np.arange(-10, 10, 0.1)),
+}
+
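+# sampling: assumed number of random positions used to evaluate the Lipschitz
+# upper bound; n_iter is kept low, as each iteration is comparatively expensive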
+opt = LipschitzOptimizer(
+ sampling={"random": 100000},
+)
+
+hyper = Hyperactive()
+hyper.add_search(sphere_function, search_space, n_iter=100, optimizer=opt)
+hyper.run()
diff --git a/examples/optimization_techniques/pattern_search.py b/examples/optimization_techniques/pattern_search.py
new file mode 100644
index 00000000..f7bc4846
--- /dev/null
+++ b/examples/optimization_techniques/pattern_search.py
@@ -0,0 +1,30 @@
+import numpy as np
+
+from hyperactive import Hyperactive
+from hyperactive.optimizers import PatternSearch
+
+
+def sphere_function(para):
+ x = para["x"]
+ y = para["y"]
+
+ return -(x * x + y * y)
+
+
+search_space = {
+ "x": list(np.arange(-10, 10, 0.1)),
+ "y": list(np.arange(-10, 10, 0.1)),
+}
+
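+# assumed meanings: n_positions sets the positions in the pattern, pattern_size
+# its initial extent, and reduction the factor by which it shrinks per iteration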
+opt = PatternSearch(
+ n_positions=2,
+ pattern_size=0.5,
+ reduction=0.99,
+)
+
+
+hyper = Hyperactive()
+hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
+hyper.run()
diff --git a/examples/optimization_techniques/powells_method.py b/examples/optimization_techniques/powells_method.py
new file mode 100644
index 00000000..c2f36edd
--- /dev/null
+++ b/examples/optimization_techniques/powells_method.py
@@ -0,0 +1,27 @@
+import numpy as np
+
+from hyperactive import Hyperactive
+from hyperactive.optimizers import PowellsMethod
+
+
+def sphere_function(para):
+ x = para["x"]
+ y = para["y"]
+
+ return -(x * x + y * y)
+
+
+search_space = {
+ "x": list(np.arange(-10, 10, 0.1)),
+ "y": list(np.arange(-10, 10, 0.1)),
+}
+
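+# Powell's method searches along one dimension at a time; iters_p_dim is
+# assumed to set the number of iterations spent per dimension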
+opt = PowellsMethod(
+ iters_p_dim=20,
+)
+
+hyper = Hyperactive()
+hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
+hyper.run()
diff --git a/examples/optimization_techniques/spiral_optimization.py b/examples/optimization_techniques/spiral_optimization.py
new file mode 100644
index 00000000..b5504843
--- /dev/null
+++ b/examples/optimization_techniques/spiral_optimization.py
@@ -0,0 +1,28 @@
+import numpy as np
+
+from hyperactive import Hyperactive
+from hyperactive.optimizers import SpiralOptimization
+
+
+def sphere_function(para):
+ x = para["x"]
+ y = para["y"]
+
+ return -(x * x + y * y)
+
+
+search_space = {
+ "x": list(np.arange(-25, 10, 0.1)),
+ "y": list(np.arange(-10, 15, 0.1)),
+}
+
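+# population: number of particles spiraling toward the best known position;
+# decay_rate < 1 is assumed to contract the spiral over time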
+opt = SpiralOptimization(
+ population=15,
+ decay_rate=0.99,
+)
+
+hyper = Hyperactive()
+hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
+hyper.run()
diff --git a/examples/optimization_techniques/stochastic_hill_climbing.py b/examples/optimization_techniques/stochastic_hill_climbing.py
new file mode 100644
index 00000000..66e44e23
--- /dev/null
+++ b/examples/optimization_techniques/stochastic_hill_climbing.py
@@ -0,0 +1,30 @@
+import numpy as np
+
+from hyperactive import Hyperactive
+from hyperactive.optimizers import StochasticHillClimbingOptimizer
+
+
+def sphere_function(para):
+ x = para["x"]
+ y = para["y"]
+
+ return -(x * x + y * y)
+
+
+search_space = {
+ "x": list(np.arange(-10, 10, 0.1)),
+ "y": list(np.arange(-10, 10, 0.1)),
+}
+
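+# assumed meanings: epsilon sets the step size, n_neighbours the candidates
+# sampled per step, and p_accept the probability of accepting a worse position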
+opt = StochasticHillClimbingOptimizer(
+ epsilon=0.01,
+ n_neighbours=5,
+ distribution="laplace",
+ p_accept=0.05,
+)
+
+hyper = Hyperactive()
+hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
+hyper.run()
diff --git a/hyperactive/__init__.py b/hyperactive/__init__.py
index 42b61685..5d50281c 100644
--- a/hyperactive/__init__.py
+++ b/hyperactive/__init__.py
@@ -2,7 +2,7 @@
# Email: simon.blanke@yahoo.com
# License: MIT License
-__version__ = "4.4.3"
+__version__ = "4.5.0"
__license__ = "MIT"
diff --git a/tests/test_empty_output/__init__.py b/tests/test_empty_output/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/test_empty_output/non_verbose.py b/tests/test_empty_output/non_verbose.py
new file mode 100644
index 00000000..5eaae417
--- /dev/null
+++ b/tests/test_empty_output/non_verbose.py
@@ -0,0 +1,27 @@
+import numpy as np
+from hyperactive import Hyperactive
+
+
+def ackley_function(para):
+ x, y = para["x"], para["y"]
+
+ loss = (
+ -20 * np.exp(-0.2 * np.sqrt(0.5 * (x * x + y * y)))
+ - np.exp(0.5 * (np.cos(2 * np.pi * x) + np.cos(2 * np.pi * y)))
+ + np.exp(1)
+ + 20
+ )
+
+ return -loss
+
+
+search_space = {
+ "x": list(np.arange(-10, 10, 0.01)),
+ "y": list(np.arange(-10, 10, 0.01)),
+}
+
+
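+# verbosity=False should suppress all command-line output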
+hyper = Hyperactive(verbosity=False)
+hyper.add_search(ackley_function, search_space, n_iter=30)
+hyper.run()
diff --git a/tests/test_empty_output/test_empty_output.py b/tests/test_empty_output/test_empty_output.py
new file mode 100644
index 00000000..5796e13c
--- /dev/null
+++ b/tests/test_empty_output/test_empty_output.py
@@ -0,0 +1,28 @@
+import os
+import subprocess
+import sys
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+verbose_file = os.path.join(here, "verbose.py")
+non_verbose_file = os.path.join(here, "non_verbose.py")
+
+
+def test_empty_output():
+    # run both scripts with the current interpreter (sys.executable), so the
+    # test also passes when pytest runs inside a virtual environment
+    output_verbose = subprocess.run(
+        [sys.executable, verbose_file], stdout=subprocess.PIPE
+    )
+    output_non_verbose = subprocess.run(
+        [sys.executable, non_verbose_file], stdout=subprocess.PIPE
+    )
+
+ verbose_str = output_verbose.stdout.decode()
+ non_verbose_str = output_non_verbose.stdout.decode()
+
+ print("\n verbose_str \n", verbose_str, "\n")
+ print("\n non_verbose_str \n", non_verbose_str, "\n")
+
+ assert verbose_str
+ assert not non_verbose_str
diff --git a/tests/test_empty_output/verbose.py b/tests/test_empty_output/verbose.py
new file mode 100644
index 00000000..05eecb83
--- /dev/null
+++ b/tests/test_empty_output/verbose.py
@@ -0,0 +1,27 @@
+import numpy as np
+from hyperactive import Hyperactive
+
+
+def ackley_function(para):
+ x, y = para["x"], para["y"]
+
+ loss = (
+ -20 * np.exp(-0.2 * np.sqrt(0.5 * (x * x + y * y)))
+ - np.exp(0.5 * (np.cos(2 * np.pi * x) + np.cos(2 * np.pi * y)))
+ + np.exp(1)
+ + 20
+ )
+
+ return -loss
+
+
+search_space = {
+ "x": list(np.arange(-10, 10, 0.01)),
+ "y": list(np.arange(-10, 10, 0.01)),
+}
+
+
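+# default verbosity: progress output is expected on stdout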
+hyper = Hyperactive()
+hyper.add_search(ackley_function, search_space, n_iter=30)
+hyper.run()
diff --git a/tests/test_optimization_strategies/test_early_stopping.py b/tests/test_optimization_strategies/test_early_stopping.py
index 7d5d6e2a..c7d0b636 100644
--- a/tests/test_optimization_strategies/test_early_stopping.py
+++ b/tests/test_optimization_strategies/test_early_stopping.py
@@ -9,8 +9,15 @@
from ._parametrize import optimizers
+n_iter_no_change_parametrize = (
+    "n_iter_no_change",
+    [5, 10, 15],
+)
+
+
+@pytest.mark.parametrize(*n_iter_no_change_parametrize)
@pytest.mark.parametrize(*optimizers)
-def test_strategy_early_stopping_0(Optimizer):
+def test_strategy_early_stopping_0(Optimizer, n_iter_no_change):
def objective_function(para):
score = -para["x1"] * para["x1"]
return score
@@ -19,7 +26,6 @@ def objective_function(para):
"x1": list(np.arange(0, 100, 0.1)),
}
- n_iter_no_change = 5
early_stopping = {
"n_iter_no_change": n_iter_no_change,
}
@@ -28,8 +34,8 @@ def objective_function(para):
optimizer2 = RandomSearchOptimizer()
opt_strat = CustomOptimizationStrategy()
- opt_strat.add_optimizer(optimizer1, duration=0.5, early_stopping=early_stopping)
- opt_strat.add_optimizer(optimizer2, duration=0.5)
+ opt_strat.add_optimizer(optimizer1, duration=0.9, early_stopping=early_stopping)
+ opt_strat.add_optimizer(optimizer2, duration=0.1)
n_iter = 30