Merge branch 'master' into feature/add-constr-opt
SimonBlanke committed Oct 19, 2023
2 parents bf26abd + 4fe153e commit 4917688
Showing 15 changed files with 311 additions and 14 deletions.
18 changes: 9 additions & 9 deletions README.md
@@ -76,9 +76,9 @@ As its name suggests Hyperactive started as a hyperparameter optimization package
<br>

## What's new?
- ### 27.08.2023 v4.5.0 add early-stopping for Optimization Strategies
- ### 01.03.2023 v4.4.0 add new feature: "Optimization Strategies"
- ### 18.11.2022 v4.3.0 with three new optimization algorithms (Spiral Optimization, Lipschitz Optimizer, DIRECT Optimizer)
- ### 04.05.2022 v4.2.0 with support for handling Exceptions and Callbacks

<br>

@@ -115,29 +115,29 @@ Hyperactive features a collection of optimization algorithms that can be used for
<li><a href="./examples/optimization_techniques/hill_climbing.py">Hill Climbing</a></li>
<li><a href="./examples/optimization_techniques/repulsing_hill_climbing.py">Repulsing Hill Climbing</a></li>
<li><a href="./examples/optimization_techniques/simulated_annealing.py">Simulated Annealing</a></li>
- <li>Downhill Simplex Optimizer</li>
+ <li><a href="./examples/optimization_techniques/downhill_simplex.py">Downhill Simplex Optimizer</a></li>
</ul><br>
<a><b>Global Search:</b></a>
<ul>
<li><a href="./examples/optimization_techniques/random_search.py">Random Search</a></li>
- <li>Grid Search Optimizer</li>
+ <li><a href="./examples/optimization_techniques/grid_search.py">Grid Search</a></li>
<li><a href="./examples/optimization_techniques/rand_rest_hill_climbing.py">Random Restart Hill Climbing</a></li>
<li><a href="./examples/optimization_techniques/random_annealing.py">Random Annealing</a> [<a href="#/./overview#experimental-algorithms">*</a>] </li>
- <li>Powell's Method</li>
- <li>Pattern Search</li>
+ <li><a href="./examples/optimization_techniques/powells_method.py">Powell's Method</a></li>
+ <li><a href="./examples/optimization_techniques/pattern_search.py">Pattern Search</a></li>
</ul><br>
<a><b>Population Methods:</b></a>
<ul>
<li><a href="./examples/optimization_techniques/parallel_tempering.py">Parallel Tempering</a></li>
<li><a href="./examples/optimization_techniques/particle_swarm_optimization.py">Particle Swarm Optimizer</li>
<li>Spiral Optimization</li>
<li><a href="./examples/optimization_techniques/spiral_optimization.py">Spiral Optimization</li>
<li><a href="./examples/optimization_techniques/evolution_strategy.py">Evolution Strategy</a></li>
</ul><br>
<a><b>Sequential Methods:</b></a>
<ul>
<li><a href="./examples/optimization_techniques/bayesian_optimization.py">Bayesian Optimization</a></li>
- <li>Lipschitz Optimization</li>
- <li>Direct Algorithm</li>
+ <li><a href="./examples/optimization_techniques/lipschitz_optimization.py">Lipschitz Optimization</a></li>
+ <li><a href="./examples/optimization_techniques/direct_algorithm.py">Direct Algorithm</a></li>
<li><a href="./examples/optimization_techniques/tpe.py">Tree of Parzen Estimators</a></li>
<li><a href="./examples/optimization_techniques/forest_optimization.py">Forest Optimizer</a>
[<a href="#/./overview#references">dto</a>] </li>
@@ -939,7 +939,7 @@ Each of the following optimizer classes can be initialized and passed to the "add_search"-method
</details>

<details>
- <summary><b>v4.5.0</b> </summary>
+ <summary><b>v4.5.0</b> :heavy_check_mark: </summary>

- [x] add early stopping feature to custom optimization strategies
- [x] display additional outputs from objective-function in results in command-line
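Since early stopping for custom optimization strategies is the headline feature of this release, a minimal sketch may help. This is an editor's illustration, not part of the commit: the `hyperactive.optimizers.strategies` import path and the `early_stopping` argument to `add_optimizer` (with an assumed `n_iter_no_change` key, mirroring the existing `add_search` option) should be checked against the v4.5.0 documentation.

```python
import numpy as np

from hyperactive import Hyperactive
from hyperactive.optimizers import HillClimbingOptimizer, BayesianOptimizer
from hyperactive.optimizers.strategies import CustomOptimizationStrategy


def sphere_function(para):
    x, y = para["x"], para["y"]
    return -(x * x + y * y)


search_space = {
    "x": list(np.arange(-10, 10, 0.1)),
    "y": list(np.arange(-10, 10, 0.1)),
}

# two-stage strategy: coarse hill climbing, then Bayesian refinement;
# each stage receives half of the total n_iter via `duration`
opt_strat = CustomOptimizationStrategy()
opt_strat.add_optimizer(
    HillClimbingOptimizer(),
    duration=0.5,
    # assumed parameter: end this stage early if the best score
    # does not improve for 15 consecutive iterations
    early_stopping={"n_iter_no_change": 15},
)
opt_strat.add_optimizer(BayesianOptimizer(), duration=0.5)

hyper = Hyperactive()
hyper.add_search(sphere_function, search_space, n_iter=100, optimizer=opt_strat)
hyper.run()
```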
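The second changelog item, displaying additional outputs from the objective function in the results, can likewise be sketched. Again an editor's illustration under assumptions: it relies on the objective returning a `(score, dict)` tuple, with the dict entries (here the hypothetical `x_abs` column) surfacing as extra columns in the command-line results and in `hyper.search_data(...)`.

```python
import numpy as np

from hyperactive import Hyperactive


def objective(para):
    x = para["x"]
    score = -(x * x)
    # assumed tuple-return convention: the dict's entries are reported
    # alongside the score in the results
    return score, {"x_abs": abs(x)}


search_space = {"x": list(np.arange(-10, 10, 0.1))}

hyper = Hyperactive()
hyper.add_search(objective, search_space, n_iter=50)
hyper.run()

# the extra "x_abs" column should appear in the collected search data
print(hyper.search_data(objective))
```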
24 changes: 24 additions & 0 deletions examples/optimization_techniques/direct_algorithm.py
@@ -0,0 +1,24 @@
import numpy as np

from hyperactive import Hyperactive
from hyperactive.optimizers import DirectAlgorithm


def sphere_function(para):
    x = para["x"]
    y = para["y"]

    # Hyperactive maximizes the objective, so the sphere function is negated
    return -(x * x + y * y)


search_space = {
    "x": list(np.arange(-10, 10, 0.1)),
    "y": list(np.arange(-10, 10, 0.1)),
}

opt = DirectAlgorithm()


hyper = Hyperactive()
hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
hyper.run()
29 changes: 29 additions & 0 deletions examples/optimization_techniques/downhill_simplex.py
@@ -0,0 +1,29 @@
import numpy as np

from hyperactive import Hyperactive
from hyperactive.optimizers import DownhillSimplexOptimizer


def sphere_function(para):
    x = para["x"]
    y = para["y"]

    return -(x * x + y * y)


search_space = {
    "x": list(np.arange(-10, 10, 0.1)),
    "y": list(np.arange(-10, 10, 0.1)),
}

opt = DownhillSimplexOptimizer(
    alpha=1.2,  # reflection factor
    gamma=1.1,  # expansion factor
    beta=0.8,  # contraction factor
    sigma=1,  # shrink factor
)


hyper = Hyperactive()
hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
hyper.run()
26 changes: 26 additions & 0 deletions examples/optimization_techniques/grid_search.py
@@ -0,0 +1,26 @@
import numpy as np

from hyperactive import Hyperactive
from hyperactive.optimizers import GridSearchOptimizer


def sphere_function(para):
    x = para["x"]
    y = para["y"]

    return -(x * x + y * y)


search_space = {
    "x": list(np.arange(-10, 10, 0.1)),
    "y": list(np.arange(-10, 10, 0.1)),
}

opt = GridSearchOptimizer(
    step_size=3,
)


hyper = Hyperactive()
hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
hyper.run()
25 changes: 25 additions & 0 deletions examples/optimization_techniques/lipschitz_optimization.py
@@ -0,0 +1,25 @@
import numpy as np

from hyperactive import Hyperactive
from hyperactive.optimizers import LipschitzOptimizer


def sphere_function(para):
    x = para["x"]
    y = para["y"]

    return -(x * x + y * y)


search_space = {
    "x": list(np.arange(-10, 10, 0.1)),
    "y": list(np.arange(-10, 10, 0.1)),
}

opt = LipschitzOptimizer(
    sampling={"random": 100000},
)

hyper = Hyperactive()
hyper.add_search(sphere_function, search_space, n_iter=100, optimizer=opt)
hyper.run()
28 changes: 28 additions & 0 deletions examples/optimization_techniques/pattern_search.py
@@ -0,0 +1,28 @@
import numpy as np

from hyperactive import Hyperactive
from hyperactive.optimizers import PatternSearch


def sphere_function(para):
    x = para["x"]
    y = para["y"]

    return -(x * x + y * y)


search_space = {
    "x": list(np.arange(-10, 10, 0.1)),
    "y": list(np.arange(-10, 10, 0.1)),
}

opt = PatternSearch(
    n_positions=2,
    pattern_size=0.5,
    reduction=0.99,
)


hyper = Hyperactive()
hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
hyper.run()
25 changes: 25 additions & 0 deletions examples/optimization_techniques/powells_method.py
@@ -0,0 +1,25 @@
import numpy as np

from hyperactive import Hyperactive
from hyperactive.optimizers import PowellsMethod


def sphere_function(para):
    x = para["x"]
    y = para["y"]

    return -(x * x + y * y)


search_space = {
    "x": list(np.arange(-10, 10, 0.1)),
    "y": list(np.arange(-10, 10, 0.1)),
}

opt = PowellsMethod(
    iters_p_dim=20,
)

hyper = Hyperactive()
hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
hyper.run()
26 changes: 26 additions & 0 deletions examples/optimization_techniques/spiral_optimization.py
@@ -0,0 +1,26 @@
import numpy as np

from hyperactive import Hyperactive
from hyperactive.optimizers import SpiralOptimization


def sphere_function(para):
    x = para["x"]
    y = para["y"]

    return -(x * x + y * y)


search_space = {
    "x": list(np.arange(-25, 10, 0.1)),
    "y": list(np.arange(-10, 15, 0.1)),
}

opt = SpiralOptimization(
    population=15,
    decay_rate=0.99,
)

hyper = Hyperactive()
hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
hyper.run()
28 changes: 28 additions & 0 deletions examples/optimization_techniques/stochastic_hill_climbing.py
@@ -0,0 +1,28 @@
import numpy as np

from hyperactive import Hyperactive
from hyperactive.optimizers import StochasticHillClimbingOptimizer


def sphere_function(para):
    x = para["x"]
    y = para["y"]

    return -(x * x + y * y)


search_space = {
    "x": list(np.arange(-10, 10, 0.1)),
    "y": list(np.arange(-10, 10, 0.1)),
}

opt = StochasticHillClimbingOptimizer(
    epsilon=0.01,
    n_neighbours=5,
    distribution="laplace",
    p_accept=0.05,  # probability of accepting a worse position
)

hyper = Hyperactive()
hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
hyper.run()
2 changes: 1 addition & 1 deletion hyperactive/__init__.py
@@ -2,7 +2,7 @@
# Email: simon.blanke@yahoo.com
# License: MIT License

__version__ = "4.4.3"
__version__ = "4.5.0"
__license__ = "MIT"


Empty file.
26 changes: 26 additions & 0 deletions tests/test_empty_output/non_verbose.py
@@ -0,0 +1,26 @@
import numpy as np
from hyperactive import Hyperactive


def ackley_function(para):
    x, y = para["x"], para["y"]

    loss = (
        -20 * np.exp(-0.2 * np.sqrt(0.5 * (x * x + y * y)))
        - np.exp(0.5 * (np.cos(2 * np.pi * x) + np.cos(2 * np.pi * y)))
        + np.exp(1)
        + 20
    )

    # Hyperactive maximizes, so return the negative Ackley loss
    return -loss


search_space = {
    "x": list(np.arange(-10, 10, 0.01)),
    "y": list(np.arange(-10, 10, 0.01)),
}


hyper = Hyperactive(verbosity=False)
hyper.add_search(ackley_function, search_space, n_iter=30)
hyper.run()
23 changes: 23 additions & 0 deletions tests/test_empty_output/test_empty_output.py
@@ -0,0 +1,23 @@
import os
import subprocess

here = os.path.dirname(os.path.abspath(__file__))

verbose_file = os.path.join(here, "verbose.py")
non_verbose_file = os.path.join(here, "non_verbose.py")


def test_empty_output():
    output_verbose = subprocess.run(["python", verbose_file], stdout=subprocess.PIPE)
    output_non_verbose = subprocess.run(
        ["python", non_verbose_file], stdout=subprocess.PIPE
    )

    verbose_str = output_verbose.stdout.decode()
    non_verbose_str = output_non_verbose.stdout.decode()

    print("\n verbose_str \n", verbose_str, "\n")
    print("\n non_verbose_str \n", non_verbose_str, "\n")

    assert verbose_str
    assert not non_verbose_str
26 changes: 26 additions & 0 deletions tests/test_empty_output/verbose.py
@@ -0,0 +1,26 @@
import numpy as np
from hyperactive import Hyperactive


def ackley_function(para):
    x, y = para["x"], para["y"]

    loss = (
        -20 * np.exp(-0.2 * np.sqrt(0.5 * (x * x + y * y)))
        - np.exp(0.5 * (np.cos(2 * np.pi * x) + np.cos(2 * np.pi * y)))
        + np.exp(1)
        + 20
    )

    return -loss


search_space = {
    "x": list(np.arange(-10, 10, 0.01)),
    "y": list(np.arange(-10, 10, 0.01)),
}


hyper = Hyperactive()
hyper.add_search(ackley_function, search_space, n_iter=30)
hyper.run()