Simple example of the evolutionary optimization framework
This notebook provides a simple example of how to use the evolutionary optimization framework built into the library. Under the hood, the evolutionary algorithm is powered by deap, and pypet takes care of the parallelization and storage of the simulation data for us.
Here we demonstrate how to fit the parameters of the evaluation function optimize_me, which simply computes the distance of the parameters to the unit circle and returns it as the fitness_tuple that DEAP expects.
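To make the objective concrete before we set anything up: the fitness is the absolute distance of a point (x, y) from the unit circle, |x^2 + y^2 - 1|, so points on the circle score 0, which is optimal because we will minimize. A quick illustration (the helper circle_distance below is only for this sketch and is not part of the library):
# illustrative helper, not used by the evolution itself
def circle_distance(x, y):
    return abs(x**2 + y**2 - 1)
print(circle_distance(1.0, 0.0))  # 0.0 -> on the unit circle, optimal
print(circle_distance(0.0, 0.0))  # 1.0 -> at the origin
print(circle_distance(2.0, 2.0))  # 7.0 -> far from the circle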
# change to the root directory of the project
import os
if os.getcwd().split("/")[-1] == "examples":
    os.chdir('..')
# This will reload all imports as soon as the code changes
%load_ext autoreload
%autoreload 2
try:
    import matplotlib.pyplot as plt
except ImportError:
    import sys
    !{sys.executable} -m pip install matplotlib seaborn
    import matplotlib.pyplot as plt
import numpy as np
import logging
from neurolib.utils.parameterSpace import ParameterSpace
from neurolib.optimize.evolution import Evolution
import neurolib.optimize.evolution.evolutionaryUtils as eu
import neurolib.utils.functions as func
def optimize_me(traj):
    ind = evolution.getIndividualFromTraj(traj)
    logging.info("Hello, I am {}".format(ind.id))
    logging.info("You can also call me {}, or simply ({:.2}, {:.2}).".format(ind.params, ind.x, ind.y))
    # let's make a circle
    computation_result = abs((ind.x**2 + ind.y**2) - 1)
    # DEAP wants a tuple as fitness, ALWAYS!
    fitness_tuple = (computation_result,)
    # we also require a dictionary with at least a single result for storing the results in the hdf
    result_dict = {"result": [computation_result]}
    return fitness_tuple, result_dict
pars = ParameterSpace(['x', 'y'], [[-5.0, 5.0], [-5.0, 5.0]])
evolution = Evolution(optimize_me, pars, weightList=[-1.0],
                      POP_INIT_SIZE=10, POP_SIZE=6, NGEN=4, filename="example-2.0.hdf")
# info: choose POP_INIT_SIZE=100, POP_SIZE=50, NGEN=10 for a real exploration;
# the values here are kept low for testing: POP_INIT_SIZE=10, POP_SIZE=6, NGEN=4
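The weightList follows DEAP's fitness-weight convention: one weight per entry of the fitness tuple, with a negative weight meaning that objective is minimized. Purely for illustration, the equivalent DEAP fitness definition would look like the sketch below (this is an assumption about the internals; neurolib sets up the DEAP fitness for you, so you never write this yourself):
from deap import base, creator
# Sketch of the DEAP convention behind weightList=[-1.0]: a single-objective
# fitness with a negative weight, i.e. the score is minimized.
creator.create("FitnessMinExample", base.Fitness, weights=(-1.0,))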
evolution.run(verbose=True)
evolution.loadResults()
evolution.info(plot=True)
gens, all_scores = evolution.getScoresDuringEvolution(reverse=True)
plt.figure(figsize=(8, 4), dpi=200)
plt.plot(gens, np.nanmean(all_scores, axis=1))
plt.fill_between(gens, np.nanmin(all_scores, axis=1), np.nanmax(all_scores, axis=1), alpha=0.3)
plt.xlabel("Generation #")
plt.ylabel("Score")