|
| 1 | +""" |
| 2 | +Minimal example for fixed initial conditions |
| 3 | +============================================ |
| 4 | +
|
| 5 | +Example demonstrating the use of Cartesian genetic programming for a simple |
| 6 | +regression task (see `example_minimal.py`). However, here we initialize the |
| 7 | +initial parent population to a specific expression. |
| 8 | +""" |
| 9 | + |
# The docopt str is added explicitly to ensure compatibility with
# sphinx-gallery.
docopt_str = """
  Usage:
    example_initialize_individuals.py

  Options:
    -h --help
"""

import matplotlib.pyplot as plt
import numpy as np
import scipy.constants
from docopt import docopt

import cgp

# Parse command-line options (only `-h/--help` is accepted); docopt exits
# early with the usage string when called with `--help`.
args = docopt(docopt_str)
| 29 | +# %% |
| 30 | +# We first define a target function. |
| 31 | + |
| 32 | + |
def f_target(x):
    """Target function for the regression task: f(x) = x^2 + 1."""
    return 1.0 + x ** 2
| 35 | + |
| 36 | + |
| 37 | +# %% |
| 38 | +# Then we define the objective function for the evolution. It uses |
| 39 | +# the mean-squared error between the output of the expression |
| 40 | +# represented by a given individual and the target function evaluated |
| 41 | +# on a set of random points. |
| 42 | + |
| 43 | + |
def objective(individual):
    """Assign a fitness to `individual` and return it.

    The fitness is the negative mean-squared error between the output of
    the expression represented by the individual and the target function,
    evaluated on a fixed set of random sample points.
    """
    # Avoid re-evaluating individuals that already carry a fitness value.
    if not individual.fitness_is_None():
        return individual

    n_function_evaluations = 1000

    # Fixed seed so that every individual is evaluated on the same points.
    np.random.seed(1234)

    f = individual.to_func()
    sample_points = np.random.uniform(-4, 4, n_function_evaluations)
    loss = sum((f_target(x) - f(x)) ** 2 for x in sample_points)

    individual.fitness = -loss / n_function_evaluations

    return individual
| 65 | + |
| 66 | + |
# %%
# We want to initialize all individuals to the same expression (for
# illustration; functionally not necessarily the best idea). We can provide a
# function to the `Population` constructor which is called for each individual
# of the initial parent population. Unfortunately, we cannot provide the
# expression directly, but rather need to manually set (parts of) the
# individual's genome to the correct values. See Jordan, Schmidt et al. (2020)
# https://doi.org/10.7554/eLife.66273 figure 2 for details about the encoding.
def individual_init(ind):
    """Overwrite the genome of `ind` so that it encodes f(x) = x * x."""
    expression_genes = [2, 0, 0]  # presumably: function gene + two input genes — see the eLife reference above
    ind.genome.set_expression_for_output(expression_genes)
    # Sanity check that the genome now decodes to the intended expression.
    expected = "x_0*x_0"
    assert str(ind.to_sympy(simplify=False)) == expected
    return ind
| 80 | + |
| 81 | + |
# Create the population; `individual_init` is applied to each individual of
# the initial parent population so they all start from the same expression.
pop = cgp.Population(individual_init=individual_init)
| 83 | + |
# %%
# Next, we set up the evolutionary search. We define a callback for recording
# of fitness over generations
history = {"fitness_champion": []}


def recording_callback(pop):
    """Append the current champion's fitness to the recorded history."""
    history["fitness_champion"].append(pop.champion.fitness)
| 93 | + |
| 94 | + |
# %%
# and finally perform the evolution relying on the library's default
# hyperparameters except that we terminate the evolution as soon as one
# individual has reached fitness zero.
pop = cgp.evolve(
    objective, pop, termination_fitness=0.0, print_progress=True, callback=recording_callback
)
| 102 | + |
| 103 | + |
# %%
# After finishing the evolution, we plot the result and log the final
# evolved expression.


width = 9.0
# Two panels side by side: fitness history (left), function fit (right).
fig, (ax_fitness, ax_function) = plt.subplots(
    1, 2, figsize=(width, width / scipy.constants.golden)
)

ax_fitness.set_xlabel("Generation")
ax_fitness.set_ylabel("Fitness")
ax_fitness.plot(history["fitness_champion"], label="Champion")
# symlog keeps the (negative) fitness values readable across magnitudes.
ax_fitness.set_yscale("symlog")
ax_fitness.set_ylim(-1.0e2, 0.1)
ax_fitness.axhline(0.0, color="0.7")

# Evaluate the evolved champion alongside the target on a fixed grid.
champion_callable = pop.champion.to_func()
x = np.linspace(-5.0, 5.0, 20)
y = [champion_callable(x_i) for x_i in x]
y_target = [f_target(x_i) for x_i in x]

ax_function.plot(x, y_target, lw=2, alpha=0.5, label="Target")
ax_function.plot(x, y, "x", label="Champion")
ax_function.legend()
ax_function.set_ylabel(r"$f(x)$")
ax_function.set_xlabel(r"$x$")

fig.savefig("example_initialize_individuals.pdf", dpi=300)
0 commit comments