Tutorial

The minimal example

Here is a short usage example that keeps the default settings and performs a basic minimization without any frills.

#include <stdlib.h>
#include <eskit.h>

double
fitness(const double* x, size_t N) {
  ...
}

int
main(int argc, char* argv[]) {
  ekCMA strategy;
  ekOptimizer optim;

  /* Initialization */
  ekOptimizer_init(&optim, 10);
  ekCMA_init(&strategy, ekOptimizer_N(&optim));

  /* Setup */
  ekCMA_setOptimizer(&strategy, &optim);

  /* Iterations */
  ekOptimizer_start(&optim);
  do {
    /* Sample */
    ekOptimizer_sampleCloud(&optim);

    /* Evaluate */
    ekOptimizer_evaluateFunction(&optim, fitness);

    /* Update */
    ekOptimizer_update(&optim);
  } while(!ekOptimizer_stop(&optim));

  /* Job done */
  ekCMA_destroy(&strategy);
  ekOptimizer_destroy(&optim);
  return EXIT_SUCCESS;
}

The initial search point for the optimization is not set, so the default, the null vector, is used. If you have no idea where the optimum lies, a better choice is to pick the initial point randomly in the search space.
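
For example, reusing the calls that appear in the custom settings example below, the initial point can be drawn uniformly in [-1, 1]^N right after the setup step. This is only a sketch: the range [-1, 1] is an arbitrary choice, and the randomizer should be seeded beforehand.

  /* Draw the initial search point uniformly in [-1, 1]^N */
  ekArrayOpsD_uniform(ekOptimizer_xMean(&optim), ekOptimizer_N(&optim),
                      ekOptimizer_getRandomizer(&optim), -1.0, 1.0);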

Custom settings

Here, the minimal example is augmented with some custom settings:

  • Use of the SepCMA strategy instead of the CMA strategy.

  • Custom population sizing.

  • Seeding of the optimizer’s pseudo-random number generator.

  • Random initialization of the initial search point xMean.

#include <stdlib.h>
#include <eskit.h>

double
fitness(const double* x, size_t N) {
  ...
}

int
main(int argc, char* argv[]) {
  ekOptimizer optim;
  ekSepCMA strategy;

  /* Initialization */
  ekOptimizer_init(&optim, 10);
  ekSepCMA_init(&strategy, ekOptimizer_N(&optim));

  /* Setup */
  ekSepCMA_setOptimizer(&strategy, &optim);

  ekOptimizer_setMuLambda(&optim, ekOptimizer_N(&optim) / 2, ekOptimizer_N(&optim));

  ekRandomizer_seed(ekOptimizer_getRandomizer(&optim), 1337);
  ekArrayOpsD_uniform(ekOptimizer_xMean(&optim), ekOptimizer_N(&optim), ekOptimizer_getRandomizer(&optim), -1.0, 1.0);

  /* Iterations */
  ekOptimizer_start(&optim);
  do {
    /* Sample */
    ekOptimizer_sampleCloud(&optim);

    /* Evaluate */
    ekOptimizer_evaluateFunction(&optim, fitness);

    /* Update */
    ekOptimizer_update(&optim);
  } while(!ekOptimizer_stop(&optim));

  /* Job done */
  ekSepCMA_destroy(&strategy);
  ekOptimizer_destroy(&optim);
  return EXIT_SUCCESS;
}

The seed for the optimizer’s pseudo-random number generator is fixed to 1337. You might want to pick a different seed at each run, which is doable but platform-dependent. I suggest the following procedures to pick a good seed (a sketch of the first option follows the list):

  • Under Linux, read /dev/random or /dev/urandom.

  • Under any operating system, read the system time, the processor time usage, the hostname, and the process identifier. Put them in a string, and hash the string with a good hashing function like Murmur.
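
As a sketch of the first option, a small hypothetical helper (here called readSeed) can read a seed from /dev/urandom and pass it to the optimizer’s randomizer. It assumes that ekRandomizer_seed accepts an integer seed, as in the example above where 1337 is passed, and it only uses standard C I/O:

#include <stdio.h>

unsigned long
readSeed(void) {
  unsigned long seed;
  FILE* f;

  seed = 0; /* kept as a fallback value if the read fails */
  f = fopen("/dev/urandom", "rb");
  if (f != NULL) {
    if (fread(&seed, sizeof(seed), 1, f) != 1)
      seed = 0;
    fclose(f);
  }
  return seed;
}

  /* In main(), instead of the fixed seed: */
  ekRandomizer_seed(ekOptimizer_getRandomizer(&optim), readSeed());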

Custom evaluation

Here, the minimal example is augmented to show how to handle custom evaluation. This can be useful for resampling, for instance to deal with singular points where the fitness is not defined. Another use case is evaluating the points in parallel, by exploiting multiple threads and/or a cluster of computers.

#include <stdlib.h>
#include <eskit.h>

int
isValidPoint(const double* x, size_t N) {
  ...
}

double
fitness(const double* x, size_t N) {
  ...
}

int
main(int argc, char* argv[]) {
  size_t i;
  ekCMA strategy;
  ekOptimizer optim;

  /* Initialization */
  ekOptimizer_init(&optim, 10);
  ekCMA_init(&strategy, ekOptimizer_N(&optim));

  /* Setup */
  ekCMA_setOptimizer(&strategy, &optim);

  /* Iterations */
  ekOptimizer_start(&optim);
  do {
    /* Sample */
    ekOptimizer_sampleCloud(&optim);

    /* Evaluate */
    for(i = 0; i < ekOptimizer_lambda(&optim); ++i) {
      /* Resample if the point is not valid */
      while(!isValidPoint(ekOptimizer_point(&optim, i).x, ekOptimizer_N(&optim)))
        ekOptimizer_samplePoint(&optim, i);

      /* Evaluate the point */
      ekOptimizer_point(&optim, i).fitness = fitness(ekOptimizer_point(&optim, i).x, ekOptimizer_N(&optim));
    }

    /* Update */
    ekOptimizer_update(&optim);
  } while(!ekOptimizer_stop(&optim));

  /* Job done */
  ekCMA_destroy(&strategy);
  ekOptimizer_destroy(&optim);
  return EXIT_SUCCESS;
}

Note that the resampling loop might never return, depending on the validity domain. Using a maximum number of retries would be safer.
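
Here is a minimal sketch of such a bounded loop, replacing the body of the evaluation loop above. The bound maxRetries and the penalty fitness assigned to points that remain invalid are arbitrary choices; the penalty has to make sense for your fitness function.

      const size_t maxRetries = 100; /* arbitrary bound */
      size_t retry;

      /* Resample at most maxRetries times */
      retry = 0;
      while(!isValidPoint(ekOptimizer_point(&optim, i).x, ekOptimizer_N(&optim)) &&
            retry < maxRetries) {
        ekOptimizer_samplePoint(&optim, i);
        ++retry;
      }

      /* Evaluate the point, or assign a penalty if it is still invalid */
      if (isValidPoint(ekOptimizer_point(&optim, i).x, ekOptimizer_N(&optim)))
        ekOptimizer_point(&optim, i).fitness = fitness(ekOptimizer_point(&optim, i).x, ekOptimizer_N(&optim));
      else
        ekOptimizer_point(&optim, i).fitness = 1e30;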

Logging

It is usually quite useful to display a log of an optimization run. Here is the minimal example augmented to print a basic log to the standard output.

#include <stdlib.h>
#include <stdio.h>
#include <eskit.h>

double
fitness(const double* x, size_t N) {
  ...
}

int
main(int argc, char* argv[]) {
  size_t nbEvals;
  ekOptimizer optim;
  ekCMA strategy;

  /* Initialization */
  ekOptimizer_init(&optim, 10);
  ekCMA_init(&strategy, ekOptimizer_N(&optim));

  /* Setup */
  ekCMA_setOptimizer(&strategy, &optim);

  /* Iterations */
  nbEvals = 0;
  ekOptimizer_start(&optim);
  do {
    /* Sample */
    ekOptimizer_sampleCloud(&optim);

    /* Evaluate */
    ekOptimizer_evaluateFunction(&optim, fitness);
    nbEvals += ekOptimizer_lambda(&optim);

    /* Update */
    ekOptimizer_update(&optim);

    /* Print nb evals, fitness, sigma */
    printf("%lu %e %e\n",
           (unsigned long) nbEvals,
           ekOptimizer_bestPoint(&optim).fitness,
           ekCMA_sigma(&strategy));
  } while(!ekOptimizer_stop(&optim));

  /* Print the best individual and the mean point */
  printf("# xBest=");
  ekArrayOpsD_print(ekOptimizer_bestPoint(&optim).x, ekOptimizer_N(&optim), stdout);
  printf("# xMean=");
  ekArrayOpsD_print(ekOptimizer_xMean(&optim), ekOptimizer_N(&optim), stdout);

  /* Job done */
  ekCMA_destroy(&strategy);
  ekOptimizer_destroy(&optim);
  return EXIT_SUCCESS;
}

Custom covariance matrix setting

ESKit allows the use of a custom covariance matrix setting for the CMA point handlers. The publication that introduces SepCMA suggests using SepCMA to bootstrap CMA. Here is an implementation of the strategy described in the SepCMA-ES publication.

#include <stdlib.h>
#include <eskit.h>

double
fitness(const double* u, size_t N) {
  ...
}

void
optimize(ekOptimizer* optim, size_t nbIter) {
  size_t nbIterDec;

  /* nbIter == 0 means: iterate until the stop criterion is met */
  if (nbIter == 0) {
    nbIter = 1;
    nbIterDec = 0;
  }
  else
    nbIterDec = 1;

  /* Iterations */
  ekOptimizer_start(optim);
  do {
    /* Sample */
    ekOptimizer_sampleCloud(optim);

    /* Evaluate */
    ekOptimizer_evaluateFunction(optim, fitness);

    /* Update */
    ekOptimizer_update(optim);
    nbIter -= nbIterDec;
  } while((nbIter != 0) && (!ekOptimizer_stop(optim)));
}

void
switchSepCMA2CMA(ekSepCMA* sepCMA, ekCMA* CMA) {
  size_t N;
  ekMatrix cov;

  N = ekMatrix_nbCols(ekSepCMA_C(sepCMA));

  /* Build a full N x N matrix with SepCMA's diagonal D on its diagonal,
     and hand it over to CMA as its covariance matrix */
  ekMatrix_init(&cov, N, N);
  ekMatrix_fill(&cov, 0.0);
  ekMatrix_setDiagonal(&cov, ekSepCMA_D(sepCMA));
  ekCMA_setC(CMA, &cov);
  ekMatrix_destroy(&cov);
}

int
main(int argc, char* argv[]) {
  ekOptimizer optim;
  ekCMA CMA;
  ekSepCMA sepCMA;

  /* Initialization */
  ekOptimizer_init(&optim, 10);
  ekCMA_init(&CMA, ekOptimizer_N(&optim));
  ekSepCMA_init(&sepCMA, ekOptimizer_N(&optim));

  /* First optimization step: setup & run with SepCMA */
  ekSepCMA_setOptimizer(&sepCMA, &optim);
  optimize(&optim, 100.0 * ekOptimizer_N(&optim) / ekOptimizer_lambda(&optim));

  /* Second optimization step: switch to CMA & run */
  switchSepCMA2CMA(&sepCMA, &CMA);
  ekCMA_setOptimizer(&CMA, &optim);
  optimize(&optim, 0);

  /* Job done */
  ekCMA_destroy(&CMA);
  ekSepCMA_destroy(&sepCMA);
  ekOptimizer_destroy(&optim);
  return EXIT_SUCCESS;
}