import emoo
import numpy as np

# Search space: one [name, lower_bound, upper_bound] triple per free parameter.
variables = [
    ["x", -10, 10],
    ["y", -10, 10],
    ["z", -10, 10],
]

# Objectives to be minimized (a single one in this example).
objectives = ["dist1"]

# This is the function which is going to be minimized
def func_to_optimize(parameters):
    """Objective function: squared Euclidean distance from the origin.

    parameters: dict mapping variable names ('x', 'y', 'z') to values.
    Returns a dict mapping each objective name to its value, as emoo expects.
    """
    x = parameters['x']
    y = parameters['y']
    z = parameters['z']

    dist1 = x**2 + y**2 + z**2
    # Return a plain dict literal; the original wrapped it in a redundant dict() call.
    return {"dist1": dist1}

# After each generation this function is called by emoo
def checkpopulation(population, columns, gen):
    """Report the best individual of the current generation.

    population: 2-D array-like of individuals, already sorted best-first
        (assumed from the "population is already sorted" note — TODO confirm
        against the emoo documentation).
    columns: dict mapping variable/objective names to column indices.
    gen: generation number (int).
    Returns None; this is a monitoring callback with print side effects only.
    """
    # population is already sorted, so the best individual comes first
    best_individual = population[0]
    x = best_individual[columns['x']]
    y = best_individual[columns['y']]
    z = best_individual[columns['z']]
    dist1 = best_individual[columns['dist1']]

    # Parenthesized form works under both Python 2 (prints one expression)
    # and Python 3 (print function); the original used the 2-only statement.
    print("Generation %d: x: %.2f; y: %.2f; z: %.2f; dist1: %.2f" % (gen, x, y, z, dist1))

    # Save the population as a numpy array for further analysis
    #np.save("pop%d"%gen, population)
    
# Initiate the Evolutionary Multiobjective Optimization.
# Bind the instance to 'optimizer' instead of rebinding the imported
# 'emoo' module name, which the original shadowed.
optimizer = emoo.Emoo(N = 10, C = 20, variables = variables, objectives = objectives)
# Parameters:
# N: size of population
# C: size of capacity
optimizer.setup(eta_m_0 = 20, eta_c_0 = 20, p_m = 0.5)
# Parameters:
# eta_m_0, eta_c_0: defines the initial strength of the mutation and crossover parameter (large values mean weak effect)
# p_m: probability of mutation of a parameter (holds for each parameter independently)

# Hook up the objective function and the per-generation monitoring callback.
optimizer.get_objectives_error = func_to_optimize
optimizer.checkpopulation = checkpopulation

optimizer.evolution(generations = 10)