After testing, I found an approach that works.
This working example first builds and evaluates an initial simplex, then hands it to the algorithm, which starts the optimization from there:
from math import pi, sin
from random import uniform
import matplotlib.pyplot as plt
from scipy.optimize import minimize
def function(x, a, b, c):
    """Evaluate the quadratic model a*x^2 + b*x + c at the point x."""
    square_term = a * x ** 2
    linear_term = b * x
    return square_term + linear_term + c
def cost_function(guess):
    """Mean squared error of the quadratic model (coefficients in `guess`)
    against the module-level target `data` sampled on `x_range`.

    Side effects: updates the live matplotlib line `opt_plot` with the
    current fit and prints the cost so the optimization can be watched.
    """
    predictions = [function(x, *guess) for x in x_range]
    squared_errors = [(p - target) ** 2 for p, target in zip(predictions, data)]
    # Refresh the interactive plot with the current candidate fit.
    opt_plot.set_ydata(predictions)
    plt.pause(1e-6)
    mse = sum(squared_errors) / len(squared_errors)
    print('cost', mse, 'guess', guess)
    return mse
def get_initial_simplex(guess, delta_0=.2):
    """Build the n+1 starting vertices for Nelder-Mead around `guess`.

    Vertex 0 is the guess itself; each further vertex perturbs one
    coordinate by `delta_0`. Vertices are returned sorted by their
    cost (best first), as evaluated via `cost_function`.
    """
    print('get simplex')
    vertices = [guess]
    for axis in range(len(guess)):
        perturbed = guess.copy()
        perturbed[axis] += delta_0
        vertices.append(perturbed)
    # Evaluate in the same order the vertices were created, then rank by cost.
    scored = [[cost_function(v), v] for v in vertices]
    scored = sorted(scored, key=lambda pair: pair[0])
    print('done')
    return [vertex for _, vertex in scored]
# Synthesize the target curve: a shifted sine (i.e. a cosine plus offset)
# sampled at 200 evenly spaced points on [-1, 1).
x_range = []
data = []
for step in range(-100, 100):
    x = step / 100
    x_range.append(x)
    data.append(3 * sin(x + pi / 2) + 2)
# Show the target curve and add a placeholder line that the optimizer
# will update live on every cost evaluation.
fig, ax = plt.subplots()
ax.plot(x_range, data)
opt_plot, = ax.plot(x_range, [0] * len(data))
# Random starting point for the three quadratic coefficients (a, b, c).
guess = [uniform(-1, 1) for _ in range(3)]
# Run Nelder-Mead on the MSE, seeding it with a custom simplex that has
# already been evaluated (and animated) around the random starting guess.
options = {'initial_simplex': get_initial_simplex(guess)}
result = minimize(cost_function, guess, method='Nelder-Mead', options=options)