According to the SciPy documentation, it is possible to minimize functions of several variables, but it does not explain how to actually optimize such functions.
from scipy.optimize import minimize
from math import *

def f(c):
    return sqrt((sin(pi/2) + sin(0) + sin(c) - 2)**2 + (cos(pi/2) + cos(0) + cos(c) - 1)**2)

print(minimize(f, 3.14/2 + 3.14/7))
The code above does minimize the function f, but for my task I need to minimize over three variables.
Simply adding a second parameter and adjusting the minimization accordingly raises an error (TypeError: f() takes exactly 2 arguments (1 given)).
How does minimize work when minimizing over several variables?
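minimize treats the objective as a function of a single argument: a one-dimensional array that holds all of the variables. A minimal sketch, assuming the same objective with all three angles made free (the names a, b, c are illustrative):

import numpy as np
from scipy.optimize import minimize

def f(angles):
    # minimize passes one array; unpack it into the three variables
    a, b, c = angles
    return np.sqrt((np.sin(a) + np.sin(b) + np.sin(c) - 2)**2
                   + (np.cos(a) + np.cos(b) + np.cos(c) - 1)**2)

x0 = np.array([np.pi/2, 0.0, np.pi/2 + np.pi/7])  # initial guess for (a, b, c)
result = minimize(f, x0)
print(result.x)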
I have a dataset and I would like to find a mixed Gaussian model for it by the least-squares-error method.
The code looks like this:
import numpy as np
from math import sqrt, exp
from sklearn.neighbors import KernelDensity

# sample is the (n, 2) array of data points; the KDE serves as the target density
kde = KernelDensity().fit(sample)

def gaussian_2d(x, y, meanx, meany, sigx, sigy, rho):
    # bivariate normal density, |rho| < 1
    part1 = 1 / (2 * np.pi * sigx * sigy * sqrt(1 - rho**2))
    part2 = -1 / (2 * (1 - rho**2))
    part3 = (((x - meanx) / sigx)**2
             - 2 * rho * (x - meanx) * (y - meany) / (sigx * sigy)
             + ((y - meany) / sigy)**2)
    return part1 * exp(part2 * part3)

def square_error(f1, f2, u1, v1, sigu1, sigv1, rho1,
                 u2, v2, sigu2, sigv2, rho2,
                 u3, v3, sigu3, sigv3, rho3):
    # 1. Generate the mixed Gaussian model
    def gaussian1(x, y):
        return gaussian_2d(x, y, u1, v1, sigu1, sigv1, rho1)
    def gaussian2(x, y):
        return gaussian_2d(x, y, u2, v2, sigu2, sigv2, rho2)
    def gaussian3(x, y):
        return gaussian_2d(x, y, u3, v3, sigu3, sigv3, rho3)
    def mixed_model(x, y):
        # the third weight is 1 - f1 - f2 so the weights sum to one
        return f1*gaussian1(x, y) + f2*gaussian2(x, y) + (1 - f1 - f2)*gaussian3(x, y)
    # 2. Calculate the sum of squared errors against the KDE density
    sum_error = 0
    for point in sample:
        kde_density = exp(kde.score_samples([point])[0])  # score_samples returns log-density
        error = (mixed_model(point[0], point[1]) - kde_density)**2
        sum_error += error
    return sum_error
# How can …
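One way to hand all of the parameters to scipy.optimize.minimize is a thin wrapper that unpacks a single parameter vector; a minimal sketch, assuming the square_error above is defined and using purely illustrative starting values:

import numpy as np
from scipy.optimize import minimize

def objective(params):
    # minimize passes one array; unpack it into the 17 parameters of square_error
    return square_error(*params)

p0 = np.array([0.3, 0.3,                  # f1, f2 (placeholder guesses)
               0.0, 0.0, 1.0, 1.0, 0.0,   # u1, v1, sigu1, sigv1, rho1
               1.0, 1.0, 1.0, 1.0, 0.0,   # u2, v2, sigu2, sigv2, rho2
              -1.0, -1.0, 1.0, 1.0, 0.0]) # u3, v3, sigu3, sigv3, rho3

result = minimize(objective, p0, method='Nelder-Mead')
print(result.x)

Bounds on the sigmas and rhos (for example with method='L-BFGS-B', which accepts a bounds argument) may help keep the search away from invalid parameter values.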