
Error when using scipy.optimize: TypeError: costFuncReg() missing 1 required positional argument: 'lambda_'

How do I fix this error that occurs when using scipy.optimize: TypeError: costFuncReg() missing 1 required positional argument: 'lambda_'?

import numpy as np

def costFuncReg(theta, X, y, lambda_):
    # Regularized logistic regression: returns the cost J and the gradient
    m = y.size
    J = 0
    grad = np.zeros(theta.shape)
    h = sigmoid(X.dot(theta.T))  # sigmoid is defined elsewhere in the notebook

    # Exclude the bias term from regularization
    # (note: temp is the same array as theta, so this also sets theta[0] = 0)
    temp = theta
    temp[0] = 0

    J = (1 / m) * np.sum(-y.dot(np.log(h)) - (1 - y).dot(np.log(1 - h))) + (lambda_ / (2 * m)) * np.sum(np.square(temp))

    grad = (1 / m) * (h - y).dot(X)
    grad = grad + (lambda_ / m) * temp

    return J, grad
  • I have defined the regularized cost function costFuncReg above.
  • I want to use scipy.optimize.minimize to run logistic regression.
options = {'maxiter': 400}
res = optimize.minimize(costFuncReg, initial_theta, (X, y), jac=True, method='TNC', options=options)

cost = res.fun
theta = res.X
print('Cost at theta found by optimize.minimize: {:.3f}'.format(cost))
print('Expected cost (approx): 0.203\n')

print('theta:')
print('\t[{:.3f}, {:.3f}, {:.3f}]'.format(*theta))
print('Expected theta (approx):\n\t[-25.161, 0.206, 0.201]')

I am stuck on this error:

---------------------------------------------------------------------------

TypeError                                 Traceback (most recent call last)
      1 options = {'maxiter': 400}
----> 2 res = optimize.minimize(costFuncReg, initial_theta, (X, y), jac=True, method='TNC', options=options)
      3
      4 cost = res.fun
      5 theta = res.X

C:\ProgramData\Anaconda3\lib\site-packages\scipy\optimize\_minimize.py in minimize(fun, x0, args, method, jac, hess, hessp, bounds, constraints, tol, callback, options)
    618                                 callback=callback, **options)
    619     elif meth == 'tnc':
--> 620         return _minimize_tnc(fun, x0, args, jac, bounds, callback=callback,
    621                              **options)
    622     elif meth == 'cobyla':

C:\ProgramData\Anaconda3\lib\site-packages\scipy\optimize\tnc.py in _minimize_tnc(fun, x0, args, jac, bounds, eps, scale, offset, mesg_num, maxCGit, maxiter, eta, stepmx, accuracy, minfev, ftol, xtol, gtol, rescale, disp, callback, finite_diff_rel_step, maxfun, **unknown_options)
    373     messages = MSG_NONE
    374
--> 375     sf = _prepare_scalar_function(fun, x0, jac=jac, args=args, epsilon=eps,
    376                                   finite_diff_rel_step=finite_diff_rel_step,
    377                                   bounds=new_bounds)

C:\ProgramData\Anaconda3\lib\site-packages\scipy\optimize\optimize.py in _prepare_scalar_function(fun, x0, jac, args, bounds, epsilon, finite_diff_rel_step, hess)
    259     # ScalarFunction caching. Reuse of fun(x) during grad
    260     # calculation reduces overall function evaluations.
--> 261     sf = ScalarFunction(fun, x0, args, grad, hess,
    262                         finite_diff_rel_step, bounds, epsilon=epsilon)
    263

C:\ProgramData\Anaconda3\lib\site-packages\scipy\optimize\_differentiable_functions.py in __init__(self, fun, x0, args, grad, hess, finite_diff_rel_step, finite_diff_bounds, epsilon)
     74
     75         self._update_fun_impl = update_fun
---> 76         self._update_fun()
     77
     78         # Gradient evaluation

C:\ProgramData\Anaconda3\lib\site-packages\scipy\optimize\_differentiable_functions.py in _update_fun(self)
    164     def _update_fun(self):
    165         if not self.f_updated:
--> 166             self._update_fun_impl()
    167             self.f_updated = True
    168

C:\ProgramData\Anaconda3\lib\site-packages\scipy\optimize\_differentiable_functions.py in update_fun()
     71
     72         def update_fun():
---> 73             self.f = fun_wrapped(self.x)
     74
     75         self._update_fun_impl = update_fun

C:\ProgramData\Anaconda3\lib\site-packages\scipy\optimize\_differentiable_functions.py in fun_wrapped(x)
     68         def fun_wrapped(x):
     69             self.nfev += 1
---> 70             return fun(x, *args)
     71
     72         def update_fun():

C:\ProgramData\Anaconda3\lib\site-packages\scipy\optimize\optimize.py in __call__(self, x, *args)
     72     def __call__(self, x, *args):
     73         """ returns the the function value """
---> 74         self._compute_if_needed(x, *args)
     75         return self._value
     76

C:\ProgramData\Anaconda3\lib\site-packages\scipy\optimize\optimize.py in _compute_if_needed(self, x, *args)
     66         if not np.all(x == self.x) or self._value is None or self.jac is None:
     67             self.x = np.asarray(x).copy()
---> 68             fg = self.fun(x, *args)
     69             self.jac = fg[1]
     70             self._value = fg[0]

TypeError: costFuncReg() missing 1 required positional argument: 'lambda_'
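
For reference, the likely cause: scipy.optimize.minimize forwards exactly the tuple passed as args to the objective, so with (X, y) it ends up calling costFuncReg(theta, X, y) and nothing fills lambda_. Below is a minimal sketch of a corrected call, assuming costFuncReg, X, y, and initial_theta are defined as above; lambda_ = 1 is only a placeholder since the post does not give a value. Note also that the solution vector is stored in res.x (lowercase) on the returned OptimizeResult.

from scipy import optimize

lambda_ = 1  # placeholder regularization strength; use whatever value the exercise specifies

options = {'maxiter': 400}
# Include lambda_ in the args tuple so minimize calls costFuncReg(theta, X, y, lambda_)
res = optimize.minimize(costFuncReg, initial_theta, args=(X, y, lambda_),
                        jac=True, method='TNC', options=options)

cost = res.fun
theta = res.x  # optimized parameters live in the lowercase attribute x

Because jac=True, minimize expects the objective to return the (cost, gradient) pair, which costFuncReg already does, so only the args tuple (and the res.x attribute) needs to change.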
