如何解决使用 scikit-learn GridSearchCV(配合 KerasRegressor)调参时出现的 'ValueError: not a legal parameter' 错误(注意:代码使用的是 GridSearchCV,并非 Keras Tuner)。
该错误仅在使用 LSTM 网络的构建函数时出现;如果只使用 Dense 层,同样的流程工作正常。
使用 LSTM 时发生错误的代码如下。
def model_builder(hp_units1=40, hp_learning_rate=0.001, hp_momentum=0.0):
    """Build and compile an LSTM regression model for use with KerasRegressor.

    Every hyperparameter that GridSearchCV tunes MUST be a named parameter
    of this function, and the keys of ``param_grid`` must match these names
    exactly — a mismatch is what raises
    ``ValueError: <name> is not a legal parameter``.

    Args:
        hp_units1: number of units in the first LSTM layer.
        hp_learning_rate: optimizer learning rate.
        hp_momentum: optimizer momentum.

    Returns:
        A compiled Keras ``Sequential`` model.
    """
    model = Sequential()
    # assumes global X is the (samples, timesteps) training matrix — TODO confirm
    model.add(LSTM(units=hp_units1, return_sequences=True,
                   input_shape=(X.shape[1], 1)))
    model.add(Dropout(0.2))
    # NOTE(review): return_sequences=True on the final LSTM gives a 3-D
    # output, so Dense(1) predicts per timestep; for a single regression
    # target per sample this should normally be False — confirm intent.
    model.add(LSTM(units=40, return_sequences=True))
    model.add(Dropout(0.2))
    model.add(Dense(units=1))
    # Adam has no `momentum` argument (that call raises a TypeError);
    # SGD is the optimizer that accepts momentum, matching the grid below.
    optimizer = SGD(learning_rate=hp_learning_rate, momentum=hp_momentum)
    # NOTE(review): 'accuracy' is not meaningful for a regression loss;
    # consider metrics=['mae'] instead.
    model.compile(optimizer=optimizer, loss='mean_squared_error',
                  metrics=['accuracy'])
    return model


# Candidate hyperparameter values.
hp_units1 = [30 + i * 5 for i in range(5)]
hp_learning_rate = [0.01, 0.001, 0.0001, 0.00001]
hp_momentum = [0.0, 0.2, 0.4, 0.6, 0.8, 0.9]

# The dict keys must equal the build_fn parameter names above —
# using units1/learning_rate/momentum here was the cause of
# "ValueError: not a legal parameter".
param_grid = dict(hp_units1=hp_units1,
                  hp_learning_rate=hp_learning_rate,
                  hp_momentum=hp_momentum)

# `nb_epoch` is the obsolete Keras 1.x spelling; modern wrappers expect
# `epochs` and reject unknown kwargs with the same "not a legal
# parameter" check.
model = KerasRegressor(build_fn=model_builder, epochs=100,
                       batch_size=32, verbose=0)
grid = GridSearchCV(estimator=model, param_grid=param_grid, n_jobs=-1)
grid_result = grid.fit(X, Y)
但是,使用 Dense 层效果很好。
def baseline_model(learn_rate=0.01, momentum=0):
    """Build and compile a small Dense regression network.

    Works with GridSearchCV because the tunable arguments
    (``learn_rate``, ``momentum``) are named parameters that match the
    keys of the parameter grid.

    Args:
        learn_rate: SGD learning rate.
        momentum: SGD momentum.

    Returns:
        A compiled Keras ``Sequential`` model.
    """
    model = Sequential()
    # Renamed from `input`, which shadowed the builtin of the same name.
    # assumes global X is the (samples, features) training matrix — TODO confirm
    n_features = X.shape[1]
    model.add(Dense(5, input_shape=(n_features,),
                    kernel_initializer='normal', activation='relu'))
    model.add(Dense(4, activation='linear'))
    model.add(Dense(1))
    optimizer = SGD(lr=learn_rate, momentum=momentum)
    # NOTE(review): 'accuracy' is not meaningful for a regression loss.
    model.compile(loss='mean_squared_error', optimizer=optimizer,
                  metrics=['accuracy'])
    return model
# Wrap the Dense-network builder for scikit-learn grid search.
model = KerasRegressor(build_fn=baseline_model, batch_size=10, verbose=0)

# Grid keys match baseline_model's parameter names exactly, which is
# why this variant does not raise "not a legal parameter".
learn_rate = [0.001, 0.01, 0.1, 0.3]
momentum = [0.0, 0.9]
param_grid = dict(learn_rate=learn_rate, momentum=momentum)

# The original last line was garbled (`gridsearchcv(estimator=model,Y)`);
# the class is GridSearchCV, it requires param_grid, and the data goes
# to fit().
grid = GridSearchCV(estimator=model, param_grid=param_grid, n_jobs=-1)
grid_result = grid.fit(X, Y)
版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 dio@foxmail.com 举报,一经查实,本站将立刻删除。