TypeError:缺少所需的位置参数。当使用Kerastuner调整Ann深学习模型时
我正在使用人造神经网络(ANN)模型进行超参数调整刚刚使用 kerastuner 我想使用它来进行二进制分类。以下是我的代码:
import tensorflow as tf
from tensorflow import keras
from keras import Input
from keras.models import Sequential
from keras.layers import Dense, Flatten, Dropout, BatchNormalization
import keras_tuner as kt
from keras_tuner.tuners import RandomSearch
from keras_tuner.tuners import Hyperband
from keras_tuner import HyperModel
def build_model(hp):
# Create a Sequential model
model = tf.keras.Sequential()
# Input Layer: The now model will take as input arrays of shape (None, 67). My dataset has 67 columns.
model.add(tf.keras.Input(shape = (67,)))
# Tune number of hidden layers and number of neurons
for i in range(hp.Int('num_layers', 1, 3)):
hp_units = hp.Int(f'units_{i}', min_value = 32, max_value = 512, step = 32)
model.add(Dense(units = hp_units, activation = 'relu'))
# Output Layer
model.add(Dense(units = 1, activation='sigmoid'))
# Compile the model
hp_learning_rate = hp.Choice('learning_rate', values = [1e-2, 1e-3, 1e-4])
model.compile(optimizer = keras.optimizers.Adam(learning_rate = hp_learning_rate),
loss = keras.losses.binary_crossentropy(),
metrics = ["accuracy"]
)
return model
# HyperBand algorithm from keras tuner
hpb_tuner = kt.Hyperband(
hypermodel = build_model,
objective = 'val_accuracy',
max_epochs = 50,
factor = 3,
seed = 42,
executions_per_trial = 3,
directory = 'ANN_Parameters_Tuning',
project_name = 'Medical Claim'
)
然后,我面对下面的问题:
TypeError Traceback (most recent call last)
<ipython-input-114-b58f291b49ae> in <module>
1 # HyperBand algorithm from keras tuner
2
----> 3 hpb_tuner = kt.Hyperband(
4 hypermodel = build_model,
5 objective = 'val_accuracy',
~\anaconda3\envs\medicalclaim\lib\site-packages\keras_tuner\tuners\hyperband.py in __init__(self, hypermodel, objective, max_epochs, factor, hyperband_iterations, seed, hyperparameters, tune_new_entries, allow_new_entries, **kwargs)
373 allow_new_entries=allow_new_entries,
374 )
--> 375 super(Hyperband, self).__init__(
376 oracle=oracle, hypermodel=hypermodel, **kwargs
377 )
~\anaconda3\envs\medicalclaim\lib\site-packages\keras_tuner\engine\tuner.py in __init__(self, oracle, hypermodel, max_model_size, optimizer, loss, metrics, distribution_strategy, directory, project_name, logger, tuner_id, overwrite, executions_per_trial)
108 )
109
--> 110 super(Tuner, self).__init__(
111 oracle=oracle,
112 hypermodel=hypermodel,
~\anaconda3\envs\medicalclaim\lib\site-packages\keras_tuner\engine\base_tuner.py in __init__(self, oracle, hypermodel, directory, project_name, logger, overwrite)
101 self._display = tuner_utils.Display(oracle=self.oracle)
102
--> 103 self._populate_initial_space()
104
105 if not overwrite and tf.io.gfile.exists(self._get_tuner_fname()):
~\anaconda3\envs\medicalclaim\lib\site-packages\keras_tuner\engine\base_tuner.py in _populate_initial_space(self)
130
131 while True:
--> 132 self.hypermodel.build(hp)
133
134 # Update the recored scopes.
<ipython-input-113-ac44a2da327d> in build_model(hp)
18 hp_learning_rate = hp.Choice('learning_rate', values = [1e-2, 1e-3, 1e-4])
19 model.compile(optimizer = keras.optimizers.Adam(learning_rate = hp_learning_rate),
---> 20 loss = keras.losses.binary_crossentropy(),
21 metrics = ["accuracy"]
22 )
~\anaconda3\envs\medicalclaim\lib\site-packages\tensorflow\python\util\traceback_utils.py in error_handler(*args, **kwargs)
151 except Exception as e:
152 filtered_tb = _process_traceback_frames(e.__traceback__)
--> 153 raise e.with_traceback(filtered_tb) from None
154 finally:
155 del filtered_tb
~\anaconda3\envs\medicalclaim\lib\site-packages\tensorflow\python\util\dispatch.py in op_dispatch_handler(*args, **kwargs)
1088 if iterable_params is not None:
1089 args, kwargs = replace_iterable_params(args, kwargs, iterable_params)
-> 1090 result = api_dispatcher.Dispatch(args, kwargs)
1091 if result is not NotImplemented:
1092 return result
TypeError: Missing required positional argument
即使我使用Randomsearch
来自Kerastuner的也具有与上述追溯相同的错误。以下是我的RandomSearch
的代码:
# RandomSearch algorithm from keras tuner
random_tuner = RandomSearch(
hypermodel = build_model,
objective = 'val_accuracy',
max_trials = 50,
seed = 42,
overwrite = True,
executions_per_trial = 3,
directory = 'ANN_Parameters_Tuning',
project_name = 'Medical Claim'
)
I was doing hyperparameter tuning for my Artificial Neural Network (ANN) model just now using KerasTuner where I want to use it to do binary classification. Below is my codes:
import tensorflow as tf
from tensorflow import keras
from keras import Input
from keras.models import Sequential
from keras.layers import Dense, Flatten, Dropout, BatchNormalization
import keras_tuner as kt
from keras_tuner.tuners import RandomSearch
from keras_tuner.tuners import Hyperband
from keras_tuner import HyperModel
def build_model(hp):
# Create a Sequential model
model = tf.keras.Sequential()
# Input Layer: The now model will take as input arrays of shape (None, 67). My dataset has 67 columns.
model.add(tf.keras.Input(shape = (67,)))
# Tune number of hidden layers and number of neurons
for i in range(hp.Int('num_layers', 1, 3)):
hp_units = hp.Int(f'units_{i}', min_value = 32, max_value = 512, step = 32)
model.add(Dense(units = hp_units, activation = 'relu'))
# Output Layer
model.add(Dense(units = 1, activation='sigmoid'))
# Compile the model
hp_learning_rate = hp.Choice('learning_rate', values = [1e-2, 1e-3, 1e-4])
model.compile(optimizer = keras.optimizers.Adam(learning_rate = hp_learning_rate),
loss = keras.losses.binary_crossentropy(),
metrics = ["accuracy"]
)
return model
# HyperBand algorithm from keras tuner
hpb_tuner = kt.Hyperband(
hypermodel = build_model,
objective = 'val_accuracy',
max_epochs = 50,
factor = 3,
seed = 42,
executions_per_trial = 3,
directory = 'ANN_Parameters_Tuning',
project_name = 'Medical Claim'
)
Then, I face below issue:
TypeError Traceback (most recent call last)
<ipython-input-114-b58f291b49ae> in <module>
1 # HyperBand algorithm from keras tuner
2
----> 3 hpb_tuner = kt.Hyperband(
4 hypermodel = build_model,
5 objective = 'val_accuracy',
~\anaconda3\envs\medicalclaim\lib\site-packages\keras_tuner\tuners\hyperband.py in __init__(self, hypermodel, objective, max_epochs, factor, hyperband_iterations, seed, hyperparameters, tune_new_entries, allow_new_entries, **kwargs)
373 allow_new_entries=allow_new_entries,
374 )
--> 375 super(Hyperband, self).__init__(
376 oracle=oracle, hypermodel=hypermodel, **kwargs
377 )
~\anaconda3\envs\medicalclaim\lib\site-packages\keras_tuner\engine\tuner.py in __init__(self, oracle, hypermodel, max_model_size, optimizer, loss, metrics, distribution_strategy, directory, project_name, logger, tuner_id, overwrite, executions_per_trial)
108 )
109
--> 110 super(Tuner, self).__init__(
111 oracle=oracle,
112 hypermodel=hypermodel,
~\anaconda3\envs\medicalclaim\lib\site-packages\keras_tuner\engine\base_tuner.py in __init__(self, oracle, hypermodel, directory, project_name, logger, overwrite)
101 self._display = tuner_utils.Display(oracle=self.oracle)
102
--> 103 self._populate_initial_space()
104
105 if not overwrite and tf.io.gfile.exists(self._get_tuner_fname()):
~\anaconda3\envs\medicalclaim\lib\site-packages\keras_tuner\engine\base_tuner.py in _populate_initial_space(self)
130
131 while True:
--> 132 self.hypermodel.build(hp)
133
134 # Update the recored scopes.
<ipython-input-113-ac44a2da327d> in build_model(hp)
18 hp_learning_rate = hp.Choice('learning_rate', values = [1e-2, 1e-3, 1e-4])
19 model.compile(optimizer = keras.optimizers.Adam(learning_rate = hp_learning_rate),
---> 20 loss = keras.losses.binary_crossentropy(),
21 metrics = ["accuracy"]
22 )
~\anaconda3\envs\medicalclaim\lib\site-packages\tensorflow\python\util\traceback_utils.py in error_handler(*args, **kwargs)
151 except Exception as e:
152 filtered_tb = _process_traceback_frames(e.__traceback__)
--> 153 raise e.with_traceback(filtered_tb) from None
154 finally:
155 del filtered_tb
~\anaconda3\envs\medicalclaim\lib\site-packages\tensorflow\python\util\dispatch.py in op_dispatch_handler(*args, **kwargs)
1088 if iterable_params is not None:
1089 args, kwargs = replace_iterable_params(args, kwargs, iterable_params)
-> 1090 result = api_dispatcher.Dispatch(args, kwargs)
1091 if result is not NotImplemented:
1092 return result
TypeError: Missing required positional argument
Even if I use RandomSearch
from KerasTuner also has the same error as above traceback. Below is my codes for RandomSearch
:
# RandomSearch algorithm from keras tuner
random_tuner = RandomSearch(
hypermodel = build_model,
objective = 'val_accuracy',
max_trials = 50,
seed = 42,
overwrite = True,
executions_per_trial = 3,
directory = 'ANN_Parameters_Tuning',
project_name = 'Medical Claim'
)
如果你对这篇内容有疑问,欢迎到本站社区发帖提问 参与讨论,获取更多帮助,或者扫码二维码加入 Web 技术交流群。
绑定邮箱获取回复消息
由于您还没有绑定你的真实邮箱,如果其他用户或者作者回复了您的评论,将不能在第一时间通知您!
发布评论