Could not find matching function to call loaded model
I trained a deep neural network that uses a custom layer and was able to train it successfully. However, when I saved the model and tried to reload it for retraining, I got the error in the title.
I've tried adding a get_config method to the custom layer:
class SkipCon(keras.layers.Layer):
    def __init__(self, size, reduce=True, deep=3, skip_when=0, activation="relu", **kwargs):
        super().__init__(**kwargs)
        self.activation = keras.activations.get(activation)  # used to combine
        # skip connections and cascaded dense layers
        self.main_layers = []
        self.skip_when = skip_when  # to be used in call as a control
        if reduce:
            for _ in range(deep):
                self.main_layers.extend([
                    keras.layers.Dense(size, activation=activation,
                                       use_bias=True),
                    keras.layers.BatchNormalization()])
                # Reduce the input size by two each time, if the
                # network is to be designed deeper and narrow
                size = size / 2
        else:
            for _ in range(deep):
                self.main_layers.extend([
                    keras.layers.Dense(size, activation=activation,
                                       use_bias=True),
                    keras.layers.BatchNormalization()])
        self.skip_layers = []
        if skip_when > 0:
            if reduce:
                size = size * 2  # since the size of skipped connection
                # should match with cascaded dense
            self.skip_layers = [
                keras.layers.Dense(size, activation=activation,
                                   use_bias=True),
                keras.layers.BatchNormalization()]

    def call(self, inputs):
        Z = inputs
        for layer in self.main_layers:
            Z = layer(Z)
        if not self.skip_when:
            return self.activation(Z)
        skip_Z = inputs
        for layer in self.skip_layers:
            skip_Z = layer(skip_Z)
        return self.activation(Z + skip_Z)

    def get_config(self):
        return {"Z": self.activation}

    @classmethod
    def from_config(cls, config):
        return cls(**config)
Building the model with the skip-connection layers:
user_layer = keras.layers.Dense(256, activation='relu', use_bias = True)(feature_layer_users)
# Add a Skip Connection
user_layer = SkipCon(size = 256, deep = 2, reduce = False, skip_when=1, activation="relu")(user_layer)
user_layer = keras.layers.Dropout(0.5)(user_layer) # Way to handle overfitting
user_layer = SkipCon(size = 256, deep = 2, reduce = True, skip_when=1, activation="relu")(user_layer)
user_layer = keras.layers.Dropout(0.5)(user_layer)
user_layer = SkipCon(size = 64, deep = 2, reduce = True, skip_when=0, activation="relu")(user_layer)
item_layer = keras.layers.Dense(256, activation='relu', use_bias = True)(feature_layer_items)
# Add a Skip Connection
item_layer = SkipCon(size = 256, deep = 2, reduce = False, skip_when=1, activation="relu")(item_layer)
item_layer = keras.layers.Dropout(0.5)(item_layer) # Way to handle overfitting
item_layer = SkipCon(size = 256, deep = 2, reduce = True, skip_when=1, activation="relu")(item_layer)
item_layer = keras.layers.Dropout(0.5)(item_layer)
item_layer = SkipCon(size = 64, deep = 2, reduce = True, skip_when=0, activation="relu")(item_layer)
combined = tf.concat([user_layer, item_layer], axis =-1)
both_layer = SkipCon(size = 128, deep = 2, reduce = False, skip_when=1, activation="relu")(combined)
both_layer = keras.layers.Dropout(0.5)(both_layer)
both_layer = SkipCon(size = 64, deep = 2, reduce = False, skip_when=1, activation="relu")(combined)
both_layer = keras.layers.Dropout(0.5)(both_layer)
both_layer = SkipCon(size = 16, deep = 2, reduce = False, skip_when=0, activation="relu")(both_layer)
z = keras.layers.Dense(3, activation="softmax")(both_layer)
model = keras.Model(inputs=[input_user, input_items], outputs=z)
Saving the model:
model.save("my_model")
Reloading the model:
model = keras.models.load_model('my_model', custom_objects={"SkipCon":SkipCon})
Error:
ValueError: Could not find matching function to call loaded from the SavedModel. Got:
Positional arguments (2 total):
1 Answer
I believe this is because your get_config(self) method should return the arguments of your custom layer. I couldn't completely replicate your example since I'm missing your input layers, but adjusting the get_config function allowed me to load the model properly after saving it.
https://github.com/tensorflow/tensorflow/issues/37973
https://www.tensorflow.org/guide/keras/save_and_serialize#custom_objects
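For illustration, the corrected methods could look roughly like the sketch below. This is a minimal sketch, assuming the constructor also stores size, reduce, deep, and skip_when on self (the original only keeps skip_when and activation), so that get_config returns exactly the keyword arguments that __init__ accepts:

class SkipCon(keras.layers.Layer):
    def __init__(self, size, reduce=True, deep=3, skip_when=0, activation="relu", **kwargs):
        super().__init__(**kwargs)
        # Keep every constructor argument so get_config can echo it back.
        self.size = size
        self.reduce = reduce
        self.deep = deep
        self.skip_when = skip_when
        self.activation = keras.activations.get(activation)
        # ... build self.main_layers and self.skip_layers exactly as in the question ...

    def get_config(self):
        # Return the base config plus the same kwargs __init__ accepts,
        # so from_config(cls, config) can rebuild the layer.
        base_config = super().get_config()
        return {**base_config,
                "size": self.size,
                "reduce": self.reduce,
                "deep": self.deep,
                "skip_when": self.skip_when,
                "activation": keras.activations.serialize(self.activation)}

    @classmethod
    def from_config(cls, config):
        return cls(**config)

With get_config returning the constructor arguments, model.save("my_model") followed by keras.models.load_model('my_model', custom_objects={"SkipCon": SkipCon}) should rebuild the layer from its config instead of failing with the "Could not find matching function" error.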