ValueError: Unknown loss function: categorical crossentropy. Please ensure this object is passed to `custom_objects`



I am trying to build a chatbot using machine learning. I wrote this code while following a YouTube tutorial, but I am getting an error I don't understand: it says the loss function is unknown and asks me to pass the object to `custom_objects`. I don't know what to do. If anyone could help me, that would be great.

Here is my code:

import random as rd
import json
import pickle
import numpy as np
import nltk
from nltk.stem import WordNetLemmatizer
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Activation, Dropout
from tensorflow.keras.optimizers import SGD

lemmatizer = WordNetLemmatizer()
intents = json.loads(open('train.json').read())

words = []
classes = []
documents = []
ignore_letters = ['!', '?', '.', ',']

for intent in intents['intents']:
    for pattern in intent["patterns"]:
        word_list = nltk.word_tokenize(pattern)
        words.extend(word_list)
        documents.append((word_list, intent['tag']))
        if intent['tag'] not in classes:
            classes.append(intent['tag'])

words = [lemmatizer.lemmatize(word) for word in words if word not in ignore_letters]
words = sorted(set(words))
classes = sorted(set(classes))

pickle.dump(words, open('words.pkl', 'wb'))
pickle.dump(classes, open('classes.pkl', 'wb'))

training = []
output_empty = [0] * len(classes)

for document in documents:
    bag = []
    word_patterns = document[0]
    word_patterns = [lemmatizer.lemmatize(word.lower()) for word in word_patterns]
    for word in words:
        bag.append(1) if word in word_patterns else bag.append(0)
    output_row = list(output_empty)
    output_row[classes.index(document[1])] = 1
    training.append([bag, output_row])

rd.shuffle(training)
training = np.array(training)

train_x = list(training[:, 0])
train_y = list(training[:, 1])

model = Sequential()
model.add(Dense(128, input_shape=(len(train_x[0]),), activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(64, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(len(train_y[0]), activation='softmax'))

sgd = SGD(lr=0, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='categorical crossentropy', optimizer=sgd, metrics=['accuracy'])

hist = model.fit(np.array(train_x), np.array(train_y), epochs=200, batch_size=5, verbose=True)
model.save('chatbot_model.h5', hist)
print('done')

When I run it, it returns:

2021-06-03 23:29:11.692757: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'cudart64_110.dll'; dlerror: cudart64_110.dll not found
2021-06-03 23:29:11.692947: I tensorflow/stream_executor/cuda/cudart_stub.cc:29] Ignore above cudart dlerror if you do not have a GPU set up on your machine.
c:\Speechtotext\training.py:53: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray
training = np.array(training)
2021-06-03 23:29:14.418464: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'nvcuda.dll'; dlerror: nvcuda.dll not found
2021-06-03 23:29:14.418804: W tensorflow/stream_executor/cuda/cuda_driver.cc:326] failed call to cuInit: UNKNOWN ERROR (303)
2021-06-03 23:29:14.427096: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:169] retrieving CUDA diagnostic information for host: DESKTOP-LEKVIN8
2021-06-03 23:29:14.427515: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:176] hostname: DESKTOP-LEKVIN8
2021-06-03 23:29:14.428318: I tensorflow/core/platform/cpu_feature_guard.cc:142] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations:  AVX AVX2
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
2021-06-03 23:29:14.523825: I tensorflow/compiler/mlir/mlir_graph_optimization_pass.cc:176] None of the MLIR Optimization Passes are enabled (registered 2)
Epoch 1/200
Traceback (most recent call last):
  File "c:\Speechtotext\training.py", line 68, in <module>
    hist=model.fit(np.array(train_x), np.array(train_y), epochs=200, batch_size=5, verbose=True )
  File "C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\keras\engine\training.py", line 1183, in fit
    tmp_logs = self.train_function(iterator)
  File "C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\eager\def_function.py", line 889, in __call__
    result = self._call(*args, **kwds)
  File "C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\eager\def_function.py", line 933, in _call
    self._initialize(args, kwds, add_initializers_to=initializers)
  File "C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\eager\def_function.py", line 763, in _initialize
    self._stateful_fn._get_concrete_function_internal_garbage_collected(  # pylint: disable=protected-access
  File "C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\eager\function.py", line 3050, in _get_concrete_function_internal_garbage_collected
    graph_function, _ = self._maybe_define_function(args, kwargs)
  File "C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\eager\function.py", line 3444, in _maybe_define_function
    graph_function = self._create_graph_function(args, kwargs)
  File "C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\eager\function.py", line 3279, in _create_graph_function
    func_graph_module.func_graph_from_py_func(
  File "C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\framework\func_graph.py", line 999, in func_graph_from_py_func
    func_outputs = python_func(*func_args, **func_kwargs)
  File "C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\eager\def_function.py", line 672, in wrapped_fn
    out = weak_wrapped_fn().__wrapped__(*args, **kwds)
  File "C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\framework\func_graph.py", line 986, in wrapper
    raise e.ag_error_metadata.to_exception(e)
ValueError: in user code:

    C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\keras\engine\training.py:855 train_function  *
        return step_function(self, iterator)
    C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\keras\engine\training.py:845 step_function  **
        outputs = model.distribute_strategy.run(run_step, args=(data,))
    C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\distribute\distribute_lib.py:1285 run
        return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
    C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\distribute\distribute_lib.py:2833 call_for_each_replica
        return self._call_for_each_replica(fn, args, kwargs)
    C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\distribute\distribute_lib.py:3608 _call_for_each_replica
        return fn(*args, **kwargs)
    C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\keras\engine\training.py:838 run_step  **
        outputs = model.train_step(data)
    C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\keras\engine\training.py:796 train_step
        loss = self.compiled_loss(
    C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\keras\engine\compile_utils.py:187 __call__
        self.build(y_pred)
    C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\keras\engine\compile_utils.py:136 build
        self._losses = nest.map_structure(self._get_loss_object, self._losses)
    C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\util\nest.py:867 map_structure
        structure[0], [func(*x) for x in entries],
    C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\util\nest.py:867 <listcomp>
        structure[0], [func(*x) for x in entries],
    C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\keras\engine\compile_utils.py:276 _get_loss_object
        loss = losses_mod.get(loss)
    C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\keras\losses.py:2078 get
        return deserialize(identifier)
    C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\keras\losses.py:2033 deserialize
        return deserialize_keras_object(
    C:\Users\anish\AppData\Roaming\Python\Python39\site-packages\tensorflow\python\keras\utils\generic_utils.py:698 deserialize_keras_object
        raise ValueError(

    ValueError: Unknown loss function: categorical crossentropy. Please ensure this object is passed to the `custom_objects` argument. See https://www.tensorflow.org/guide/keras/save_and_serialize#registering_the_custom_object for details.

Any help is greatly appreciated.

It looks like you have a typo in the loss name. Try this instead (note the underscore):

model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
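
If you want to rule out this kind of typo entirely, you can pass the loss object instead of a string. A minimal sketch, assuming TensorFlow 2.x and reusing the model and sgd variables from your script:

from tensorflow.keras.losses import CategoricalCrossentropy

# Passing the loss object directly skips the string lookup that raised
# "Unknown loss function"; a misspelled class name fails at import time instead.
model.compile(loss=CategoricalCrossentropy(), optimizer=sgd, metrics=['accuracy'])

Unrelated to the error: SGD(lr=0, ...) sets the learning rate to zero, so the weights will never update during training. You probably want a small positive value such as 0.01, and recent TensorFlow versions prefer the learning_rate argument over the deprecated lr.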

Alternatively, loss='sparse_categorical_crossentropy' also exists, but it expects integer class labels rather than the one-hot output_row vectors built above, so 'categorical_crossentropy' (with the underscore) is the right choice for this script.
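
For reference, a small sketch with hypothetical label arrays (not from your data) showing when each variant applies: categorical_crossentropy pairs with one-hot targets, sparse_categorical_crossentropy pairs with integer class indices, and both give the same value for equivalent labels.

import numpy as np
import tensorflow as tf

# Hypothetical example: 2 samples, 3 classes.
probs = np.array([[0.7, 0.2, 0.1],
                  [0.1, 0.8, 0.1]], dtype=np.float32)  # softmax outputs from a model

one_hot = np.array([[1, 0, 0],
                    [0, 1, 0]], dtype=np.float32)      # one-hot targets -> categorical_crossentropy
indices = np.array([0, 1])                             # integer targets -> sparse_categorical_crossentropy

cce = tf.keras.losses.CategoricalCrossentropy()
scce = tf.keras.losses.SparseCategoricalCrossentropy()

print(cce(one_hot, probs).numpy())   # ~0.29
print(scce(indices, probs).numpy())  # same value, ~0.29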
