Trying to load a saved TensorFlow ELMo model, but getting "TypeError: 'str' object is not callable" when loading

I'm trying to load a saved TensorFlow ELMo model in a different function from the one where I trained it, because I want to make multiple predictions with the model without having to retrain it every time. My (simplified) code is as follows:

(builder.py)

from word_classifier import train_word_classifier, predict_labels

def builder(lines):

    train_word_classifier()

    for lst in lines:
        print('PRED_LABELS: ', predict_labels(lst))

(word_classifier.py)

import pandas as pd
import numpy as np

import tensorflow as tf
import tensorflow_hub as hub
from tensorflow.python.keras import backend as K

from tensorflow.keras.preprocessing.sequence import pad_sequences
from tensorflow.keras.models import Model, Sequential, model_from_json
from tensorflow.keras.layers import LSTM, Embedding, Dense, TimeDistributed, Dropout, Bidirectional, Lambda, add, Input


def train_word_classifier():

   """
   Input data preparation excluded for readability
   """

    sess = tf.compat.v1.Session()
    K.set_session(sess)

    elmo_model = hub.Module("https://tfhub.dev/google/elmo/3", trainable=True)
    init = tf.compat.v1.global_variables_initializer()

    sess.run(init)

    input_text = Input(shape=(MAX_LEN,), dtype=tf.string)

    def elmo_embedding(inData):
        return \
            elmo_model(inputs={"tokens": tf.squeeze(tf.cast(inData, tf.string)),
                               "sequence_len": tf.constant(BATCH_SIZE * [MAX_LEN])},
                       signature="tokens", as_dict=True)["elmo"]

    embedding = Lambda(lambda text, : elmo_embedding(text), output_shape=(MAX_LEN, 1024))(input_text, )

    x = Bidirectional(LSTM(units=LSTM_UNITS, return_sequences=LSTM_RETURN_SEQ,
                           recurrent_dropout=LSTM_RO_DROPOUT, dropout=LSTM_DROPOUT))(embedding)
    x_rnn = Bidirectional(LSTM(units=LSTM_UNITS, return_sequences=LSTM_RETURN_SEQ,
                               recurrent_dropout=LSTM_RO_DROPOUT, dropout=LSTM_DROPOUT))(x)
    x = add([x, x_rnn])  # residual connection to the first biLSTM
    out = TimeDistributed(Dense(n_tags, activation="softmax"))(x)
    model = Model(input_text, out)
    model.compile(optimizer="adam", loss="sparse_categorical_crossentropy", metrics=["accuracy"])

    line_count_training_data = count_lines_in_file(CLASSIFIER_SENTENCE_FILE, 10)
    size_train, size_test = get_count_for_batch_train_test_data(line_count_training_data)
    print(size_train)
    mode_dict = {
            "train": size_train,
            "test": size_test,
        }

    x_tr, x_val = x_tr[:mode_dict["train"] * BATCH_SIZE], x_tr[-mode_dict["test"] * BATCH_SIZE:]
    y_tr, y_val = y_tr[:mode_dict["train"] * BATCH_SIZE], y_tr[-mode_dict["test"] * BATCH_SIZE:]
    y_tr = y_tr.reshape(y_tr.shape[0], y_tr.shape[1], 1)
    y_val = y_val.reshape(y_val.shape[0], y_val.shape[1], 1)
    
    history = model.fit(np.array(x_tr),
                        y_tr,
                        validation_data=(np.array(x_val), y_val),
                        batch_size=BATCH_SIZE,
                        epochs=NUM_EPOCHS,
                        verbose=VERBOSE_VALUE)

    model_json = model.to_json()
    with open("resources/SavedModel/word_classifier/model.json", "w") as json_file:
        json_file.write(model_json)
    # serialize weights to HDF5
    model.save_weights("resources/SavedModel/word_classifier/model.h5")


def predict_labels(input_data_list):

    with open('resources/SavedModel/word_classifier/model.json', 'r') as json_file:
        loaded_model_json = json_file.read()

    def elmo_embedding(inData):
        return \
            elmo_model(inputs={"tokens": tf.squeeze(tf.cast(inData, tf.string)),
                               "sequence_len": tf.constant(BATCH_SIZE * [MAX_LEN])},
                       signature="tokens", as_dict=True)["elmo"]

    loaded_model = tf.keras.models.model_from_json(loaded_model_json, custom_objects={'elmo_embedding': elmo_embedding})

    # load weights into new model
    loaded_model.load_weights("resources/SavedModel/word_classifier/model.h5")
    print("Loaded model from disk")

In the end, after training the model, this gives me the error "TypeError: 'str' object is not callable" with the following traceback:

Traceback (most recent call last):
  File "usc_coordinator.py", line 62, in <module>
    run_usc_coordinator(fIn, fOut, mode)
  File "usc_coordinator.py", line 32, in run_usc_coordinator
    user_story_builder(fast_mode, file_in)
  File "/home/ubuntu/PA/PA_AI4US/PythonVersion/src/builder.py", line 45, in builder
    print('PRED_LABELS: ', predict_labels(lst))
  File "/home/ubuntu/PA/PA_AI4US/PythonVersion/src/word_classifier.py", line 161, in predict_labels
    loaded_model = tf.keras.models.model_from_json(loaded_model_json, custom_objects={'elmo_embedding': elmo_embedding})
  File "/home/ubuntu/.local/lib/python3.8/site-packages/tensorflow/python/keras/saving/model_config.py", line 122, in model_from_json
    return deserialize(config, custom_objects=custom_objects)
  File "/home/ubuntu/.local/lib/python3.8/site-packages/tensorflow/python/keras/layers/serialization.py", line 171, in deserialize
    return generic_utils.deserialize_keras_object(
  File "/home/ubuntu/.local/lib/python3.8/site-packages/tensorflow/python/keras/utils/generic_utils.py", line 354, in deserialize_keras_object
    return cls.from_config(
  File "/home/ubuntu/.local/lib/python3.8/site-packages/tensorflow/python/keras/engine/functional.py", line 616, in from_config
    input_tensors, output_tensors, created_layers = reconstruct_from_config(
  File "/home/ubuntu/.local/lib/python3.8/site-packages/tensorflow/python/keras/engine/functional.py", line 1214, in reconstruct_from_config
    process_node(layer, node_data)
  File "/home/ubuntu/.local/lib/python3.8/site-packages/tensorflow/python/keras/engine/functional.py", line 1162, in process_node
    output_tensors = layer(input_tensors, **kwargs)
  File "/home/ubuntu/.local/lib/python3.8/site-packages/tensorflow/python/keras/engine/base_layer_v1.py", line 776, in __call__
    outputs = call_fn(cast_inputs, *args, **kwargs)
  File "/home/ubuntu/.local/lib/python3.8/site-packages/tensorflow/python/keras/layers/core.py", line 903, in call
    result = self.function(inputs, **kwargs)
  File "/home/ubuntu/PA/PA_AI4US/PythonVersion/src/word_classifier.py", line 101, in <lambda>
    embedding = Lambda(lambda text, : elmo_embedding(text), output_shape=(MAX_LEN, 1024))(input_text, )
TypeError: 'str' object is not callable

My versions are:

Python 3.8.10
Keras 2.3.0
Tensorflow 2.3.1
Tensorflow-hub 0.10.0

My guess is that the error is caused by the variable input_text, which is defined with dtype tf.string. However, I don't know what to do about that without breaking the training sequence.

I hope that somebody can help!

asked Sep 20 '25 by StrawberryPizza


1 Answer

This is a known bug in TensorFlow 2.3.1:

Loading a model with a Lambda layer causes a 'str' object is not callable exception #46659

https://github.com/tensorflow/tensorflow/issues/46659
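
A possible workaround until you can move to a TensorFlow release where this is fixed is to avoid model_from_json entirely: rebuild the architecture in code with the same constants used for training and load only the HDF5 weights, so the Lambda layer config is never deserialized. Below is a minimal sketch that mirrors the training code from the question; the build_word_classifier() helper is hypothetical, and it assumes the same imports and the same MAX_LEN, BATCH_SIZE, LSTM_* and n_tags values as in word_classifier.py:

def build_word_classifier(elmo_model):
    # Rebuild the same graph as in train_word_classifier(),
    # so the saved weights line up layer by layer.
    def elmo_embedding(in_data):
        return elmo_model(inputs={"tokens": tf.squeeze(tf.cast(in_data, tf.string)),
                                  "sequence_len": tf.constant(BATCH_SIZE * [MAX_LEN])},
                          signature="tokens", as_dict=True)["elmo"]

    input_text = Input(shape=(MAX_LEN,), dtype=tf.string)
    embedding = Lambda(elmo_embedding, output_shape=(MAX_LEN, 1024))(input_text)
    x = Bidirectional(LSTM(units=LSTM_UNITS, return_sequences=LSTM_RETURN_SEQ,
                           recurrent_dropout=LSTM_RO_DROPOUT, dropout=LSTM_DROPOUT))(embedding)
    x_rnn = Bidirectional(LSTM(units=LSTM_UNITS, return_sequences=LSTM_RETURN_SEQ,
                               recurrent_dropout=LSTM_RO_DROPOUT, dropout=LSTM_DROPOUT))(x)
    x = add([x, x_rnn])
    out = TimeDistributed(Dense(n_tags, activation="softmax"))(x)
    return Model(input_text, out)


def predict_labels(input_data_list):
    # Recreate the session and the hub module exactly as during training.
    sess = tf.compat.v1.Session()
    K.set_session(sess)
    elmo_model = hub.Module("https://tfhub.dev/google/elmo/3", trainable=True)
    sess.run(tf.compat.v1.global_variables_initializer())

    loaded_model = build_word_classifier(elmo_model)
    # Only the weights are read from disk; no Lambda deserialization happens.
    loaded_model.load_weights("resources/SavedModel/word_classifier/model.h5")
    print("Loaded model from disk")
    # ... run predictions on input_data_list here

Because load_weights only matches weights to layers, the JSON config file (and with it the buggy Lambda deserialization path) is never touched at prediction time. Upgrading to a newer TensorFlow release where the linked issue is resolved should also remove the need for this workaround.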

answered Sep 22 '25 by AndrzejO