I'm trying to instantiate a BERT model with TensorFlow/Keras. This code ran correctly until a few days ago, but now it raises the error below. I've used the same code to instantiate BERT models in other Kaggle notebooks and it worked fine. Can anyone give me a hint? Note that I pin Transformers to version 4.31.0 to work around a different error (the install line is shown after the snippet). The code I use is:
import tensorflow as tf
from tensorflow.keras.layers import Input, Dropout, Dense
from tensorflow.keras.models import Model
from transformers import TFBertModel

# Create the BERT model for BYTECODE
# (max_seq_length and dropout_rate are defined earlier in the notebook)
BC_input_layer = Input(shape=(max_seq_length,), dtype=tf.int32, name="input_ids")
BC_bert_model = TFBertModel.from_pretrained("bert-base-uncased")
BC_pooler_output = BC_bert_model(BC_input_layer)[1]  # index 1 is the pooler_output
BC_dropout_layer = Dropout(dropout_rate)(BC_pooler_output)
BC_output_layer = Dense(6, activation='sigmoid')(BC_dropout_layer)
BC_model = Model(inputs=BC_input_layer, outputs=BC_output_layer)
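For reference, this is roughly what I run at the top of the notebook to pin Transformers and check the environment (the printed TensorFlow/Keras versions are just whatever the default Kaggle image currently ships, so they may have changed recently):

# Pin Transformers; TensorFlow and Keras come from the Kaggle base image
!pip install -q transformers==4.31.0

import tensorflow as tf
import transformers
import keras

print("transformers:", transformers.__version__)  # expected: 4.31.0
print("tensorflow:", tf.__version__)              # provided by the Kaggle image
print("keras:", keras.__version__)                # provided by the Kaggle image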
The error is:
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
Cell In[10], line 19
17 SC_input_layer = Input(shape=(max_seq_length,), dtype=tf.int32, name="input_ids")
18 SC_mask_layer = Input(shape=(max_seq_length,), dtype=tf.int32, name="attention_mask")
---> 19 SC_bert_model = TFBertModel.from_pretrained("bert-base-uncased")
     20 SC_pooler_output = SC_bert_model(SC_input_layer, attention_mask=SC_mask_layer)[1] # Extract the second output, which is the pooler_output
     22 # Add a Dropout layer
File /opt/conda/lib/python3.10/site-packages/transformers/modeling_tf_utils.py:2894, in TFPreTrainedModel.from_pretrained(cls, pretrained_model_name_or_path, config, cache_dir, ignore_mismatched_sizes, force_download, local_files_only, token, revision, *model_args, **kwargs)
2892 model.build() # build the network with dummy inputs
2893 else:
-> 2894 model.build() # build the network with dummy inputs
2896 if safetensors_from_pt:
2897 from .modeling_tf_pytorch_utils import load_pytorch_state_dict_in_tf2_model
File /opt/conda/lib/python3.10/site-packages/keras/src/layers/layer.py:224, in Layer.__new__.<locals>.build_wrapper(*args, **kwargs)
221 @wraps(original_build_method)
222 def build_wrapper(*args, **kwargs):
223 with backend.name_scope(obj.name, caller=obj):
--> 224 original_build_method(*args, **kwargs)
225 # Record build config.
226 signature = inspect.signature(original_build_method)
File /opt/conda/lib/python3.10/site-packages/transformers/modeling_tf_utils.py:1131, in TFPreTrainedModel.build(self, input_shape)
1129 def build(self, input_shape=None):
1130 call_context = get_call_context_function()
-> 1131 if self.built or call_context().in_call:
1132 self.built = True
1133 else:
TypeError: 'NoneType' object is not callable