I'm trying to build the "ner_ontonotes_bert_mult" model in Google Colab, using the example from the documentation:
from deeppavlov import build_model, configs
ner_model = build_model(configs.ner.ner_ontonotes_bert_mult, download=True)
but I get the error: TypeError: __init__() got an unexpected keyword argument 'num_tags'
P.S. If I try to load another model (e.g. "ner_rus_bert"), the error does not appear.
Full error (maybe the error is related to the file /packages/deeppavlov/models/torch_bert/crf.py):
2022-12-17 13:08:23.235 INFO in 'deeppavlov.download'['download'] at line 138: Skipped http://files.deeppavlov.ai/v1/ner/ner_ontonotes_bert_mult_torch_crf.tar.gz download because of matching hashes
Some weights of the model checkpoint at bert-base-multilingual-cased were not used when initializing BertForTokenClassification: ['cls.predictions.transform.LayerNorm.weight', 'cls.predictions.transform.LayerNorm.bias', 'cls.predictions.bias', 'cls.predictions.decoder.weight', 'cls.seq_relationship.bias', 'cls.predictions.transform.dense.weight', 'cls.predictions.transform.dense.bias', 'cls.seq_relationship.weight']
- This IS expected if you are initializing BertForTokenClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).
- This IS NOT expected if you are initializing BertForTokenClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).
Some weights of BertForTokenClassification were not initialized from the model checkpoint at bert-base-multilingual-cased and are newly initialized: ['classifier.bias', 'classifier.weight']
You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.
2022-12-17 13:08:30.1 ERROR in 'deeppavlov.core.common.params'['params'] at line 108: Exception in <class 'deeppavlov.models.torch_bert.torch_transformers_sequence_tagger.TorchTransformersSequenceTagger'>
Traceback (most recent call last):
File "/usr/local/lib/python3.8/dist-packages/deeppavlov/core/common/params.py", line 102, in from_params
component = obj(**dict(config_params, **kwargs))
File "/usr/local/lib/python3.8/dist-packages/deeppavlov/models/torch_bert/torch_transformers_sequence_tagger.py", line 182, in __init__
super().__init__(optimizer=optimizer,
File "/usr/local/lib/python3.8/dist-packages/deeppavlov/core/models/torch_model.py", line 98, in __init__
self.load()
File "/usr/local/lib/python3.8/dist-packages/deeppavlov/models/torch_bert/torch_transformers_sequence_tagger.py", line 295, in load
self.crf = CRF(self.n_classes).to(self.device)
File "/usr/local/lib/python3.8/dist-packages/deeppavlov/models/torch_bert/crf.py", line 13, in __init__
super().__init__(num_tags=num_tags, batch_first=batch_first)
TypeError: __init__() got an unexpected keyword argument 'num_tags'
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-75-5e156706e7e4> in <module>
1 # from deeppavlov import configs, build_model
2
----> 3 ner_model = build_model(configs.ner.ner_ontonotes_bert_mult, download=True)
5 frames
/usr/local/lib/python3.8/dist-packages/deeppavlov/core/commands/infer.py in build_model(config, mode, load_trained, install, download)
53 .format(component_config.get('class_name', component_config.get('ref', 'UNKNOWN'))))
54
---> 55 component = from_params(component_config, mode=mode)
56
57 if 'id' in component_config:
/usr/local/lib/python3.8/dist-packages/deeppavlov/core/common/params.py in from_params(params, mode, **kwargs)
100 kwargs['mode'] = mode
101
--> 102 component = obj(**dict(config_params, **kwargs))
103 try:
104 _refs[config_params['id']] = component
/usr/local/lib/python3.8/dist-packages/deeppavlov/models/torch_bert/torch_transformers_sequence_tagger.py in __init__(self, n_tags, pretrained_bert, bert_config_file, attention_probs_keep_prob, hidden_keep_prob, optimizer, optimizer_parameters, learning_rate_drop_patience, learning_rate_drop_div, load_before_drop, clip_norm, min_learning_rate, use_crf, **kwargs)
180 self.use_crf = use_crf
181
--> 182 super().__init__(optimizer=optimizer,
183 optimizer_parameters=optimizer_parameters,
184 learning_rate_drop_patience=learning_rate_drop_patience,
/usr/local/lib/python3.8/dist-packages/deeppavlov/core/models/torch_model.py in __init__(self, device, optimizer, optimizer_parameters, lr_scheduler, lr_scheduler_parameters, learning_rate_drop_patience, learning_rate_drop_div, load_before_drop, min_learning_rate, *args, **kwargs)
96 self.opt = deepcopy(kwargs)
97
---> 98 self.load()
99 # we need to switch to eval mode here because by default it's in `train` mode.
100 # But in case of `interact/build_model` usage, we need to have model in eval mode.
/usr/local/lib/python3.8/dist-packages/deeppavlov/models/torch_bert/torch_transformers_sequence_tagger.py in load(self, fname)
293 self.model.to(self.device)
294 if self.use_crf:
--> 295 self.crf = CRF(self.n_classes).to(self.device)
296
297 self.optimizer = getattr(torch.optim, self.optimizer_name)(
/usr/local/lib/python3.8/dist-packages/deeppavlov/models/torch_bert/crf.py in __init__(self, num_tags, batch_first)
11
12 def __init__(self, num_tags: int, batch_first: bool = False) -> None:
---> 13 super().__init__(num_tags=num_tags, batch_first=batch_first)
14 nn.init.zeros_(self.transitions)
15 nn.init.zeros_(self.start_transitions)
TypeError: __init__() got an unexpected keyword argument 'num_tags'
Make sure that you are using the latest version of DeepPavlov by upgrading it:
!pip install --upgrade deeppavlov
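If you want to confirm which version actually ended up in the Colab runtime (restart the runtime after upgrading so the new version is the one that gets imported), you can print it before rebuilding the model:
import deeppavlov
print(deeppavlov.__version__)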
Then import all the required packages:
from deeppavlov import configs, build_model
Install the model's requirements and download the pretrained model:
ner_model = build_model(configs.ner.ner_ontonotes_bert_mult, download=True, install=True)
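Once the model builds without the CRF error, you can call it on a list of sentences; DeepPavlov NER models return the tokenized input together with the predicted tags for each string (the sentence below is just an illustrative example):
sentences = ['Bob Ross lived in New York and worked for PBS.']
tokens, tags = ner_model(sentences)
print(list(zip(tokens[0], tags[0])))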
You can get more information about this model and many others in our recent Medium article.