You signed in with another tab or window. Reload to refresh your session. You signed out in another tab or window. Reload to refresh your session. You switched accounts on another tab or window. Reload to refresh your session. Dismiss alert
ValueError Traceback (most recent call last)
/tmp/ipykernel_3356/3905977959.py in <module>
----> 1 fill = pipeline('fill-mask', model='tamil_bert', tokenizer='tamil_bert')
~/.local/lib/python3.8/site-packages/transformers/pipelines/__init__.py in pipeline(task, model, config, tokenizer, feature_extractor, framework, revision, use_fast, use_auth_token, model_kwargs, **kwargs)
452 # Will load the correct model if possible
453 model_classes = {"tf": targeted_task["tf"], "pt": targeted_task["pt"]}
--> 454 framework, model = infer_framework_load_model(
455 model,
456 model_classes=model_classes,
~/.local/lib/python3.8/site-packages/transformers/pipelines/base.py in infer_framework_load_model(model, config, model_classes, task, framework, **model_kwargs)
156
157 if isinstance(model, str):
--> 158 raise ValueError(f"Could not load model {model} with any of the following classes: {class_tuple}.")
159
    160         framework = "tf" if model.__class__.__name__.startswith("TF") else "pt"
ValueError: Could not load model tamil_bert with any of the following classes: (<class 'transformers.models.auto.modeling_auto.AutoModelForMaskedLM'>,).
The text was updated successfully, but these errors were encountered:
ValueError Traceback (most recent call last)
/tmp/ipykernel_3356/3905977959.py in <module>
----> 1 fill = pipeline('fill-mask', model='tamil_bert', tokenizer='tamil_bert')
~/.local/lib/python3.8/site-packages/transformers/pipelines/__init__.py in pipeline(task, model, config, tokenizer, feature_extractor, framework, revision, use_fast, use_auth_token, model_kwargs, **kwargs)
452 # Will load the correct model if possible
453 model_classes = {"tf": targeted_task["tf"], "pt": targeted_task["pt"]}
--> 454 framework, model = infer_framework_load_model(
455 model,
456 model_classes=model_classes,
~/.local/lib/python3.8/site-packages/transformers/pipelines/base.py in infer_framework_load_model(model, config, model_classes, task, framework, **model_kwargs)
156
157 if isinstance(model, str):
--> 158 raise ValueError(f"Could not load model {model} with any of the following classes: {class_tuple}.")
159
    160         framework = "tf" if model.__class__.__name__.startswith("TF") else "pt"
ValueError: Could not load model tamil_bert with any of the following classes: (<class 'transformers.models.auto.modeling_auto.AutoModelForMaskedLM'>,).
The text was updated successfully, but these errors were encountered: