Feature request
Support MPNet in ORTOptimizer (optimum.onnxruntime).
Motivation
I would like to optimize the inference of the PatentSBERTa model using optimum.onnxruntime. When I run the following code, I get a NotImplementedError:
from optimum.onnxruntime import ORTModelForFeatureExtraction
from optimum.onnxruntime import ORTOptimizer
model_id = "AI-Growth-Lab/PatentSBERTa"
model = ORTModelForFeatureExtraction.from_pretrained(model_id, from_transformers=True)
optimizer = ORTOptimizer.from_pretrained(model)
KeyError Traceback (most recent call last)
File ~\NLP\lib\site-packages\optimum\onnxruntime\optimization.py:67, in ORTOptimizer.__init__(self, onnx_model_path, config, from_ortmodel)
66 try:
---> 67 self.normalized_config = NormalizedConfigManager.get_normalized_config_class(self.model_type)(self.config)
68 except KeyError:
File ~\NLP\lib\site-packages\optimum\utils\normalized_config.py:269, in NormalizedConfigManager.get_normalized_config_class(cls, model_type)
267 @classmethod
268 def get_normalized_config_class(cls, model_type: str) -> Type:
--> 269 cls.check_supported_model(model_type)
270 return cls._conf[model_type]
File ~\NLP\lib\site-packages\optimum\utils\normalized_config.py:262, in NormalizedConfigManager.check_supported_model(cls, model_type)
261 model_types = ", ".join(cls._conf.keys())
--> 262 raise KeyError(
263 f"{model_type} model type is not supported yet in NormalizedConfig. Only {model_types} are supported. "
264 f"If you want to support {model_type} please propose a PR or open up an issue."
265 )
KeyError: 'mpnet model type is not supported yet in NormalizedConfig. Only albert, bart, bert, blenderbot, blenderbot_small, bloom, falcon, camembert, codegen, cvt, deberta, deberta-v2, deit, distilbert, donut-swin, electra, encoder-decoder, gpt2, gpt-bigcode, gpt_neo, gpt_neox, llama, gptj, imagegpt, longt5, marian, mbart, mt5, m2m_100, nystromformer, opt, pegasus, pix2struct, poolformer, regnet, resnet, roberta, speech_to_text, splinter, t5, trocr, whisper, vision-encoder-decoder, vit, xlm-roberta, yolos, mpt, gpt_bigcode are supported. If you want to support mpnet please propose a PR or open up an issue.'
During handling of the above exception, another exception occurred:
NotImplementedError Traceback (most recent call last)
Cell In[4], line 5
2 from optimum.onnxruntime.configuration import OptimizationConfig
4 # create ORTOptimizer and define optimization configuration
----> 5 optimizer = ORTOptimizer.from_pretrained(model)
6 optimization_config = OptimizationConfig(optimization_level=99) # enable all optimizations
8 # apply the optimization configuration to the model
File ~\NLP\lib\site-packages\optimum\onnxruntime\optimization.py:125, in ORTOptimizer.from_pretrained(cls, model_or_path, file_names)
123 else:
124 raise ValueError(f"Unable to load the model from {model_or_path}.")
--> 125 return cls(onnx_model_path, config=config, from_ortmodel=from_ortmodel)
File ~\NLP\lib\site-packages\optimum\onnxruntime\optimization.py:69, in ORTOptimizer.__init__(self, onnx_model_path, config, from_ortmodel)
67 self.normalized_config = NormalizedConfigManager.get_normalized_config_class(self.model_type)(self.config)
68 except KeyError:
---> 69 raise NotImplementedError(
70 f"Tried to use ORTOptimizer for the model type {self.model_type}, but it is not available yet. Please open an issue"
71 " or submit a PR at https://github.com/huggingface/optimum."
72 )
NotImplementedError: Tried to use ORTOptimizer for the model type mpnet, but it is not available yet. Please open an issue or submit a PR at https://github.com/huggingface/optimum.
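For reference, here is a minimal sketch of the kind of registration a fix would involve. It is not an official workaround: it monkey-patches the private NormalizedConfigManager._conf registry (the dict the traceback above reads from) to map "mpnet" to the generic NormalizedTextConfig, which assumes MPNet's config exposes the standard attribute names (hidden_size, num_attention_heads, num_hidden_layers). The save_dir name is just an example, and whether the rest of the optimization pipeline handles mpnet correctly afterwards is untested, so proper support in optimum would still need a PR.

from optimum.onnxruntime import ORTModelForFeatureExtraction, ORTOptimizer
from optimum.onnxruntime.configuration import OptimizationConfig
from optimum.utils.normalized_config import NormalizedConfigManager, NormalizedTextConfig

# Unofficial monkey-patch: register "mpnet" with the generic text config.
# Assumes MPNet uses hidden_size / num_attention_heads / num_hidden_layers.
NormalizedConfigManager._conf["mpnet"] = NormalizedTextConfig

model_id = "AI-Growth-Lab/PatentSBERTa"
model = ORTModelForFeatureExtraction.from_pretrained(model_id, from_transformers=True)

# With the registry patched, this no longer raises NotImplementedError.
optimizer = ORTOptimizer.from_pretrained(model)
optimization_config = OptimizationConfig(optimization_level=99)  # enable all optimizations

# "mpnet_onnx_optimized" is a hypothetical output directory; downstream graph
# fusion behaviour for mpnet is untested.
optimizer.optimize(save_dir="mpnet_onnx_optimized", optimization_config=optimization_config)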
Your contribution
no