1717"""
1818
1919from ..tokenizer_utils_base import BatchEncoding
20- from .tokenizer import ErnieViLTokenizer
21- from .feature_extraction import ErnieViLFeatureExtractor
20+ from ..processing_utils import ProcessorMixin
2221
2322__all__ = ["ErnieViLProcessor" ]
2423
2524
26- class ErnieViLProcessor (object ):
25+ class ErnieViLProcessor (ProcessorMixin ):
2726 r"""
     Constructs an ErnieViL processor which wraps an ErnieViL feature extractor and an ErnieViL tokenizer into a single processor.
     [`ErnieViLProcessor`] offers all the functionalities of [`ErnieViLFeatureExtractor`] and [`ErnieViLTokenizer`]. See the
@@ -34,11 +33,12 @@ class ErnieViLProcessor(object):
         tokenizer ([`ErnieViLTokenizer`]):
             The tokenizer is a required input.
     """
+    feature_extractor_class = "ErnieViLFeatureExtractor"
+    tokenizer_class = "ErnieViLTokenizer"
 
     def __init__(self, feature_extractor, tokenizer):
-        super().__init__()
-        self.tokenizer = tokenizer
-        self.feature_extractor = feature_extractor
+        super().__init__(feature_extractor, tokenizer)
+        self.current_processor = self.feature_extractor
 
     def __call__(self, text=None, images=None, return_tensors=None, **kwargs):
         """
@@ -105,15 +105,3 @@ def decode(self, *args, **kwargs):
         the docstring of this method for more information.
         """
         return self.tokenizer.decode(*args, **kwargs)
-
-    # TODO junnyu find a better way from_pretrained and save_pretrained
-    @classmethod
-    def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs):
-        tokenizer = ErnieViLTokenizer.from_pretrained(
-            pretrained_model_name_or_path, *args, **kwargs)
-        feature_extractor = ErnieViLFeatureExtractor()
-        return cls(feature_extractor, tokenizer)
-
-    def save_pretrained(self, save_directory, filename_prefix=None, **kwargs):
-        return self.tokenizer.save_pretrained(save_directory, filename_prefix,
-                                              **kwargs)
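
For context, a minimal usage sketch of the refactored processor, assuming `ProcessorMixin` supplies `from_pretrained`/`save_pretrained` and instantiates the components named by `feature_extractor_class` and `tokenizer_class` (which is what dropping the hand-written `from_pretrained`/`save_pretrained` suggests). The import path, checkpoint name, and image file below are illustrative placeholders, not taken from this diff.

from PIL import Image
from paddlenlp.transformers import ErnieViLProcessor

# Assumed: ProcessorMixin.from_pretrained builds the ErnieViLFeatureExtractor and
# ErnieViLTokenizer declared via the class attributes above. Checkpoint name is a placeholder.
processor = ErnieViLProcessor.from_pretrained("PaddlePaddle/ernie_vil-2.0-base-zh")

# Text goes to the tokenizer and images to the feature extractor, matching the
# __call__(text=..., images=..., return_tensors=...) signature kept in this diff.
inputs = processor(text=["a photo of a cat"],
                   images=Image.open("cat.jpg"),  # placeholder image path
                   return_tensors="pd")

# Both components should now round-trip through ProcessorMixin.save_pretrained.
processor.save_pretrained("./ernie_vil_processor")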