1 file changed, +2 -2 lines: neural_compressor/torch/quantization

@@ -85,8 +85,9 @@ class TorchBaseConfig(BaseConfig):
 
     # re-write func _get_op_name_op_type_config to fallback op_type with string
     # because there are some special op_types for IPEX backend: `Linear&Relu`, `Linear&add`, ...
-    def __init__(self, white_list):
+    def __init__(self, white_list=DEFAULT_WHITE_LIST):
         super().__init__(white_list)
+        self.params_list = self.__class__._generate_params_list()
         self.non_tunable_params: List[str] = ["white_list"]
 
     def _get_op_name_op_type_config(self):
@@ -966,7 +967,6 @@ def __init__(
             output_dir (str): The output directory for temporary files (default is "./temp_auto_round").
         """
         super().__init__(white_list=white_list)
-        self.params_list = self.__class__._generate_params_list()
         # these two params are lists but not tunable
         self.non_tunable_params.extend(["options", "shared_layers"])
 
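For context, here is a minimal self-contained sketch of the pattern this diff introduces. It is illustrative only: the class attributes, the `DEFAULT_WHITE_LIST` value, and the body of `_generate_params_list()` are assumptions, not the actual neural_compressor implementation. The point is that generating `params_list` once in the base config's `__init__` lets every subclass (such as the AutoRound config) inherit it, so the subclass only has to register its own non-tunable params.

```python
# Illustrative sketch only -- everything except the overall pattern is an assumption.
from typing import Any, Dict, List

DEFAULT_WHITE_LIST = "*"  # placeholder default; the real constant lives in neural_compressor


class TorchBaseConfig:
    # Hypothetical tunable params declared as class attributes.
    bits: int = 4
    group_size: int = 128

    def __init__(self, white_list=DEFAULT_WHITE_LIST):
        self.white_list = white_list
        # Moved up from the subclass: collect tunable params once, for any subclass.
        self.params_list = self.__class__._generate_params_list()
        self.non_tunable_params: List[str] = ["white_list"]

    @classmethod
    def _generate_params_list(cls) -> List[str]:
        # Assumed behavior: walk the MRO and pick up non-private, non-callable
        # class attributes as tunable parameter names.
        params: List[str] = []
        for klass in reversed(cls.__mro__):
            for name, value in vars(klass).items():
                if name.startswith("_") or callable(value) or name in params:
                    continue
                params.append(name)
        return params


class AutoRoundConfig(TorchBaseConfig):
    # Hypothetical subclass-specific params.
    options: Dict[str, Any] = {}
    shared_layers: List[str] = []

    def __init__(self, white_list=DEFAULT_WHITE_LIST):
        super().__init__(white_list=white_list)
        # No params_list assignment needed here anymore; the base __init__ built it.
        # These two params are lists but not tunable.
        self.non_tunable_params.extend(["options", "shared_layers"])


if __name__ == "__main__":
    cfg = AutoRoundConfig()
    print(cfg.params_list)         # ['bits', 'group_size', 'options', 'shared_layers']
    print(cfg.non_tunable_params)  # ['white_list', 'options', 'shared_layers']
```

Under these assumptions, dropping the `params_list` assignment from the subclass (the second hunk) is safe because `super().__init__()` has already populated it.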