Skip to content

Commit 91920a6

Browse files
authored
fix: compute `support_padding_side` once instead of re-evaluating `inspect.signature(self._pad)` inside the per-example loop of `pad` (#10585)
1 parent 477048c commit 91920a6

File tree

1 file changed

+3
-2
lines changed

1 file changed

+3
-2
lines changed

paddlenlp/transformers/tokenizer_utils_base.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2898,9 +2898,10 @@ def pad(
28982898
)
28992899

29002900
required_input = encoded_inputs[self.model_input_names[0]]
2901+
support_padding_side = "padding_side" in set(inspect.signature(self._pad).parameters.keys())
29012902
if required_input and not isinstance(required_input[0], (list, tuple)):
29022903
# some tokenizers might not have the padding_side attribute
2903-
if "padding_side" in set(inspect.signature(self._pad).parameters.keys()):
2904+
if support_padding_side:
29042905
encoded_inputs = self._pad(
29052906
encoded_inputs,
29062907
max_length=max_length,
@@ -2937,7 +2938,7 @@ def pad(
29372938
batch_outputs = {}
29382939
for i in range(batch_size):
29392940
inputs = dict((k, v[i]) for k, v in encoded_inputs.items())
2940-
if "padding_side" in set(inspect.signature(self._pad).parameters.keys()):
2941+
if support_padding_side:
29412942
outputs = self._pad(
29422943
inputs,
29432944
max_length=max_length,

0 commit comments

Comments (0)