
Commit 6505abb

Reuse tokenizer tests for original t5-small

1 parent: 32d8df4

1 file changed (+3, -9 lines)

tests/models/t5/tokenization.js (+3, -9)
@@ -237,13 +237,7 @@ export const TEST_CONFIG = {
             decoded: "Hey </s>. how are you</s>",
         },
     },
-    "google-t5/t5-small": {
-        // Test that tokenizer type can be inferred (`type: "Unigram"` is missing)
-        SIMPLE: {
-            text: BASE_TEST_STRINGS.SIMPLE,
-            tokens: ["\u2581How", "\u2581are", "\u2581you", "\u2581doing", "?"],
-            ids: [571, 33, 25, 692, 58, 1],
-            decoded: "How are you doing?</s>",
-        },
-    }
 };
+
+// Test that tokenizer type can be inferred (`type: "Unigram"` is missing)
+TEST_CONFIG["google-t5/t5-small"] = TEST_CONFIG["Xenova/t5-small"];
