{
  "module": "keras_hub.src.models.gpt2.gpt2_tokenizer",
  "class_name": "GPT2Tokenizer",
  "config": {
    "name": "gpt2_tokenizer",
    "trainable": true,
    "dtype": {
      "module": "keras",
      "class_name": "DTypePolicy",
      "config": {
        "name": "int32"
      },
      "registered_name": null
    },
    "config_file": "tokenizer.json",
    "sequence_length": null,
    "add_prefix_space": false,
    "unsplittable_tokens": [
      "<|endoftext|>"
    ]
  },
  "registered_name": "keras_hub>GPT2Tokenizer"
}