{
    "module": "keras_nlp.src.models.gpt2.gpt2_causal_lm",
    "class_name": "GPT2CausalLM",
    "config": {
        "backbone": {
            "module": "keras_nlp.src.models.gpt2.gpt2_backbone",
            "class_name": "GPT2Backbone",
            "config": {
                "name": "gpt2_backbone",
                "trainable": true,
                "vocabulary_size": 50257,
                "num_layers": 12,
                "num_heads": 12,
                "hidden_dim": 768,
                "intermediate_dim": 3072,
                "dropout": 0.1,
                "max_sequence_length": 1024
            },
            "registered_name": "keras_nlp>GPT2Backbone"
        },
        "preprocessor": {
            "module": "keras_nlp.src.models.gpt2.gpt2_causal_lm_preprocessor",
            "class_name": "GPT2CausalLMPreprocessor",
            "config": {
                "name": "gpt2_causal_lm_preprocessor",
                "trainable": true,
                "dtype": "float32",
                "tokenizer": {
                    "module": "keras_nlp.src.models.gpt2.gpt2_tokenizer",
                    "class_name": "GPT2Tokenizer",
                    "config": {
                        "name": "gpt2_tokenizer",
                        "trainable": true,
                        "dtype": "int32",
                        "sequence_length": null,
                        "add_prefix_space": false
                    },
                    "registered_name": "keras_nlp>GPT2Tokenizer"
                },
                "sequence_length": 1024,
                "add_start_token": true,
                "add_end_token": true
            },
            "registered_name": "keras_nlp>GPT2CausalLMPreprocessor"
        },
        "name": "gpt2_causal_lm"
    },
    "registered_name": "keras_nlp>GPT2CausalLM"
}
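
A minimal usage sketch, not part of the serialized config itself: the hyperparameters above (12 layers, 12 heads, 768 hidden dim, 50257-token vocabulary, 1024 max sequence length) match the 124M-parameter GPT-2 base architecture, which KerasNLP ships as the "gpt2_base_en" preset. Assuming that preset is the intended source of this file, the equivalent model can be instantiated directly through the preset API rather than by deserializing the config by hand:

    import keras_nlp

    # Loads the architecture described by the config above, together with
    # the tokenizer vocabulary assets and pretrained weights that the
    # bare config does not carry.
    causal_lm = keras_nlp.models.GPT2CausalLM.from_preset("gpt2_base_en")

    # The attached preprocessor tokenizes the prompt and adds start/end
    # tokens as configured (sequence_length=1024, add_start_token=true,
    # add_end_token=true) before generation.
    print(causal_lm.generate("The quick brown fox", max_length=50))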