{ "model_type": "babylang", "architectures": ["GPT"], "vocab_size": 50257, "block_size": 128, "n_layer": 6, "n_head": 6, "n_embd": 384, "dropout": 0.0, "bias": true, "auto_map": { "AutoConfig": "model.GPTConfig", "AutoModelForCausalLM": "model.GPT" }, "torch_dtype": "float32", "transformers_version": "4.42.0" }