{
  "auto_mapping": null,
  "base_model_name_or_path": "meta-llama/Llama-2-7b-chat-hf",
  "inference_mode": true,
  "num_attention_heads": 32,
  "num_layers": 32,
  "num_transformer_submodules": 1,
  "num_virtual_tokens": 12,
  "peft_type": "PROMPT_TUNING",
  "prompt_tuning_init": "TEXT",
  "prompt_tuning_init_text": "What are the important entities in this document? What are the important dates in this document? What events are happening in this document? What is the result of these events? Please answer the above questions:",
  "revision": null,
  "task_type": "CAUSAL_LM",
  "token_dim": 4096,
  "tokenizer_kwargs": null,
  "tokenizer_name_or_path": "meta-llama/Llama-2-7b-chat-hf"
}