prompt-plus-plus / variables.py
import json
import os

# Load JSON data from the PROMPT_TEMPLATES environment variable and parse it
json_data = os.getenv('PROMPT_TEMPLATES', '{}')
prompt_data = json.loads(json_data)
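
# Illustrative sketch of the shape PROMPT_TEMPLATES is assumed to have, based on
# the fields accessed below ("description" and "template"). The key name and the
# values here are hypothetical examples, not data shipped with the project:
# {
#     "chain_of_thought": {
#         "description": "Step-by-step reasoning prompt",
#         "template": "Think through the problem step by step: {task}"
#     }
# }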

# Map each prompt key to its human-readable description
metaprompt_explanations = {
    key: data["description"]
    for key, data in prompt_data.items()
}

# Generate markdown explanation
explanation_markdown = "".join([
    f"- **{key}**: {value}\n"
    for key, value in metaprompt_explanations.items()
])
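
# With the hypothetical entry sketched above, explanation_markdown would contain
# one bullet per key, e.g.:
# - **chain_of_thought**: Step-by-step reasoning prompt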

# Define models list
models = [
    # Meta-Llama models (all support system prompts)
    "meta-llama/Meta-Llama-3-70B-Instruct",
    "meta-llama/Meta-Llama-3-8B-Instruct",
    "meta-llama/Llama-3.1-70B-Instruct",
    "meta-llama/Llama-3.1-8B-Instruct",
    "meta-llama/Llama-3.2-3B-Instruct",
    "meta-llama/Llama-3.2-1B-Instruct",
    "meta-llama/Llama-2-13b-chat-hf",
    "meta-llama/Llama-2-7b-chat-hf",
    # HuggingFaceH4 models (support system prompts)
    "HuggingFaceH4/zephyr-7b-beta",
    "HuggingFaceH4/zephyr-7b-alpha",
    # Qwen models (support system prompts)
    "Qwen/Qwen2.5-72B-Instruct",
    "Qwen/Qwen2.5-1.5B",
    # Microsoft model (supports system prompts)
    "microsoft/Phi-3.5-mini-instruct"
]

# Check for API token
api_token = os.getenv('HF_API_TOKEN')
if not api_token:
    raise ValueError("HF_API_TOKEN not found in environment variables")

# Store templates in a dictionary
meta_prompts = {
    key: data["template"]
    for key, data in prompt_data.items()
}
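
# Minimal usage sketch (hypothetical; the consuming app is not part of this file,
# and the key and {task} placeholder come from the illustrative JSON above):
# from variables import models, meta_prompts, explanation_markdown
# template = meta_prompts.get("chain_of_thought", "")
# prompt = template.format(task="Summarize this article") if template else ""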