{
"agent_name": null,
"training_start": null,
"training_end": null,
"created_by": "DCAgent",
"base_model_name": "Qwen/Qwen3-8B",
"dataset_name": "penfever/GLM-4.6-swesmith-32ep-131k-nosumm-reasoning,penfever/GLM-4.6-nl2bash-verified-32ep-32k-reasoning,penfever/GLM-4.6-stackexchange-overflow-sandboxes-32eps-65k-reasoning,penfever/GLM-4.6-inferredbugs-32ep-65k-reasoning",
"training_type": "SFT",
"training_parameters": "https://huggingface.co/laion/swesmith-nl2bash-stack-bugsseq/blob/main/config.json",
"wandb_link": "https://wandb.ai/DCAgent/OpenThoughts-Agent/runs/swesmith-nl2bash-stack-bugsseq_Qwen3-8B",
"traces_location_s3": null
}
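A minimal sketch of how this metadata might be consumed, assuming the JSON is saved locally under a hypothetical filename (the actual filename in the repo is not shown here); note that dataset_name packs several dataset IDs into one comma-separated string:

import json

# Hypothetical local filename for the metadata shown above.
with open("training_metadata.json") as f:
    meta = json.load(f)

# dataset_name is a single comma-separated string listing the SFT datasets.
datasets = [name.strip() for name in meta["dataset_name"].split(",")]

print(f"Base model:    {meta['base_model_name']}")
print(f"Training type: {meta['training_type']}")
print(f"{len(datasets)} datasets:")
for name in datasets:
    print(f"  - {name}")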