---
library_name: transformers
pipeline_tag: text-generation
---

CodeI/O: Condensing Reasoning Patterns via Code Input-Output Prediction

📑 Paper    |    🌐 Project Page    |    💾 Released Resources    |    📦 Repo

This is the resource page for the CodeI/O collection on Hugging Face; your current position is highlighted with a blue block.

Dataset

| Dataset | Link |
| --- | --- |
| CodeI/O-PythonEdu-Reasoning | 🤗 |
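
To use the dataset, here is a minimal sketch with the 🤗 `datasets` library; the repo id below is an assumed placeholder, so substitute the id behind the 🤗 link in the table above:

```python
# Minimal sketch: stream a few CodeI/O examples without downloading the
# full dataset. NOTE: the repo id is a hypothetical placeholder; use the
# id that the 🤗 link in the table above points to.
from datasets import load_dataset

ds = load_dataset("hkust-nlp/CodeIO-PythonEdu-Reasoning", split="train", streaming=True)
for i, example in enumerate(ds):
    print(example.keys())  # inspect the available prompt/response fields
    if i == 2:
        break
```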

Models

| Base Model \ Training | CodeI/O (Stage 1) | CodeI/O (Stage 2) | CodeI/O++ (Stage 1) | CodeI/O++ (Stage 2) |
| --- | --- | --- | --- | --- |
| Qwen 2.5 7B Coder | 🤗 | 🤗 | 🤗 | 🤗 |
| LLaMA 3.1 8B | 🤗 | 🤗 | 🤗 | 🤗 |
| DeepSeek v2 Lite Coder | 🤗 | 🤗 | 🤗 | 🤗 |
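
All released checkpoints are standard `transformers` causal LMs (see the metadata above: `library_name: transformers`, `pipeline_tag: text-generation`), so they load with the usual API. A minimal sketch, with an assumed placeholder repo id:

```python
# Minimal sketch: load a released checkpoint and generate. NOTE: the repo
# id is a hypothetical placeholder; use the id behind the matching 🤗 link.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "hkust-nlp/codeio-qwen2.5-7b-coder-stage2"  # hypothetical id
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch.bfloat16, device_map="auto"
)

prompt = "Given the Python function below, predict its output for the given input.\n..."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
out = model.generate(**inputs, max_new_tokens=256)
print(tokenizer.decode(out[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True))
```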

Citation

If you find these resources helpful, please cite:

@article{li2025codeio,
  title={CodeI/O: Condensing Reasoning Patterns via Code Input-Output Prediction},
  author={Li, Junlong and Guo, Daya and Yang, Dejian and Xu, Runxin and Wu, Yu and He, Junxian},
  journal={arXiv preprint arXiv:2502.07316},
  year={2025}
}

File information

The repository contains the following file information:

Filename: tokenizer_config.json Content:
{
  "add_bos_token": true,
  "add_eos_token": false,
  "bos_token": { "__type": "AddedToken", "content": "<｜begin▁of▁sentence｜>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false },
  "clean_up_tokenization_spaces": false,
  "eos_token": { "__type": "AddedToken", "content": "<｜end▁of▁sentence｜>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false },
  "legacy": true,
  "model_max_length": 16384,
  "pad_token": { "__type": "AddedToken", "content": "<｜end▁of▁sentence｜>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false },
  "sp_model_kwargs": {},
  "unk_token": null,
  "tokenizer_class": "LlamaTokenizerFast",
  "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='') %}{%- for message in messages %}{%- if message['role'] == 'system' %}{% set ns.system_prompt = message['content'] %}{%- endif %}{%- endfor %}{{bos_token}}{{ns.system_prompt}}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'<｜User｜>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is none %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls']%}{%- if not ns.is_first %}{{'<｜Assistant｜><｜tool▁calls▁begin｜><｜tool▁call▁begin｜>' + tool['type'] + '<｜tool▁sep｜>' + tool['function']['name'] + '\n' + '```json' + '\n' + tool['function']['arguments'] + '\n' + '```' + '<｜tool▁call▁end｜>'}}{%- set ns.is_first = true -%}{%- else %}{{'\n' + '<｜tool▁call▁begin｜>' + tool['type'] + '<｜tool▁sep｜>' + tool['function']['name'] + '\n' + '```json' + '\n' + tool['function']['arguments'] + '\n' + '```' + '<｜tool▁call▁end｜>'}}{{'<｜tool▁calls▁end｜><｜end▁of▁sentence｜>'}}{%- endif %}{%- endfor %}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is not none %}{%- if ns.is_tool %}{{'<｜tool▁outputs▁end｜>' + message['content'] + '<｜end▁of▁sentence｜>'}}{%- set ns.is_tool = false -%}{%- else %}{% set content = message['content'] %}{% if '</think>' in content %}{% set content = content.split('</think>')[-1] %}{% endif %}{{'<｜Assistant｜>' + content + '<｜end▁of▁sentence｜>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'<｜tool▁outputs▁begin｜><｜tool▁output▁begin｜>' + message['content'] + '<｜tool▁output▁end｜>'}}{%- set ns.is_output_first = false %}{%- else %}{{'\n<｜tool▁output▁begin｜>' + message['content'] + '<｜tool▁output▁end｜>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool %}{{'<｜tool▁outputs▁end｜>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'<｜Assistant｜>'}}{% endif %}"
}
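
The chat template above is the DeepSeek-style conversation format (<｜User｜> / <｜Assistant｜> turns wrapped between the begin/end-of-sentence tokens). Here is a minimal sketch of rendering a prompt with it via `transformers`; the repo id is an assumed placeholder for one of the checkpoints linked above:

```python
# Minimal sketch: render a conversation with the tokenizer's built-in chat
# template. NOTE: the repo id is a hypothetical placeholder; substitute the
# id behind the corresponding 🤗 link in the Models table.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("hkust-nlp/codeio-dsv2-lite-coder")  # hypothetical id
messages = [
    {"role": "system", "content": "You are a careful step-by-step reasoner."},
    {"role": "user", "content": "Predict the output of f(x) = x * 2 for x = 21."},
]
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)  # ends with '<｜Assistant｜>' because add_generation_prompt=True
```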

Filename: generation_config.json Content: { "_from_model_config": true, "bos_token_id": 32000, "eos_token_id": 32001, "transformers_version": "4.34.1" }

Filename: config.json Content:
{
  "architectures": [ "LlamaForCausalLM" ],
  "bos_token_id": 32000,
  "eos_token_id": 32001,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "intermediate_size": 14336,
  "max_position_embeddings": 32768,
  "model_type": "llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 8,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 100000,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.34.1",
  "use_cache": true,
  "vocab_size": 32256
}
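
The config describes a LLaMA-architecture model with grouped-query attention: 32 query heads share 8 key/value heads (a 4:1 grouping), and the per-head dimension is 4096 / 32 = 128. A small sketch deriving these values from the file:

```python
# Minimal sketch: derive the attention geometry from config.json.
import json

with open("config.json") as f:
    cfg = json.load(f)

head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]           # 4096 // 32 = 128
kv_groups = cfg["num_attention_heads"] // cfg["num_key_value_heads"]  # 32 // 8 = 4
print(f"head_dim={head_dim}, query heads per kv head={kv_groups}")
```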

Filename: tokenizer.json Content: "Content of the file is larger than 50 KB, too long to display."

Filename: model.safetensors.index.json Content: { "metadata": { "total_size": 16060522496 }, "weight_map": { ... } }. The weight map assigns every tensor (lm_head, embed_tokens, and each layer's attention, MLP, and norm weights) to one of two shards, model-00001-of-00002.safetensors or model-00002-of-00002.safetensors: layers 0-19 sit in the first shard, while layer 20 onward and lm_head sit in the second. The full map is too long to display.