{ "cells": [ { "cell_type": "code", "execution_count": 1, "id": "1dabd320-903a-4f6c-9634-e8bd5993f90f", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "* Running on local URL: http://127.0.0.1:7860\n", "* To create a public link, set `share=True` in `launch()`.\n" ] }, { "data": { "text/html": [ "
" ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/plain": [] }, "execution_count": 1, "metadata": {}, "output_type": "execute_result" } ], "source": [ "from transformers import AutoModelForCausalLM, AutoTokenizer\n", "import torch\n", "import gradio as gr\n", "\n", "# مدل و توکنایزر\n", "model_name = \"HuggingFaceTB/SmolLM2-360M-Instruct\"\n", "tokenizer = AutoTokenizer.from_pretrained(model_name)\n", "model = AutoModelForCausalLM.from_pretrained(model_name)\n", "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n", "model.to(device)\n", "model.eval()\n", "\n", "# پیام‌های اولیه مکالمه\n", "def format_messages(user_prompt):\n", " system_message = \"<|im_start|>system\\n<|im_end|>\\n\"\n", " user_message = f\"<|im_start|>user\\n{user_prompt}<|im_end|>\\n\"\n", " assistant_prefix = \"<|im_start|>assistant\\n\"\n", " full_prompt = system_message + user_message + assistant_prefix\n", " return full_prompt\n", "\n", "# تابع چت\n", "def chat(user_input):\n", " prompt = format_messages(user_input)\n", " inputs = tokenizer(prompt, return_tensors=\"pt\").to(device)\n", "\n", " with torch.no_grad():\n", " outputs = model.generate(\n", " **inputs,\n", " max_new_tokens=256,\n", " do_sample=True,\n", " temperature=0.7,\n", " top_p=0.9,\n", " pad_token_id=tokenizer.eos_token_id\n", " )\n", "\n", " response = tokenizer.decode(outputs[0], skip_special_tokens=True)\n", " \n", " # پاسخ مدل بعد از آخرین <|im_start|>assistant\n", " if \"<|im_start|>assistant\" in response:\n", " response = response.split(\"<|im_start|>assistant\")[-1].strip()\n", " if \"<|im_end|>\" in response:\n", " response = response.split(\"<|im_end|>\")[0].strip()\n", " \n", " return response\n", "\n", "# رابط گرافیکی Gradio\n", "interface = gr.Interface(\n", " fn=chat,\n", " inputs=gr.Textbox(lines=3, placeholder=\"پیام خود را وارد کنید...\"),\n", " outputs=\"text\",\n", " title=\"💬 SmolLM2 Chatbot\",\n", " description=\"مدل سبک و مکالمه‌محور SmolLM2 از Hugging Face با فرمت قالب رسمی\"\n", ")\n", "\n", "interface.launch()\n" ] }, { "cell_type": "code", "execution_count": null, "id": "0f00bd7a-f925-4970-aeb6-96f1dcbad3c8", "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python [conda env:base] *", "language": "python", "name": "conda-base-py" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.12.7" } }, "nbformat": 4, "nbformat_minor": 5 }