{"metadata":{"kernelspec":{"language":"python","display_name":"Python 3","name":"python3"},"language_info":{"name":"python","version":"3.10.12","mimetype":"text/x-python","codemirror_mode":{"name":"ipython","version":3},"pygments_lexer":"ipython3","nbconvert_exporter":"python","file_extension":".py"},"kaggle":{"accelerator":"nvidiaTeslaT4","dataSources":[],"isInternetEnabled":true,"language":"python","sourceType":"notebook","isGpuEnabled":true}},"nbformat_minor":4,"nbformat":4,"cells":[{"cell_type":"code","source":"# pip install torch_xla -q","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2024-12-23T07:29:06.028570Z","iopub.execute_input":"2024-12-23T07:29:06.028897Z","iopub.status.idle":"2024-12-23T07:29:06.040164Z","shell.execute_reply.started":"2024-12-23T07:29:06.028804Z","shell.execute_reply":"2024-12-23T07:29:06.039321Z"}},"outputs":[],"execution_count":null},{"cell_type":"code","source":"# ! nvidia-smi -L","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2024-12-23T07:29:06.046744Z","iopub.execute_input":"2024-12-23T07:29:06.047018Z","iopub.status.idle":"2024-12-23T07:29:06.051114Z","shell.execute_reply.started":"2024-12-23T07:29:06.046989Z","shell.execute_reply":"2024-12-23T07:29:06.050374Z"}},"outputs":[],"execution_count":null},{"cell_type":"code","source":"!pip install gradio diffusers gTTS together -q","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2024-12-25T00:17:36.027884Z","iopub.execute_input":"2024-12-25T00:17:36.028181Z","iopub.status.idle":"2024-12-25T00:17:50.860673Z","shell.execute_reply.started":"2024-12-25T00:17:36.028159Z","shell.execute_reply":"2024-12-25T00:17:50.859768Z"}},"outputs":[{"name":"stdout","text":"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m41.8/41.8 kB\u001b[0m \u001b[31m2.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m57.2/57.2 MB\u001b[0m \u001b[31m30.3 MB/s\u001b[0m 
eta \u001b[36m0:00:00\u001b[0m:00:01\u001b[0m00:01\u001b[0m\n\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m320.4/320.4 kB\u001b[0m \u001b[31m18.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.2/3.2 MB\u001b[0m \u001b[31m82.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m:00:01\u001b[0m\n\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m70.6/70.6 kB\u001b[0m \u001b[31m4.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m94.8/94.8 kB\u001b[0m \u001b[31m6.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m73.5/73.5 kB\u001b[0m \u001b[31m4.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m78.6/78.6 kB\u001b[0m \u001b[31m4.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m450.5/450.5 kB\u001b[0m \u001b[31m25.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m131.3/131.3 kB\u001b[0m \u001b[31m9.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m11.2/11.2 MB\u001b[0m \u001b[31m106.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m0:01\u001b[0m\n\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m73.2/73.2 kB\u001b[0m \u001b[31m4.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m62.3/62.3 kB\u001b[0m \u001b[31m3.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m58.3/58.3 kB\u001b[0m 
\u001b[31m3.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n\u001b[?25h","output_type":"stream"}],"execution_count":1},{"cell_type":"code","source":"# import torch_xla.core.xla_model as xm\n# tpu = xm.xla_device()","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2024-12-23T07:29:20.075258Z","iopub.execute_input":"2024-12-23T07:29:20.075501Z","iopub.status.idle":"2024-12-23T07:29:20.078665Z","shell.execute_reply.started":"2024-12-23T07:29:20.075480Z","shell.execute_reply":"2024-12-23T07:29:20.078023Z"}},"outputs":[],"execution_count":null},{"cell_type":"code","source":"import torch\nfrom diffusers import TextToVideoSDPipeline, DiffusionPipeline\nfrom diffusers.utils import export_to_video\nimport gradio as gr\nfrom transformers import AutoTokenizer, AutoModelForCausalLM\nimport PIL\nfrom io import BytesIO\nfrom gtts import gTTS\nimport time\nfrom pydub import AudioSegment\nimport nltk\nfrom together import Together\nimport base64\n","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2024-12-25T00:17:53.042072Z","iopub.execute_input":"2024-12-25T00:17:53.042422Z","iopub.status.idle":"2024-12-25T00:18:10.158571Z","shell.execute_reply.started":"2024-12-25T00:17:53.042362Z","shell.execute_reply":"2024-12-25T00:18:10.157888Z"}},"outputs":[{"name":"stderr","text":"The cache for model files in Transformers v4.22.0 has been updated. Migrating your old cache. This is a one-time only operation. 
You can interrupt this and resume the migration later on by calling `transformers.utils.move_cache()`.\n","output_type":"stream"},{"output_type":"display_data","data":{"text/plain":"0it [00:00, ?it/s]","application/vnd.jupyter.widget-view+json":{"version_major":2,"version_minor":0,"model_id":"af4ba4ed88764d96a965454ca9f53a61"}},"metadata":{}}],"execution_count":2},{"cell_type":"code","source":"# tokenizer = AutoTokenizer.from_pretrained(\"MBZUAI/LaMini-GPT-774M\")\n# model0 = AutoModelForCausalLM.from_pretrained(\"MBZUAI/LaMini-GPT-774M\")\n\ntokenizer = AutoTokenizer.from_pretrained(\"ParisNeo/LLama-3.2-3B-Lollms-Finetuned-GGUF\")\nmodel0 = AutoModelForCausalLM.from_pretrained(\"ParisNeo/LLama-3.2-3B-Lollms-Finetuned-GGUF\", ignore_mismatched_sizes=True)\n\n# tokenizer = AutoTokenizer.from_pretrained(\"gokaygokay/tiny_llama_chat_description_to_prompt\", cache_dir = '/kaggle/working')\n# model0 = AutoModelForCausalLM.from_pretrained(\"gokaygokay/tiny_llama_chat_description_to_prompt\", ignore_mismatched_sizes=True, cache_dir = '/kaggle/working')\n# model0 = AutoModelForCausalLM.from_pretrained(\"MJ199999/gpt3_model\",ignore_mismatched_sizes=True, from_tf=True)\n","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2024-12-25T00:18:12.707855Z","iopub.execute_input":"2024-12-25T00:18:12.708546Z","iopub.status.idle":"2024-12-25T00:21:21.315758Z","shell.execute_reply.started":"2024-12-25T00:18:12.708513Z","shell.execute_reply":"2024-12-25T00:21:21.314829Z"}},"outputs":[{"output_type":"display_data","data":{"text/plain":"tokenizer_config.json: 0%| | 0.00/55.4k [00:00 {end_time_formatted}\\n\"\n srt_content += f\"{sentence}\\n\\n\"\n \n start_time = end_time # Update start time for the next sentence\n \n return audio_path, 
srt_content","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2024-12-25T00:22:58.860290Z","iopub.execute_input":"2024-12-25T00:22:58.860668Z","iopub.status.idle":"2024-12-25T00:22:59.006324Z","shell.execute_reply.started":"2024-12-25T00:22:58.860633Z","shell.execute_reply":"2024-12-25T00:22:59.005656Z"}},"outputs":[{"name":"stdout","text":"[nltk_data] Downloading package punkt to /usr/share/nltk_data...\n[nltk_data] Package punkt is already up-to-date!\n","output_type":"stream"}],"execution_count":11},{"cell_type":"code","source":"def tti(prompt, num_steps=50, width=512, heights=512):\n    \"\"\"Generate one image from a text prompt with Stable Diffusion 2.1.\n\n    Parameters\n    ----------\n    prompt : str\n        Text description of the desired image.\n    num_steps : int\n        Number of denoising inference steps (quality vs. speed trade-off).\n    width, heights : int\n        Output image size in pixels.\n\n    Returns\n    -------\n    PIL.Image.Image\n        The first image generated by the pipeline.\n    \"\"\"\n    # Cache the pipeline on the function object: the original reloaded the\n    # multi-GB checkpoint from the Hub on every call, which dominated runtime.\n    if not hasattr(tti, \"_pipe\"):\n        pipe = DiffusionPipeline.from_pretrained(\"stabilityai/stable-diffusion-2-1\")\n        # Fall back to CPU so the function does not crash on machines without CUDA.\n        device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n        tti._pipe = pipe.to(device)\n    # Run the pipeline and return the first generated image.\n    result = tti._pipe(prompt, num_inference_steps=num_steps, width=width, height=heights)\n    return result.images[0]\n","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2024-12-25T00:22:59.007053Z","iopub.execute_input":"2024-12-25T00:22:59.007324Z","iopub.status.idle":"2024-12-25T00:22:59.011814Z","shell.execute_reply.started":"2024-12-25T00:22:59.007302Z","shell.execute_reply":"2024-12-25T00:22:59.010754Z"}},"outputs":[],"execution_count":12},{"cell_type":"code","source":"prompt = 'A nice black lexus 570 car running on the snowy road.'\nimage = tti(prompt, num_steps = 25, width = 320, heights = 240)\n# image = 
PIL.Image.open(BytesIO(image))\nimage.save('result.png')\nimage.show()","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2024-12-25T00:22:59.012694Z","iopub.execute_input":"2024-12-25T00:22:59.012942Z","iopub.status.idle":"2024-12-25T00:23:23.213670Z","shell.execute_reply.started":"2024-12-25T00:22:59.012911Z","shell.execute_reply":"2024-12-25T00:23:23.212714Z"}},"outputs":[{"output_type":"display_data","data":{"text/plain":"model_index.json: 0%| | 0.00/537 [00:00, received 2.\n warnings.warn(\n/usr/local/lib/python3.10/dist-packages/gradio/utils.py:1007: UserWarning: Expected at least 5 arguments for function , received 2.\n warnings.warn(\n","output_type":"stream"},{"name":"stdout","text":"* Running on local URL: http://127.0.0.1:7860\nKaggle notebooks require sharing enabled. Setting `share=True` (you can turn this off by setting `share=False` in `launch()` explicitly).\n\n* Running on public URL: https://d2c6c018093abcee72.gradio.live\n\nThis share link expires in 72 hours. For free permanent hosting and GPU upgrades, run `gradio deploy` from the terminal in the working directory to deploy to Hugging Face Spaces (https://huggingface.co/spaces)\n","output_type":"stream"},{"output_type":"display_data","data":{"text/plain":"","text/html":"
"},"metadata":{}},{"execution_count":14,"output_type":"execute_result","data":{"text/plain":""},"metadata":{}}],"execution_count":14},{"cell_type":"code","source":"# Shut down the Gradio server launched above and release its local/share URLs.\ndemo.close()","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2024-12-23T08:38:27.489912Z","iopub.execute_input":"2024-12-23T08:38:27.490274Z","iopub.status.idle":"2024-12-23T08:38:27.609053Z","shell.execute_reply.started":"2024-12-23T08:38:27.490243Z","shell.execute_reply":"2024-12-23T08:38:27.607832Z"}},"outputs":[],"execution_count":null},{"cell_type":"code","source":"","metadata":{"trusted":true},"outputs":[],"execution_count":null}]}