Akshat1000 commited on
Commit
8c610d5
·
verified ·
1 Parent(s): 09831b3

Upload llllmmm.ipynb

Browse files
Files changed (1) hide show
  1. llllmmm.ipynb +144 -0
llllmmm.ipynb ADDED
@@ -0,0 +1,144 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "id": "initial_id",
6
+ "metadata": {
7
+ "ExecuteTime": {
8
+ "end_time": "2024-07-01T11:08:00.253851Z",
9
+ "start_time": "2024-07-01T11:08:00.067738Z"
10
+ }
11
+ },
12
+ "source": [
13
+ "import torch\n",
14
+ "from transformers import AutoTokenizer, AutoModelForCausalLM,TrainingArguments\n",
15
+ "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")"
16
+ ],
17
+ "outputs": [],
18
+ "execution_count": 2
19
+ },
20
+ {
21
+ "cell_type": "code",
22
+ "id": "df5ce2489db64f8d",
23
+ "metadata": {
24
+ "ExecuteTime": {
25
+ "end_time": "2024-07-01T11:08:15.731889Z",
26
+ "start_time": "2024-07-01T11:08:03.640950Z"
27
+ }
28
+ },
29
+ "source": [
30
+ "model = AutoModelForCausalLM.from_pretrained(\"meta-llama/Llama-2-7b-hf\", device_map=\"auto\", torch_dtype = \"auto\",cache_dir=\"D:/a\" )\n",
31
+ "tokenizer = AutoTokenizer.from_pretrained(\"meta-llama/Llama-2-7b-hf\", cache_dir=\"D:/a\", use_fast=True)"
32
+ ],
33
+ "outputs": [
34
+ {
35
+ "data": {
36
+ "text/plain": [
37
+ "Loading checkpoint shards: 0%| | 0/2 [00:00<?, ?it/s]"
38
+ ],
39
+ "application/vnd.jupyter.widget-view+json": {
40
+ "version_major": 2,
41
+ "version_minor": 0,
42
+ "model_id": "475b288bdaa84157adbb7b99f89c7e5c"
43
+ }
44
+ },
45
+ "metadata": {},
46
+ "output_type": "display_data"
47
+ },
48
+ {
49
+ "name": "stderr",
50
+ "output_type": "stream",
51
+ "text": [
52
+ "WARNING:root:Some parameters are on the meta device device because they were offloaded to the disk.\n"
53
+ ]
54
+ }
55
+ ],
56
+ "execution_count": 3
57
+ },
58
+ {
59
+ "cell_type": "code",
60
+ "id": "7ce8ee88e61ac738",
61
+ "metadata": {
62
+ "ExecuteTime": {
63
+ "end_time": "2024-07-01T11:08:50.346303Z",
64
+ "start_time": "2024-07-01T11:08:50.336252Z"
65
+ }
66
+ },
67
+ "source": [
68
+ "def get_llama2_response(prompt, max_new_tokens=50):\n",
69
+ " inputs = tokenizer(prompt, return_tensors=\"pt\").to(device)\n",
70
+ " outputs = model.generate(**inputs, max_new_tokens=max_new_tokens, do_sample=False)\n",
71
+ " response = tokenizer.decode(outputs[0], skip_special_tokens=True)\n",
72
+ " return response"
73
+ ],
74
+ "outputs": [],
75
+ "execution_count": 7
76
+ },
77
+ {
78
+ "metadata": {},
79
+ "cell_type": "code",
80
+ "outputs": [
81
+ {
82
+ "data": {
83
+ "text/plain": [
84
+ "'Q:how to find the llama A:The llama is a South American camelid, a domesticated species of the genus Lama. It is used for'"
85
+ ]
86
+ },
87
+ "execution_count": 9,
88
+ "metadata": {},
89
+ "output_type": "execute_result"
90
+ }
91
+ ],
92
+ "execution_count": 9,
93
+ "source": [
94
+ "prompt = \"Q:how to find the llama A:\"\n",
95
+ "get_llama2_response(prompt, max_new_tokens=25)"
96
+ ],
97
+ "id": "a6ad65ea85069793"
98
+ },
99
+ {
100
+ "metadata": {
101
+ "jupyter": {
102
+ "is_executing": true
103
+ }
104
+ },
105
+ "cell_type": "code",
106
+ "source": [
107
+ "prompt = \"Q:SQL query to extract data from Employee Table A:\"\n",
108
+ "get_llama2_response(prompt, max_new_tokens=25)"
109
+ ],
110
+ "id": "de9f0fcc6dc9fa82",
111
+ "outputs": [],
112
+ "execution_count": null
113
+ },
114
+ {
115
+ "metadata": {},
116
+ "cell_type": "code",
117
+ "source": "",
118
+ "id": "e7f9c4411f2d8b57",
119
+ "outputs": [],
120
+ "execution_count": null
121
+ }
122
+ ],
123
+ "metadata": {
124
+ "kernelspec": {
125
+ "display_name": "Python 3 (ipykernel)",
126
+ "language": "python",
127
+ "name": "python3"
128
+ },
129
+ "language_info": {
130
+ "codemirror_mode": {
131
+ "name": "ipython",
132
+ "version": 3
133
+ },
134
+ "file_extension": ".py",
135
+ "mimetype": "text/x-python",
136
+ "name": "python",
137
+ "nbconvert_exporter": "python",
138
+ "pygments_lexer": "ipython3",
139
+ "version": "3.12.4"
140
+ }
141
+ },
142
+ "nbformat": 4,
143
+ "nbformat_minor": 5
144
+ }