Guinnessgshep committed on
Commit b273812
0 Parent(s):

Duplicate from Guinnessgshep/playtime

Files changed (9)
  1. .gitattributes +35 -0
  2. INSTRUCTIONS.TXT +30 -0
  3. README.md +13 -0
  4. Run.py +7 -0
  5. app.py +12 -0
  6. cmd_linux.sh +19 -0
  7. start_linux.sh +64 -0
  8. update_linux.sh +26 -0
  9. webui.py +263 -0
.gitattributes ADDED
@@ -0,0 +1,35 @@
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tar filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zst filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
INSTRUCTIONS.TXT ADDED
@@ -0,0 +1,30 @@
+ Thank you for downloading oobabooga/text-generation-webui.
+
+ # Installation
+
+ Run the "start" script. If all goes right, it should take care of
+ everything for you.
+
+ To launch the web UI in the future after it is already installed, run
+ the same "start" script.
+
+ # Updating the web UI
+
+ Run the "update" script. This will only install the updates, so it should
+ be much faster than the initial installation.
+
+ # Adding flags like --chat, --notebook, etc
+
+ Edit the "webui.py" script using a text editor and add the desired flags
+ to the CMD_FLAGS variable at the top. It should look like this:
+
+ CMD_FLAGS = '--chat'
+
+ For instance, to add the --api flag, change it to
+
+ CMD_FLAGS = '--chat --api'
+
+ # Running an interactive shell
+
+ To run an interactive shell in the miniconda environment, run the "cmd"
+ script. This is useful for installing additional requirements manually.
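
For example, a minimal session in that shell might look like this sketch (the extra package here is only a hypothetical example of an "additional requirement"):

    bash cmd_linux.sh                      # opens a shell with the bundled conda env activated
    python -m pip install sentencepiece    # install whatever extra package you need
    exit                                   # leave the environment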
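
Putting the "start" and "update" steps together, a typical Linux session might look like the sketch below; OOBABOOGA_FLAGS is the environment-variable alternative to editing CMD_FLAGS that webui.py checks for:

    bash start_linux.sh                                   # first run: installs Miniconda, the conda env, and the web UI
    OOBABOOGA_FLAGS="--chat --api" bash start_linux.sh    # later runs, passing extra flags without editing webui.py
    bash update_linux.sh                                  # pull updates only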
README.md ADDED
@@ -0,0 +1,13 @@
+ ---
+ title: Playtime
+ emoji: 😻
+ colorFrom: purple
+ colorTo: indigo
+ sdk: gradio
+ sdk_version: 3.35.2
+ app_file: app.py
+ pinned: false
+ duplicated_from: Guinnessgshep/playtime
+ ---
+
+ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
Run.py ADDED
@@ -0,0 +1,7 @@
+ import os
+
+ # System packages and Python tooling
+ os.system('sudo apt update && sudo apt full-upgrade -y && sudo apt-get install -y build-essential')
+ os.system('pip install --upgrade pip')
+ os.system('pip install --upgrade gradio')
+ # Fetch the tunnelto client and authenticate it
+ os.system('wget https://github.com/agrinman/tunnelto/releases/download/0.1.18/tunnelto-linux.tar.gz && tar -xf tunnelto-linux.tar.gz')
+ os.system('./tunnelto set-auth --key GeBoAW1CSmWK5SbfOXUmU8')
+ # Download the two GPTQ models in parallel, then expose the UI through the tunnel
+ os.system('python download-model.py TheBloke_chronos-33b-GPTQ & python download-model.py TheBloke_Tulu-30B-SuperHOT-8K-GPTQ')
+ os.system('./tunnelto --subdomain ilovedogshit --port 7860 & ./start')
app.py ADDED
@@ -0,0 +1,12 @@
+ import os
+
+ # Build tooling for llama-cpp-python (CPU wheel first, then CUDA/OpenBLAS rebuilds)
+ os.system('pip install --upgrade cmake')
+ os.system('pip install llama-cpp-python pyllamacpp')
+ os.system('CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 pip install llama-cpp-python --no-cache-dir')
+ os.system('CMAKE_ARGS="-DLLAMA_OPENBLAS=on" FORCE_CMAKE=1 pip install llama-cpp-python==0.1.4')
+ os.system('apt update && apt full-upgrade -y && apt-get install -y build-essential')
+ os.system('pip install --upgrade pip')
+ os.system('pip install --upgrade gradio')
+ # Fetch the tunnelto client and authenticate it
+ os.system('wget https://github.com/agrinman/tunnelto/releases/download/0.1.18/tunnelto-linux.tar.gz && tar -xf tunnelto-linux.tar.gz')
+ os.system('./tunnelto set-auth --key GeBoAW1CSmWK5SbfOXUmU8')
+ # Download the two GPTQ models in parallel, then expose port 7860 through the tunnel while the web UI starts
+ os.system('python download-model.py TheBloke_chronos-33b-GPTQ & python download-model.py TheBloke_Tulu-30B-SuperHOT-8K-GPTQ')
+ os.system('./tunnelto --subdomain ilovedogshit --port 7860 & bash start_linux.sh')
cmd_linux.sh ADDED
@@ -0,0 +1,19 @@
+ #!/bin/bash
+
+ cd "$(dirname "${BASH_SOURCE[0]}")"
+
+ if [[ "$(pwd)" =~ " " ]]; then echo This script relies on Miniconda which can not be silently installed under a path with spaces. && exit; fi
+
+ # config
+ CONDA_ROOT_PREFIX="$(pwd)/installer_files/conda"
+ INSTALL_ENV_DIR="$(pwd)/installer_files/env"
+
+ # environment isolation
+ export PYTHONNOUSERSITE=1
+ unset PYTHONPATH
+ unset PYTHONHOME
+ export CUDA_PATH="$INSTALL_ENV_DIR"
+ export CUDA_HOME="$CUDA_PATH"
+
+ # activate env
+ bash --init-file <(echo "source \"$CONDA_ROOT_PREFIX/etc/profile.d/conda.sh\" && conda activate \"$INSTALL_ENV_DIR\"")
start_linux.sh ADDED
@@ -0,0 +1,64 @@
+ #!/bin/bash
+
+ cd "$(dirname "${BASH_SOURCE[0]}")"
+
+ if [[ "$(pwd)" =~ " " ]]; then echo This script relies on Miniconda which can not be silently installed under a path with spaces. && exit; fi
+
+ OS_ARCH=$(uname -m)
+ case "${OS_ARCH}" in
+     x86_64*)  OS_ARCH="x86_64";;
+     arm64*)   OS_ARCH="aarch64";;
+     aarch64*) OS_ARCH="aarch64";;
+     *)        echo "Unknown system architecture: $OS_ARCH! This script runs only on x86_64 or arm64" && exit
+ esac
+
+ # config
+ INSTALL_DIR="$(pwd)/installer_files"
+ CONDA_ROOT_PREFIX="$(pwd)/installer_files/conda"
+ INSTALL_ENV_DIR="$(pwd)/installer_files/env"
+ MINICONDA_DOWNLOAD_URL="https://repo.anaconda.com/miniconda/Miniconda3-py310_23.3.1-0-Linux-${OS_ARCH}.sh"
+ conda_exists="F"
+
+ # figure out whether git and conda needs to be installed
+ if "$CONDA_ROOT_PREFIX/bin/conda" --version &>/dev/null; then conda_exists="T"; fi
+
+ # (if necessary) install git and conda into a contained environment
+ # download miniconda
+ if [ "$conda_exists" == "F" ]; then
+     echo "Downloading Miniconda from $MINICONDA_DOWNLOAD_URL to $INSTALL_DIR/miniconda_installer.sh"
+
+     mkdir -p "$INSTALL_DIR"
+     curl -Lk "$MINICONDA_DOWNLOAD_URL" > "$INSTALL_DIR/miniconda_installer.sh"
+
+     chmod u+x "$INSTALL_DIR/miniconda_installer.sh"
+     bash "$INSTALL_DIR/miniconda_installer.sh" -b -p $CONDA_ROOT_PREFIX
+
+     # test the conda binary
+     echo "Miniconda version:"
+     "$CONDA_ROOT_PREFIX/bin/conda" --version
+ fi
+
+ # create the installer env
+ if [ ! -e "$INSTALL_ENV_DIR" ]; then
+     "$CONDA_ROOT_PREFIX/bin/conda" create -y -k --prefix "$INSTALL_ENV_DIR" python=3.10
+ fi
+
+ # check if conda environment was actually created
+ if [ ! -e "$INSTALL_ENV_DIR/bin/python" ]; then
+     echo "Conda environment is empty."
+     exit
+ fi
+
+ # environment isolation
+ export PYTHONNOUSERSITE=1
+ unset PYTHONPATH
+ unset PYTHONHOME
+ export CUDA_PATH="$INSTALL_ENV_DIR"
+ export CUDA_HOME="$CUDA_PATH"
+
+ # activate installer env
+ source "$CONDA_ROOT_PREFIX/etc/profile.d/conda.sh" # otherwise conda complains about 'shell not initialized' (needed when running in a script)
+ conda activate "$INSTALL_ENV_DIR"
+
+ # setup installer env
+ python webui.py
update_linux.sh ADDED
@@ -0,0 +1,26 @@
+ #!/bin/bash
+
+ cd "$(dirname "${BASH_SOURCE[0]}")"
+
+ if [[ "$(pwd)" =~ " " ]]; then echo This script relies on Miniconda which can not be silently installed under a path with spaces. && exit; fi
+
+ # config
+ CONDA_ROOT_PREFIX="$(pwd)/installer_files/conda"
+ INSTALL_ENV_DIR="$(pwd)/installer_files/env"
+
+ # environment isolation
+ export PYTHONNOUSERSITE=1
+ unset PYTHONPATH
+ unset PYTHONHOME
+ export CUDA_PATH="$INSTALL_ENV_DIR"
+ export CUDA_HOME="$CUDA_PATH"
+
+ # activate installer env
+ source "$CONDA_ROOT_PREFIX/etc/profile.d/conda.sh" # otherwise conda complains about 'shell not initialized' (needed when running in a script)
+ conda activate "$INSTALL_ENV_DIR"
+
+ # update installer env
+ python webui.py --update
+
+ echo
+ echo "Done!"
webui.py ADDED
@@ -0,0 +1,263 @@
+ import argparse
+ import glob
+ import os
+ import site
+ import subprocess
+ import sys
+
+ script_dir = os.getcwd()
+ conda_env_path = os.path.join(script_dir, "installer_files", "env")
+
+ # Use this to set your command-line flags. For the full list, see:
+ # https://github.com/oobabooga/text-generation-webui/#starting-the-web-ui
+ CMD_FLAGS = '--chat'
+
+
+ # Allows users to set flags in "OOBABOOGA_FLAGS" environment variable
+ if "OOBABOOGA_FLAGS" in os.environ:
+     CMD_FLAGS = os.environ["OOBABOOGA_FLAGS"]
+     print("The following flags have been taken from the environment variable 'OOBABOOGA_FLAGS':")
+     print(CMD_FLAGS)
+     print("To use the CMD_FLAGS inside webui.py, unset 'OOBABOOGA_FLAGS'.\n")
+
+
+ def print_big_message(message):
+     message = message.strip()
+     lines = message.split('\n')
+     print("\n\n*******************************************************************")
+     for line in lines:
+         if line.strip() != '':
+             print("*", line)
+
+     print("*******************************************************************\n\n")
+
+
+ def run_cmd(cmd, assert_success=False, environment=False, capture_output=False, env=None):
+     # Use the conda environment
+     if environment:
+         if sys.platform.startswith("win"):
+             conda_bat_path = os.path.join(script_dir, "installer_files", "conda", "condabin", "conda.bat")
+             cmd = "\"" + conda_bat_path + "\" activate \"" + conda_env_path + "\" >nul && " + cmd
+         else:
+             conda_sh_path = os.path.join(script_dir, "installer_files", "conda", "etc", "profile.d", "conda.sh")
+             cmd = ". \"" + conda_sh_path + "\" && conda activate \"" + conda_env_path + "\" && " + cmd
+
+     # Run shell commands
+     result = subprocess.run(cmd, shell=True, capture_output=capture_output, env=env)
+
+     # Assert the command ran successfully
+     if assert_success and result.returncode != 0:
+         print("Command '" + cmd + "' failed with exit status code '" + str(result.returncode) + "'. Exiting...")
+         sys.exit()
+
+     return result
+
+
+ def check_env():
+     # If we have access to conda, we are probably in an environment
+     conda_exist = run_cmd("conda", environment=True, capture_output=True).returncode == 0
+     if not conda_exist:
+         print("Conda is not installed. Exiting...")
+         sys.exit()
+
+     # Ensure this is a new environment and not the base environment
+     if os.environ["CONDA_DEFAULT_ENV"] == "base":
+         print("Create an environment for this project and activate it. Exiting...")
+         sys.exit()
+
+
+ def install_dependencies():
+     # Automatically select NVIDIA
+     gpuchoice = "a"
+
+     if gpuchoice == "d":
+         print_big_message("Once the installation ends, make sure to open webui.py with a text editor\nand add the --cpu flag to CMD_FLAGS.")
+
+     # Install the version of PyTorch needed
+     if gpuchoice == "a":
+         run_cmd('conda install -y -k cuda ninja git -c nvidia/label/cuda-11.7.0 -c nvidia && python -m pip install torch==2.0.1+cu117 torchvision torchaudio --index-url https://download.pytorch.org/whl/cu117', assert_success=True, environment=True)
+     elif gpuchoice == "b":
+         print("AMD GPUs are not supported. Exiting...")
+         sys.exit()
+     elif gpuchoice == "c" or gpuchoice == "d":
+         run_cmd("conda install -y -k ninja git && python -m pip install torch torchvision torchaudio", assert_success=True, environment=True)
+     else:
+         print("Invalid choice. Exiting...")
+         sys.exit()
+
+     # Clone webui to our computer
+     run_cmd("git clone https://github.com/oobabooga/text-generation-webui.git", assert_success=True, environment=True)
+
+     # Install the webui dependencies
+     update_dependencies()
+
+ def update_dependencies():
+     os.chdir("text-generation-webui")
+     run_cmd("git pull", assert_success=True, environment=True)
+
+     # Workaround for git+ packages not updating properly
+     with open("requirements.txt") as f:
+         requirements = f.read().splitlines()
+     git_requirements = [req for req in requirements if req.startswith("git+")]
+
+     # Loop through each "git+" requirement and uninstall it
+     for req in git_requirements:
+         # Extract the package name from the "git+" requirement
+         url = req.replace("git+", "")
+         package_name = url.split("/")[-1].split("@")[0]
+
+         # Uninstall the package using pip
+         run_cmd("python -m pip uninstall -y " + package_name, environment=True)
+         print(f"Uninstalled {package_name}")
+
+     # Installs/Updates dependencies from all requirements.txt
+     run_cmd("python -m pip install -r requirements.txt --upgrade", assert_success=True, environment=True)
+     extensions = next(os.walk("extensions"))[1]
+     for extension in extensions:
+         if extension in ['superbooga']:  # No wheels available for dependencies
+             continue
+
+         extension_req_path = os.path.join("extensions", extension, "requirements.txt")
+         if os.path.exists(extension_req_path):
+             run_cmd("python -m pip install -r " + extension_req_path + " --upgrade", assert_success=True, environment=True)
+
+     # Latest bitsandbytes requires minimum compute 7.0
+     # nvcc_device_query = "__nvcc_device_query" if not sys.platform.startswith("win") else "__nvcc_device_query.exe"
+     # min_compute = 70
+     # compute_array = run_cmd(os.path.join(conda_env_path, "bin", nvcc_device_query), environment=True, capture_output=True)
+     # old_bnb = "bitsandbytes==0.38.1" if not sys.platform.startswith("win") else "https://github.com/jllllll/bitsandbytes-windows-webui/raw/main/bitsandbytes-0.38.1-py3-none-any.whl"
+     # if compute_array.returncode == 0 and not any(int(compute) >= min_compute for compute in compute_array.stdout.decode('utf-8').split(',')):
+     #     old_bnb_install = run_cmd(f"python -m pip install {old_bnb} --force-reinstall --no-deps", environment=True).returncode == 0
+     #     message = "\n\nWARNING: GPU with compute < 7.0 detected!\n"
+     #     if old_bnb_install:
+     #         message += "Older version of bitsandbytes has been installed to maintain compatibility.\n"
+     #         message += "You will be unable to use --load-in-4bit!\n"
+     #     else:
+     #         message += "You will be unable to use --load-in-8bit until you install bitsandbytes 0.38.1!\n"
+
+     #     print_big_message(message)
+
+     # The following dependencies are for CUDA, not CPU
+     # Parse output of 'pip show torch' to determine torch version
+     torver_cmd = run_cmd("python -m pip show torch", assert_success=True, environment=True, capture_output=True)
+     torver = [v.split()[1] for v in torver_cmd.stdout.decode('utf-8').splitlines() if 'Version:' in v][0]
+
+     # Check for '+cu' in the version string to determine if torch uses CUDA or not; check for pytorch-cuda as well for backwards compatibility
+     if '+cu' not in torver and run_cmd("conda list -f pytorch-cuda | grep pytorch-cuda", environment=True, capture_output=True).returncode == 1:
+         return
+
+     # Finds the path to your dependencies
+     site_packages_path = None
+     for sitedir in site.getsitepackages():
+         if "site-packages" in sitedir:
+             site_packages_path = sitedir
+             break
+
+     # This path is critical to installing the following dependencies
+     if site_packages_path is None:
+         print("Could not find the path to your Python packages. Exiting...")
+         sys.exit()
+
+     # Fix a bitsandbytes compatibility issue with Linux
+     # if sys.platform.startswith("linux"):
+     #     shutil.copy(os.path.join(site_packages_path, "bitsandbytes", "libbitsandbytes_cuda117.so"), os.path.join(site_packages_path, "bitsandbytes", "libbitsandbytes_cpu.so"))
+
+     if not os.path.exists("repositories/"):
+         os.mkdir("repositories")
+
+     os.chdir("repositories")
+
+     # Install or update exllama as needed
+     if not os.path.exists("exllama/"):
+         run_cmd("git clone https://github.com/turboderp/exllama.git", environment=True)
+     else:
+         os.chdir("exllama")
+         run_cmd("git pull", environment=True)
+         os.chdir("..")
+
+     # Fix build issue with exllama in Linux/WSL
+     if sys.platform.startswith("linux") and not os.path.exists(f"{conda_env_path}/lib64"):
+         run_cmd(f'ln -s "{conda_env_path}/lib" "{conda_env_path}/lib64"', environment=True)
+
+     # Install GPTQ-for-LLaMa which enables 4bit CUDA quantization
+     if not os.path.exists("GPTQ-for-LLaMa/"):
+         run_cmd("git clone https://github.com/oobabooga/GPTQ-for-LLaMa.git -b cuda", assert_success=True, environment=True)
+
+     # Install GPTQ-for-LLaMa dependencies
+     os.chdir("GPTQ-for-LLaMa")
+     run_cmd("git pull", assert_success=True, environment=True)
+
+     # On some Linux distributions, g++ may not exist or be the wrong version to compile GPTQ-for-LLaMa
+     if sys.platform.startswith("linux"):
+         gxx_output = run_cmd("g++ -dumpfullversion -dumpversion", environment=True, capture_output=True)
+         if gxx_output.returncode != 0 or int(gxx_output.stdout.strip().split(b".")[0]) > 11:
+             # Install the correct version of g++
+             run_cmd("conda install -y -k gxx_linux-64=11.2.0", environment=True)
+
+     # Compile and install GPTQ-for-LLaMa
+     if os.path.exists('setup_cuda.py'):
+         os.rename("setup_cuda.py", "setup.py")
+
+     run_cmd("python -m pip install .", environment=True)
+
+     # Wheel installation can fail while in the build directory of a package with the same name
+     os.chdir("..")
+
+     # If the path does not exist, then the install failed
+     quant_cuda_path_regex = os.path.join(site_packages_path, "quant_cuda*/")
+     if not glob.glob(quant_cuda_path_regex):
+         # Attempt installation via alternative, Windows/Linux-specific method
+         if sys.platform.startswith("win") or sys.platform.startswith("linux"):
+             print_big_message("WARNING: GPTQ-for-LLaMa compilation failed, but this is FINE and can be ignored!\nThe installer will proceed to install a pre-compiled wheel.")
+             url = "https://github.com/jllllll/GPTQ-for-LLaMa-Wheels/raw/main/quant_cuda-0.0.0-cp310-cp310-win_amd64.whl"
+             if sys.platform.startswith("linux"):
+                 url = "https://github.com/jllllll/GPTQ-for-LLaMa-Wheels/raw/Linux-x64/quant_cuda-0.0.0-cp310-cp310-linux_x86_64.whl"
+
+             result = run_cmd("python -m pip install " + url, environment=True)
+             if result.returncode == 0:
+                 print("Wheel installation success!")
+             else:
+                 print("ERROR: GPTQ wheel installation failed. You will not be able to use GPTQ-based models.")
+         else:
+             print("ERROR: GPTQ CUDA kernel compilation failed.")
+             print("You will not be able to use GPTQ-based models.")
+
+     print("Continuing with install..")
+
+
+ def download_model():
+     os.chdir("text-generation-webui")
+     run_cmd("python download-model.py", environment=True)
+
+
+ def launch_webui():
+     os.chdir("text-generation-webui")
+     run_cmd(f"python server.py {CMD_FLAGS}", environment=True)
+
+
+ if __name__ == "__main__":
+     # Verifies we are in a conda environment
+     check_env()
+
+     parser = argparse.ArgumentParser()
+     parser.add_argument('--update', action='store_true', help='Update the web UI.')
+     args = parser.parse_args()
+
+     if args.update:
+         update_dependencies()
+     else:
+         # If webui has already been installed, skip and run
+         if not os.path.exists("text-generation-webui/"):
+             install_dependencies()
+             os.chdir(script_dir)
+
+         # Check if a model has been downloaded yet
+         if len([item for item in glob.glob('text-generation-webui/models/*') if not item.endswith(('.txt', '.yaml'))]) == 0:
+             print_big_message("WARNING: You haven't downloaded any model yet.\nOnce the web UI launches, head over to the bottom of the \"Model\" tab and download one.")
+
+         # Workaround for llama-cpp-python loading paths in CUDA env vars even if they do not exist
+         conda_path_bin = os.path.join(conda_env_path, "bin")
+         if not os.path.exists(conda_path_bin):
+             os.mkdir(conda_path_bin)
+
+         # Launch the webui
+         launch_webui()