oobabooga/textgen

Colab-TextGen-GPU.ipynb

After running both cells, a public Gradio URL will appear at the bottom after about 10 minutes. Ticking the api checkbox in cell 2 additionally generates a public API link (a sketch of querying it follows the cells).

```python
#@title 1. Keep this tab alive to prevent Colab from disconnecting you { display-mode: "form" }

#@markdown Press play on the music player that will appear below:
%%html
<audio src="https://oobabooga.github.io/silence.m4a" controls></audio>
```

```python
#@title 2. Launch the web UI

#@markdown You can provide a direct GGUF link or a Hugging Face model URL.
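#@markdown Accepted forms (USER/REPO/FILE are placeholders):
#@markdown * Direct file link: https://huggingface.co/USER/REPO/resolve/main/FILE.gguf
#@markdown * Repo URL or bare name: https://huggingface.co/USER/REPO or USER/REPO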

import os
from pathlib import Path

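# Clear variables preset by the notebook environment so they don't
# leak into the web UI's isolated install.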
os.environ.pop('PYTHONPATH', None)
os.environ.pop('MPLBACKEND', None)

if Path.cwd().name != 'textgen':
  print("\033[1;32;1m\n --> Installing the web UI. This will take a while, but after the initial setup, you can download and test as many models as you like.\033[0;37;0m\n")

  !git clone https://github.com/oobabooga/textgen
  %cd textgen

  # Install the project in an isolated environment
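  # GPU_CHOICE=A selects the installer's first GPU option (NVIDIA, which is
  # what Colab provides); the other two variables skip auto-launching the UI
  # and installing extensions during setup.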
  !GPU_CHOICE=A \
  LAUNCH_AFTER_INSTALL=FALSE \
  INSTALL_EXTENSIONS=FALSE \
  ./start_linux.sh

# Parameters
model_url = "https://huggingface.co/unsloth/Qwen3.5-9B-GGUF/resolve/main/Qwen3.5-9B-Q4_K_M.gguf" #@param {type:"string"}
branch = "" #@param {type:"string"}
command_line_flags = "--load-in-4bit --use_double_quant" #@param {type:"string"}
api = False #@param {type:"boolean"}

if api:
  for param in ['--api', '--public-api']:
    if param not in command_line_flags:
      command_line_flags += f" {param}"

model_url = model_url.strip()
model_name = ""
if model_url != "":
    if not model_url.startswith('http'):
        model_url = 'https://huggingface.co/' + model_url

    branch = branch.strip()
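    # A '/resolve/' URL points at a single file, so its filename (query string
    # stripped) becomes the model name; otherwise derive a USER_REPO name.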
    if '/resolve/' in model_url:
        model_name = model_url.split('?')[0].split('/')[-1]
        !python download-model.py {model_url}
    else:
        url_parts = model_url.strip('/').split('/')
        model_name = f"{url_parts[-2]}_{url_parts[-1]}"
        if branch not in ['', 'main']:
            model_name += f"_{branch}"
            !python download-model.py {model_url} --branch {branch}
        else:
            !python download-model.py {model_url}

# Start the web UI
cmd = f"./start_linux.sh {command_line_flags} --share"
if model_name != "":
    cmd += f" --model {model_name}"

!$cmd
```
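For reference, with the default model_url above, model_name resolves to the bare filename Qwen3.5-9B-Q4_K_M.gguf, so the UI launches with --model Qwen3.5-9B-Q4_K_M.gguf; a repo-style URL yields a USER_REPO name instead (plus a _BRANCH suffix for non-main branches).

If you ticked the api checkbox, the cell output also prints a public API URL. Below is a minimal sketch of querying it, assuming the OpenAI-compatible endpoint that text-generation-webui's API extension exposes; the base URL is a placeholder to replace with the one printed in the logs.

```python
import requests

# Placeholder: substitute the public API URL printed by cell 2.
API_BASE = "https://example-tunnel.trycloudflare.com"

# Query the OpenAI-compatible chat endpoint (an assumption based on
# text-generation-webui's API extension; adjust if your version differs).
response = requests.post(
    f"{API_BASE}/v1/chat/completions",
    json={
        "messages": [{"role": "user", "content": "Hello! Who are you?"}],
        "max_tokens": 128,
    },
    timeout=120,
)
print(response.json()["choices"][0]["message"]["content"])
```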