Running miqu on HF Spaces

#20
by Empereur-Pirate - opened

Hello, I'm trying to run miqu on my HF Space. So far I've managed to link the model to my Space, but even with the Dockerfile I could not install all the needed libraries, in my case "sentencepiece". I would appreciate any suggestions.

main.py:

import os
import sys

import torch
from fastapi import FastAPI, Request, Depends
from fastapi.responses import JSONResponse
from fastapi.staticfiles import StaticFiles
from huggingface_hub import login
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig, TextStreamer

# Read the access token that should be configured as a secret in the Space
try:
    HUGGINGFACE_TOKEN = os.environ["HF_ACCESS_TOKEN"]
except KeyError:
    print('The environment variable "HF_ACCESS_TOKEN" is not set. Please configure it correctly in your Space.')
    sys.exit(1)

# Check whether we are executing inside a Hugging Face Space
SPACE_NAME = os.getenv("SPACE_NAME", default=None)
if SPACE_NAME is not None:
    print(f"Running inside {SPACE_NAME} Space.")

try:
    # Authenticate against the Hub explicitly with the token
    login(token=HUGGINGFACE_TOKEN)
except Exception as e:
    print(f"Failed to log in ({e}). Check the HF_ACCESS_TOKEN secret.")
    sys.exit(1)

# Model loading
base_model_id = "152334H/miqu-1-70b-sf"
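# 4-bit NF4 quantization with double quantization keeps the 70B weights at
# roughly 35 GB, with computation in bfloat16 (a large GPU is still required)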
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_use_double_quant=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.bfloat16
)

base_model = AutoModelForCausalLM.from_pretrained(
    base_model_id,
    quantization_config=bnb_config,
    device_map="auto",
    trust_remote_code=True,
)

# Tokenizer loading (the Llama-2 tokenizer repo is gated, hence the token)
eval_tokenizer = AutoTokenizer.from_pretrained(
    "meta-llama/Llama-2-70b-hf",
    add_bos_token=True,
    trust_remote_code=True,
    token=HUGGINGFACE_TOKEN,
)

# Streamer
streamer = TextStreamer(eval_tokenizer)

# App definition
app = FastAPI()

# Helper function to read raw request bodies
async def parse_raw(request: Request):
    return await request.body()

# Generate text
def generate_text(prompt: str) -> str:
    model_input = eval_tokenizer(prompt, return_tensors="pt").to("cuda")

    base_model.eval()
    with torch.no_grad():
        generated_sequences = base_model.generate(
            **model_input,
            max_new_tokens=4096,
            repetition_penalty=1.1,
            do_sample=True,
            temperature=1,
            streamer=streamer,
        )

    return eval_tokenizer.decode(generated_sequences[0], skip_special_tokens=True)

# Route for generating text: accepts a raw text/plain request body
@app.post("/generate_text")
async def generate_text_route(data: bytes = Depends(parse_raw)):
    # parse_raw yields the raw body bytes, not a Pydantic model
    input_text = data.decode("utf-8")
    if not input_text:
        return JSONResponse({"error": "Empty input received."}, status_code=400)

    return {"output": generate_text(input_text)}

# Mount static files
app.mount("/static", StaticFiles(directory="static"), name="static")
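
For reference, once the Space is running, the endpoint takes a raw text/plain body. A minimal sketch of a client call with the requests library (the URL is a placeholder for your own *.hf.space address):

import requests

# Hypothetical Space URL; substitute your own
url = "https://your-username-your-space.hf.space/generate_text"

response = requests.post(
    url,
    data="[INST] Eloquent high camp prose about a cute catgirl [/INST]".encode("utf-8"),
    headers={"Content-Type": "text/plain"},
    timeout=600,  # generation on a 70B model can take a while
)
response.raise_for_status()
print(response.json()["output"])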

requirements.txt:

fastapi==0.74.
requests==2.27.
sentencepiece==0.1.
torch==1.11.
transformers==4.
uvicorn[standard]==0.17.
python-dotenv==0.21.
bitsandbytes==0.31.8
accelerate==0.27.

Dockerfile:

FROM python:3.9

# Install dependencies for building SentencePiece
RUN apt-get update && \
    apt-get install -y protobuf-compiler libprotobuf-dev cmake zlib1g-dev git wget && \
    rm -rf /var/lib/apt/lists/*

WORKDIR /code
COPY requirements.txt /code/requirements.txt
RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt

# Upgrade bitsandbytes
RUN pip install --no-cache-dir --force-reinstall --upgrade bitsandbytes

# Install accelerate
RUN pip install accelerate

# Set up a new user named "user" with user ID 1000
RUN useradd -m -u 1000 user

# Switch to the "user" user
USER user

# Set home to the user's home directory
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH

# Set the working directory to the user's home directory
WORKDIR $HOME/app

# Copy the current directory contents into the container at $HOME/app setting the owner to the user
COPY --chown=user . $HOME/app

CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]

Error report:

Build error
Build failed with exit code: 1
Build logs:

===== Build Queued at 2024-02-23 17:44:53 / Commit SHA: 8952d93 =====

--> FROM docker.io/library/python:3.9@sha256:383d072c4b840507f25453c710969aa1e1d13e47731f294a8a8890e53f834bdf
DONE 0.0s

DONE 0.0s

DONE 3.0s

DONE 6.1s

DONE 6.2s

--> RUN apt-get update &&     apt-get install -y protobuf-compiler libprotobuf-dev cmake zlib1g-dev git wget &&     rm -rf /var/lib/apt/lists/*
Get:1 http://deb.debian.org/debian bookworm InRelease [151 kB]
Get:2 http://deb.debian.org/debian bookworm-updates InRelease [55.4 kB]
Get:3 http://deb.debian.org/debian-security bookworm-security InRelease [48.0 kB]
Get:4 http://deb.debian.org/debian bookworm/main amd64 Packages [8786 kB]
Get:5 http://deb.debian.org/debian bookworm-updates/main amd64 Packages [12.7 kB]
Get:6 http://deb.debian.org/debian-security bookworm-security/main amd64 Packages [143 kB]
Fetched 9196 kB in 1s (8904 kB/s)
Reading package lists...
Reading package lists...
Building dependency tree...
Reading state information...
zlib1g-dev is already the newest version (1:1.2.13.dfsg-1).
git is already the newest version (1:2.39.2-1.1).
wget is already the newest version (1.21.3-1+b2).
The following additional packages will be installed:
  cmake-data libarchive13 libjsoncpp25 libprotobuf-lite32 libprotobuf32
  libprotoc32 librhash0 libuv1
Suggested packages:
  cmake-doc cmake-format elpa-cmake-mode ninja-build lrzip protobuf-mode-el
The following NEW packages will be installed:
  cmake cmake-data libarchive13 libjsoncpp25 libprotobuf-dev
  libprotobuf-lite32 libprotobuf32 libprotoc32 librhash0 libuv1
  protobuf-compiler
0 upgraded, 11 newly installed, 0 to remove and 15 not upgraded.
Need to get 14.8 MB of archives.
After this operation, 61.6 MB of additional disk space will be used.
Get:1 http://deb.debian.org/debian bookworm/main amd64 libarchive13 amd64 3.6.2-1 [343 kB]
Get:2 http://deb.debian.org/debian bookworm/main amd64 libjsoncpp25 amd64 1.9.5-4 [78.6 kB]
Get:3 http://deb.debian.org/debian bookworm/main amd64 librhash0 amd64 1.4.3-3 [134 kB]
Get:4 http://deb.debian.org/debian bookworm/main amd64 libuv1 amd64 1.44.2-1 [140 kB]
Get:5 http://deb.debian.org/debian bookworm/main amd64 cmake-data all 3.25.1-1 [2026 kB]
Get:6 http://deb.debian.org/debian bookworm/main amd64 cmake amd64 3.25.1-1 [8692 kB]
Get:7 http://deb.debian.org/debian bookworm/main amd64 libprotobuf32 amd64 3.21.12-3 [932 kB]
Get:8 http://deb.debian.org/debian bookworm/main amd64 libprotobuf-lite32 amd64 3.21.12-3 [261 kB]
Get:9 http://deb.debian.org/debian bookworm/main amd64 libprotobuf-dev amd64 3.21.12-3 [1283 kB]
Get:10 http://deb.debian.org/debian bookworm/main amd64 libprotoc32 amd64 3.21.12-3 [829 kB]
Get:11 http://deb.debian.org/debian bookworm/main amd64 protobuf-compiler amd64 3.21.12-3 [83.9 kB]
debconf: delaying package configuration, since apt-utils is not installed
Fetched 14.8 MB in 0s (147 MB/s)
Selecting previously unselected package libarchive13:amd64.
(Reading database ... 23974 files and directories currently installed.)
Preparing to unpack .../00-libarchive13_3.6.2-1_amd64.deb ...
Unpacking libarchive13:amd64 (3.6.2-1) ...
Selecting previously unselected package libjsoncpp25:amd64.
Preparing to unpack .../01-libjsoncpp25_1.9.5-4_amd64.deb ...
Unpacking libjsoncpp25:amd64 (1.9.5-4) ...
Selecting previously unselected package librhash0:amd64.
Preparing to unpack .../02-librhash0_1.4.3-3_amd64.deb ...
Unpacking librhash0:amd64 (1.4.3-3) ...
Selecting previously unselected package libuv1:amd64.
Preparing to unpack .../03-libuv1_1.44.2-1_amd64.deb ...
Unpacking libuv1:amd64 (1.44.2-1) ...
Selecting previously unselected package cmake-data.
Preparing to unpack .../04-cmake-data_3.25.1-1_all.deb ...
Unpacking cmake-data (3.25.1-1) ...
Selecting previously unselected package cmake.
Preparing to unpack .../05-cmake_3.25.1-1_amd64.deb ...
Unpacking cmake (3.25.1-1) ...
Selecting previously unselected package libprotobuf32:amd64.
Preparing to unpack .../06-libprotobuf32_3.21.12-3_amd64.deb ...
Unpacking libprotobuf32:amd64 (3.21.12-3) ...
Selecting previously unselected package libprotobuf-lite32:amd64.
Preparing to unpack .../07-libprotobuf-lite32_3.21.12-3_amd64.deb ...
Unpacking libprotobuf-lite32:amd64 (3.21.12-3) ...
Selecting previously unselected package libprotobuf-dev:amd64.
Preparing to unpack .../08-libprotobuf-dev_3.21.12-3_amd64.deb ...
Unpacking libprotobuf-dev:amd64 (3.21.12-3) ...
Selecting previously unselected package libprotoc32:amd64.
Preparing to unpack .../09-libprotoc32_3.21.12-3_amd64.deb ...
Unpacking libprotoc32:amd64 (3.21.12-3) ...
Selecting previously unselected package protobuf-compiler.
Preparing to unpack .../10-protobuf-compiler_3.21.12-3_amd64.deb ...
Unpacking protobuf-compiler (3.21.12-3) ...
Setting up libarchive13:amd64 (3.6.2-1) ...
Setting up libuv1:amd64 (1.44.2-1) ...
Setting up libjsoncpp25:amd64 (1.9.5-4) ...
Setting up libprotobuf32:amd64 (3.21.12-3) ...
Setting up librhash0:amd64 (1.4.3-3) ...
Setting up libprotobuf-lite32:amd64 (3.21.12-3) ...
Setting up cmake-data (3.25.1-1) ...
Setting up libprotoc32:amd64 (3.21.12-3) ...
Setting up protobuf-compiler (3.21.12-3) ...
Setting up libprotobuf-dev:amd64 (3.21.12-3) ...
Setting up cmake (3.25.1-1) ...
Processing triggers for libc-bin (2.36-9+deb12u4) ...
DONE 3.9s

--> WORKDIR /code
DONE 0.0s

--> COPY requirements.txt /code/requirements.txt
DONE 0.0s

--> RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
Collecting fastapi==0.74.
  Downloading fastapi-0.74.0-py3-none-any.whl (53 kB)
     ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 53.6/53.6 kB 8.0 MB/s eta 0:00:00
Collecting requests==2.27.
  Downloading requests-2.27.0-py2.py3-none-any.whl (63 kB)
     ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 63.1/63.1 kB 73.8 MB/s eta 0:00:00
Collecting sentencepiece==0.1.
  Downloading sentencepiece-0.1.0.tar.gz (492 kB)
     ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 492.6/492.6 kB 163.8 MB/s eta 0:00:00
  Preparing metadata (setup.py): started
  Preparing metadata (setup.py): finished with status 'error'
  error: subprocess-exited-with-error
  
  × python setup.py egg_info did not run successfully.
  │ exit code: 1
  ╰─> [5 lines of output]
      Package sentencepiece was not found in the pkg-config search path.
      Perhaps you should add the directory containing `sentencepiece.pc'
      to the PKG_CONFIG_PATH environment variable
      Package 'sentencepiece', required by 'virtual:world', not found
      Failed to find sentencepiece pkgconfig
      [end of output]
  
  note: This error originates from a subprocess, and is likely not a problem with pip.
error: metadata-generation-failed

× Encountered error while generating package metadata.
╰─> See above for output.

note: This is an issue with the package mentioned above, not pip.
hint: See above for details.

[notice] A new release of pip is available: 23.0.1 -> 24.0
[notice] To update, run: pip install --upgrade pip

--> ERROR: process "/bin/sh -c pip install --no-cache-dir --upgrade -r /code/requirements.txt" did not complete successfully: exit code: 1
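
The failure above comes from the truncated version pins: pip resolves "sentencepiece==0.1." to the ancient 0.1.0 source release, which tries to build against a system-wide sentencepiece library via pkg-config and fails. Fully pinned modern versions ship prebuilt wheels and skip the compile step entirely; a sketch of a corrected requirements.txt (the exact pins are assumptions, use whichever recent versions you have tested together):

fastapi==0.110.0
requests==2.31.0
sentencepiece==0.1.99
torch==2.1.2
transformers==4.38.1
uvicorn[standard]==0.27.1
python-dotenv==1.0.1
bitsandbytes==0.42.0
accelerate==0.27.2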

The last issue was about version numbers in requirements.txt. Now I've chosen another approach: I'm trying to use the HF API from my Space without downloading the model each time. I debugged the code, but nothing happens when I click the submit button. Please help!
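
On the server side, a minimal sketch of what that could look like with huggingface_hub's InferenceClient, so the Space never loads the weights itself (whether the serverless Inference API actually serves this particular 70B model is a separate question; a dedicated Inference Endpoint may be needed):

import os
from huggingface_hub import InferenceClient

# Assumes the HF_ACCESS_TOKEN secret is configured in the Space
client = InferenceClient(model="152334H/miqu-1-70b-sf", token=os.environ["HF_ACCESS_TOKEN"])

def generate_text(prompt: str) -> str:
    # The prompt is sent to the hosted backend; no local GPU or download needed
    return client.text_generation(prompt, max_new_tokens=512, temperature=1.0)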
script.js:

function revealContent() {
  const loader = document.getElementById('loader');
  const textGenContainer = document.getElementById('text-gen-container');
  // Guard against missing elements (index.html has no #text-gen-container)
  if (loader) loader.classList.remove('hidden');
  if (textGenContainer) textGenContainer.classList.remove('hidden');
}

// Call the function once the DOM is loaded
document.addEventListener('DOMContentLoaded', () => {
  revealContent();
});

// The form is identified by id in index.html, not by class
const textGenForm = document.getElementById("text-gen-form");

const generateText = async (text) => {
  const response = await fetch('/generate_text', {
    method: 'POST',
    headers: {
      'Content-Type': 'text/plain'
    },
    body: text
  });

  if (!response.ok) {
    throw new Error(`Request failed with status ${response.status}`);
  }

  const data = await response.json();

  return data.output;
};

// async so the fetch helper above can be awaited
async function handleFormSubmit(event) {
  event.preventDefault();
  console.log("Form submitted");
  const textGenInput = document.getElementById("text-gen-input");
  // The output element carries id="text-gen-output", so select it by id
  const textGenParagraph = document.getElementById("text-gen-output");

  textGenParagraph.textContent = await generateText(textGenInput.value);
}

// Attach the handler here rather than via an inline onsubmit attribute
textGenForm.addEventListener("submit", handleFormSubmit);

index.html:

<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Text Generation using Miqu Model</title>
  <link rel="stylesheet" href="/static/style.css">
</head>
<body>
  <div class="container">
    <h1>Text Generation using Miqu Model</h1>
    <p>Model: <a href="https://huggingface.co/152334H/miqu-1-70b-sf" target="_blank">152334H/miqu-1-70b-sf</a></p>
    <form id="text-gen-form" onsubmit="handleFormSubmit(event)">
      <label for="text-gen-input">Text prompt</label>
      <input type="text" id="text-gen-input" value="[INST] Eloquent high camp prose about a cute catgirl [/INST]">
      <button type="submit" id="text-gen-submit">Submit</button>
    </form>
    <div id="loader" class="hidden">
      <img src="/static/loading.gif" alt="Loading...">
    </div>
    <p id="text-gen-output"></p>
  </div>
  <script src="/static/script.js"></script>
</body>
</html>
Empereur-Pirate changed discussion status to closed
