koutch committed
Commit b465350 · 1 Parent(s): 6f94172

trying something else

Files changed (2)
  1. Dockerfile +18 -10
  2. app.py → main.py +11 -8
Dockerfile CHANGED
@@ -1,20 +1,28 @@
-# Read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
+# read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
 # you will also find guides on how best to write your Dockerfile
 
 FROM python:3.9
 
+WORKDIR /code
+
+COPY ./requirements.txt /code/requirements.txt
+
+RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+# Set up a new user named "user" with user ID 1000
 RUN useradd -m -u 1000 user
-USER user
-ENV PATH="/home/user/.local/bin:$PATH"
+
+# Switch to the "user" user
+USER user
 
-WORKDIR /app
+# Set home to the user's home directory
+ENV HOME=/home/user \
+    PATH=/home/user/.local/bin:$PATH
 
-COPY --chown=user ./requirements.txt requirements.txt
-RUN pip install --no-cache-dir --upgrade -r requirements.txt
+# Set the working directory to the user's home directory
+WORKDIR $HOME/app
 
-COPY --chown=user . /app
+# Copy the current directory contents into the container at $HOME/app setting the owner to the user
+COPY --chown=user . $HOME/app
 
-# Fast API
-# ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
-CMD ["python", "app.py"]
+CMD ["python", "main.py"]
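A quick way to check the rebuilt image locally is to poll port 7860 until the Gradio server started by CMD ["python", "main.py"] answers. The sketch below is not part of the commit: it assumes the image was already built and started with the port published (e.g. docker run -p 7860:7860 <image>), and the helper name wait_for_space is hypothetical.

# Minimal local smoke test (sketch): waits for the Gradio app on the published port.
# Assumes the container built from this Dockerfile is already running with -p 7860:7860.
import time
import urllib.error
import urllib.request

def wait_for_space(url: str = "http://localhost:7860", timeout: float = 30.0) -> bool:
    """Return True once the app answers HTTP on port 7860, False if the deadline passes."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            with urllib.request.urlopen(url, timeout=2) as resp:
                return resp.status == 200
        except (urllib.error.URLError, OSError):
            time.sleep(1)  # server not up yet, retry until the deadline
    return False

if __name__ == "__main__":
    print("Space reachable:", wait_for_space())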
app.py → main.py RENAMED
@@ -1,17 +1,20 @@
 import gradio as gr
-import subprocess
 
 def greet(input_text):
     # Example call to a compiled llama.cpp binary (replace with real model path)
     # subprocess.run(["./llama.cpp/main", "-m", "models/llama-7b.gguf", "-p", input_text])
     return f"Llama.cpp would process: {input_text}"
 
-demo = gr.Interface(
-    fn=greet,
-    inputs=gr.Textbox(label="Enter text"),
-    outputs="text",
-    title="Llama.cpp + Gradio Demo"
-)
+def run():
+
+    demo = gr.Interface(
+        fn=greet,
+        inputs=gr.Textbox(label="Enter text"),
+        outputs="text",
+        title="Llama.cpp + Gradio Demo"
+    )
 
-if __name__ == "__main__":
     demo.launch(server_name="0.0.0.0", server_port=7860)
+
+if __name__ == "__main__":
+    run()
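The commented-out subprocess.run line in greet hints at the intended wiring once a compiled llama.cpp binary is available. Below is a hedged sketch of that step, not the author's code: the binary and model paths are the placeholders from the comment, and subprocess has to be imported again because this commit removes that import.

import subprocess  # re-added: this commit drops the import, but the real call needs it

def greet(input_text):
    # Sketch only: the binary and model paths come from the commented-out example
    # and are placeholders, not files shipped with this commit.
    result = subprocess.run(
        ["./llama.cpp/main", "-m", "models/llama-7b.gguf", "-p", input_text],
        capture_output=True,
        text=True,
        check=True,
    )
    return result.stdout  # llama.cpp writes the generated text to stdout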