Update app.py
app.py CHANGED
@@ -24,22 +24,6 @@ Contexto
 ### Respuesta:
 {}"""
 
-# prompt = """Responde a preguntas de forma clara, amable, concisa y solamente en el lenguaje español.
-
-# -------------------------
-# Contexto:
-# {}
-# -------------------------
-
-# ### Pregunta:
-# {}
-
-# - Debes utilizar el contexto para responder la pregunta.
-
-# ### Respuesta:
-# {}"""
-
-# Initialize the LLM
 llm = Llama(model_path="model.gguf",
             n_ctx=max_seq_length,
             n_threads=2)
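
For readers unfamiliar with llama-cpp-python, the sketch below shows how the pieces kept by this commit typically fit together: a prompt template whose placeholders are filled with a context, a question, and an empty answer slot, and a Llama instance loaded from model.gguf. Only the tail of the template ("### Respuesta:" and the closing {}""") is visible in this hunk, so the template body, the max_seq_length value, and the contexto/pregunta inputs below are assumptions for illustration, not the Space's actual code.

    from llama_cpp import Llama

    max_seq_length = 2048  # assumed value; app.py defines its own

    # Assumed template shape; only its tail appears in the hunk above.
    prompt_template = """Contexto:
    {}

    ### Pregunta:
    {}

    ### Respuesta:
    {}"""

    llm = Llama(model_path="model.gguf",
                n_ctx=max_seq_length,   # context window sized to the model's sequence length
                n_threads=2)            # CPU threads used for inference

    # Hypothetical inputs, for illustration only
    contexto = "El Space responde preguntas usando un modelo GGUF local."
    pregunta = "¿Qué modelo utiliza el Space?"

    prompt = prompt_template.format(contexto, pregunta, "")  # leave the answer slot empty
    output = llm(prompt, max_tokens=256, stop=["###"], echo=False)
    print(output["choices"][0]["text"].strip())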