Update app.py
app.py CHANGED
@@ -58,14 +58,11 @@ def generate(prompt, history,max_new_tokens,health,temperature=temperature,top_p
    )
    cnt=0
    history1=history
-
-
-
-
-
-    Magic: 24
-    *******************
-    '''
+
+    stats="*******************\n"
+    for eac in health:
+        stats+=f'{eac}\n'
+    stats+="*******************\n"
    for ea in history:
        print (ea)
        for l in ea:
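The added lines build a banner-style stats string from whatever iterable arrives in the `health` parameter; with the click wiring further down, that is the value of the `json_out` component. A minimal standalone sketch of the same loop, assuming a `base_stats`-style list of single-key dicts (`sample_health` is an illustrative stand-in, not a name from the app):

# Sketch only: mirrors the stats banner built in the hunk above.
sample_health = [{"Health": 100}, {"Power": 20}, {"Strength": 24}]

stats = "*******************\n"
for eac in sample_health:
    stats += f'{eac}\n'          # each entry is rendered with str(), e.g. "{'Health': 100}"
stats += "*******************\n"

print(stats)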
@@ -81,9 +78,9 @@ def generate(prompt, history,max_new_tokens,health,temperature=temperature,top_p
    for response in stream:
        output += response.token.text
        if history:
-            yield "",[(prompt,output)],
+            yield "",[(prompt,output)],stats
        else:
-            yield "",[(prompt,output)],
+            yield "",[(prompt,output)],stats
    generate_kwargs2 = dict(
        temperature=temperature,
        max_new_tokens=128,
@@ -101,32 +98,36 @@ def generate(prompt, history,max_new_tokens,health,temperature=temperature,top_p
        # output2 += response.token.text

    lines = output.strip().strip("\n").split("\n")
+    skills=[]
    for i,line in enumerate(lines):
-        if "
+        if ": " in line:
            try:
-
-
-
+                lab_1 = line.split(": ")[0]
+                skill_1 = line.split(": ")[1].split(" ")[0]
+                skill_1=int(skill_1)
+                skill ={lab_1:skill_1}
+                skills.append(skill)
+                print(skills)
            except Exception as e:
-                print (f'
-        if "Power: " in line:
-            print(line)
-        if line.startswith("3. "):
-            print(line)
-        if line.startswith("4. "):
-            print(line)
-        if line.startswith("5. "):
-            print(line)
-        else:
+                print (f'--Error :: {e}')
            print(f'Line:: {line}')
+    stats=skills
    if history:
        history.append((prompt,output))
-        yield "",history,
+        yield "",history,stats
    else:
-        yield "",[(prompt,output)],
+        yield "",[(prompt,output)],stats

def clear_fn():
    return None,None
+
+base_stats=[
+    {"Health":100},
+    {"Power":20},
+    {"Strength":24},
+    ]
+
+
with gr.Blocks() as app:
    gr.HTML("""<center><h1>Mixtral 8x7B RPG</h1><h3>Role Playing Game Master</h3>""")
    chatbot = gr.Chatbot(label="Mixtral 8x7B Chatbot",show_copy_button=True)
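The new parsing pass looks for a ": " separator in each line of the model's reply, keeps the text before it as a label, converts the first token after it to an int, and collects the results into `skills`, which then replaces `stats`. A self-contained sketch of that logic on invented model output (the sample text is illustrative only):

# Sketch of the "Label: value" parsing added above, run on made-up output.
sample_output = """The goblin strikes!
Health: 92
Power: 18 (drained)
Strength: 24
You find a rusty key."""

skills = []
for line in sample_output.strip().split("\n"):
    if ": " in line:
        try:
            lab_1 = line.split(": ")[0]                       # text before the colon, e.g. "Health"
            skill_1 = int(line.split(": ")[1].split(" ")[0])  # first token after the colon, as an int
            skills.append({lab_1: skill_1})
        except Exception as e:
            print(f'--Error :: {e}')                          # lines with non-numeric values are skipped

print(skills)   # [{'Health': 92}, {'Power': 18}, {'Strength': 24}]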
@@ -142,13 +143,13 @@ with gr.Blocks() as app:
    clear_btn = gr.Button("Clear")
    with gr.Row():
        tokens = gr.Slider(label="Max new tokens",value=2096,minimum=0,maximum=1048*10,step=64,interactive=True,info="The maximum numbers of new tokens")
-        json_out=gr.JSON()
+        json_out=gr.JSON(value=base_stats)
    health=gr.Number(value=100)
    #text=gr.JSON()
    #inp_query.change(search_models,inp_query,models_dd)
    #test_b=test_btn.click(itt,url,e_box)
    clear_btn.click(clear_fn,None,[prompt,chatbot])
-    go=button.click(generate,[prompt,chatbot,tokens,
+    go=button.click(generate,[prompt,chatbot,tokens,json_out],[prompt,chatbot,json_out])
    stop_button.click(None,None,None,cancels=[go])
app.launch(show_api=False)
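With `json_out` added to the click handler, the four inputs line up positionally with `generate(prompt, history, max_new_tokens, health)`, so the JSON panel's value is what arrives as `health`, and every yielded `(textbox, history, stats)` triple is routed back into `[prompt, chatbot, json_out]`. A stripped-down sketch of the same wiring, with a dummy generator standing in for the Mixtral call:

import gradio as gr

base_stats = [{"Health": 100}, {"Power": 20}, {"Strength": 24}]

def generate(prompt, history, max_new_tokens, health):
    # Dummy stand-in: echoes the prompt and passes the stats straight through.
    history = history or []
    output = f"(pretend reply to: {prompt})"
    history.append((prompt, output))
    # (new textbox value, chatbot history, stats shown in the JSON panel)
    yield "", history, health

with gr.Blocks() as app:
    chatbot = gr.Chatbot()
    prompt = gr.Textbox()
    button = gr.Button("Send")
    tokens = gr.Slider(value=2096, minimum=0, maximum=1048 * 10, step=64)
    json_out = gr.JSON(value=base_stats)
    # Four inputs map positionally onto generate(); three outputs receive each yielded triple.
    button.click(generate, [prompt, chatbot, tokens, json_out], [prompt, chatbot, json_out])

app.launch(show_api=False)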