Spaces: Kajise (Runtime error)

Update app.py
Committed by Kajise · Commit 3cebf55
1 Parent(s): 7f01d00
app.py
CHANGED
```diff
@@ -7,13 +7,6 @@ from gradio.themes.utils import colors, fonts, sizes
 from llama_cpp import Llama
 from huggingface_hub import hf_hub_download
 
-import requests
-from bs4 import BeautifulSoup
-
-headers = {
-    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36"
-}
-
 hf_hub_download(repo_id="TheBloke/dolphin-2.5-mixtral-8x7b-GGUF", filename="dolphin-2.5-mixtral-8x7b.Q2_K.gguf", local_dir=".")
 llm = Llama(model_path="./dolphin-2.5-mixtral-8x7b.Q2_K.gguf")
 
```
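The surviving lines in this hunk download the quantized GGUF weights and load them with llama-cpp-python. A minimal sketch of that step is below; it only assumes the two calls already visible in the diff, plus an illustrative `n_ctx` value that this app.py does not actually set:

```python
from huggingface_hub import hf_hub_download
from llama_cpp import Llama

# hf_hub_download returns the local path of the fetched file, so the
# model path does not need to be re-assembled by hand.
model_path = hf_hub_download(
    repo_id="TheBloke/dolphin-2.5-mixtral-8x7b-GGUF",
    filename="dolphin-2.5-mixtral-8x7b.Q2_K.gguf",
    local_dir=".",
)

# n_ctx is an assumed context-window size; app.py uses the library default.
llm = Llama(model_path=model_path, n_ctx=2048)
```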
```diff
@@ -23,40 +16,15 @@ ins = '''<|im_start|>user
 '''
 
 theme = gr.themes.Monochrome(
-    primary_hue="
-    secondary_hue="
+    primary_hue="blue",
+    secondary_hue="cyan",
     neutral_hue="neutral",
     radius_size=gr.themes.sizes.radius_sm,
-    font=[gr.themes.GoogleFont("
+    font=[gr.themes.GoogleFont("Space Grotesk"), "ui-sans-serif", "system-ui", "sans-serif"],
 )
 
-def search_ddg(question: str):
-    response = requests.get("https://duckduckgo.com/html/", headers=headers, params={"q": question})
-    data = response.text
-    soup = BeautifulSoup(data, "html.parser")
-
-    result_texts = soup.find_all("a", class_="result__snippet")
-    results: list[str] = []
-    output_string: str = ""
-
-    for element in result_texts:
-        if len(results) < 2:
-            text_content = element.get_text()
-            results.append(text_content)
-        else:
-            continue
-
-    for step in range(len(results)):
-        output_string += f"{results[step]} \n"
-
-    return output_string
-
 def generate(instruction):
-    # feeding_data = "\n" + search_ddg(instruction)
-    # prompt = ins.format(question=instruction, data=feeding_data)
     prompt = ins.format(question=instruction)
-    print(prompt)
-
     response = llm(prompt, stop=['<|im_start|>user', '<|im_end|>'])
     result = response['choices'][0]['text']
     return result
```
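This hunk removes the DuckDuckGo scraping helper (`search_ddg`) and the commented-out retrieval lines in `generate`, leaving a plain question-in, answer-out path. The sketch below shows that simplified path end to end; the body of the `ins` template is an assumption built from the `<|im_start|>user` opening shown in the hunk header and the stop tokens used in the call, not the Space's exact string:

```python
# Assumed ChatML-style template; only the "<|im_start|>user" opening and the
# {question} placeholder are confirmed by the diff.
ins = '''<|im_start|>user
{question}<|im_end|>
<|im_start|>assistant
'''

def generate(instruction):
    # The retrieval step (search_ddg) is gone after this commit, so the
    # prompt is built from the user question alone.
    prompt = ins.format(question=instruction)
    response = llm(prompt, stop=['<|im_start|>user', '<|im_end|>'])
    return response['choices'][0]['text']
```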
```diff
@@ -75,11 +43,11 @@ def process_example(args):
 
 css = ".generating {visibility: hidden}"
 
-class TealTheme(Base):
+class BlueTheme(Base):
     def __init__(
         self,
         *,
-        primary_hue: colors.Color | str = colors.
+        primary_hue: colors.Color | str = colors.blue,
         secondary_hue: colors.Color | str = colors.cyan,
         neutral_hue: colors.Color | str = colors.neutral,
         spacing_size: sizes.Size | str = sizes.spacing_md,
```
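The renamed theme class keeps Gradio's documented pattern of subclassing `Base`; this hunk only shows the constructor signature. The sketch below fills in the rest with the standard pattern from Gradio's theming guide, so the `super().__init__` call and the remaining defaults are assumptions rather than lines from this app.py:

```python
from gradio.themes.base import Base
from gradio.themes.utils import colors, fonts, sizes

class BlueTheme(Base):
    def __init__(
        self,
        *,
        primary_hue: colors.Color | str = colors.blue,
        secondary_hue: colors.Color | str = colors.cyan,
        neutral_hue: colors.Color | str = colors.neutral,
        spacing_size: sizes.Size | str = sizes.spacing_md,
        # Remaining defaults are assumed; the diff does not show them.
        radius_size: sizes.Size | str = sizes.radius_md,
        font: fonts.Font | str | list = (
            fonts.GoogleFont("Space Grotesk"),
            "ui-sans-serif",
            "sans-serif",
        ),
    ):
        # Base turns the hue/size/font choices into the CSS variables Gradio uses.
        super().__init__(
            primary_hue=primary_hue,
            secondary_hue=secondary_hue,
            neutral_hue=neutral_hue,
            spacing_size=spacing_size,
            radius_size=radius_size,
            font=font,
        )
```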
```diff
@@ -122,17 +90,17 @@ class TealTheme(Base):
 )
 
 
-custom_theme = 
+custom_theme = BlueTheme()
 
 with gr.Blocks(theme=custom_theme, analytics_enabled=False, css=css) as demo:
     with gr.Column():
         gr.Markdown(
-            """ 
+            """ # 🐬 Dolphin4ALL
 
-            8x7b quantized 2bit (
+            8x7b quantized 2bit (q2_k)
 
             Type in the box below and click the button to generate answers to your most pressing questions!
-            
+            """)
 
     with gr.Row():
         with gr.Column(scale=3):
```
```diff
@@ -150,8 +118,6 @@ with gr.Blocks(theme=custom_theme, analytics_enabled=False, css=css) as demo:
             outputs=[output],
         )
 
-
-
     submit.click(generate, inputs=[instruction], outputs=[output])
     instruction.submit(generate, inputs=[instruction], outputs=[output])
 
```
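The wiring in the last hunk refers to widgets (`instruction`, `submit`, `output`) defined in a part of app.py this diff does not include. A minimal sketch of a layout consistent with those context lines is below; the widget constructors, labels, and the final `launch()` call are assumptions:

```python
import gradio as gr

with gr.Blocks(theme=custom_theme, analytics_enabled=False, css=css) as demo:
    with gr.Column():
        gr.Markdown(
            """ # 🐬 Dolphin4ALL

            8x7b quantized 2bit (q2_k)

            Type in the box below and click the button to generate answers to your most pressing questions!
            """)

    with gr.Row():
        with gr.Column(scale=3):
            # Assumed widget definitions; the diff only shows the event wiring.
            instruction = gr.Textbox(label="Question")
            submit = gr.Button("Generate")
            output = gr.Textbox(label="Answer", lines=8)

    # Both the button click and pressing Enter in the textbox call generate().
    submit.click(generate, inputs=[instruction], outputs=[output])
    instruction.submit(generate, inputs=[instruction], outputs=[output])

# Assumed entry point; standard for a Space's app.py but not shown in the diff.
demo.launch()
```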