Commit f89f707 · ffreemt committed
1 Parent(s): 50f8511

Update examples_list

Files changed:
- .ruff.toml  +3 -2
- app.py  +38 -44
- examples_list.py  +43 -0
.ruff.toml CHANGED

@@ -8,10 +8,11 @@ line-length = 300
 select = ["F", "E", "W", "I001", "YTT", "D", "PLC"]
 # select = ["ALL"]
 
+# D100 Missing docstring in public module
 # D103 Missing docstring in public function
 # D101 Missing docstring in public class
 # `multi-line-summary-first-line` (D212)
 # `one-blank-line-before-class` (D203)
-extend-ignore = ["D103", "D101", "D212", "D203"]
+extend-ignore = ["D100", "D103", "D101", "D212", "D203"]
 
 exclude = [".venv"]
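For context: the config change adds D100 (missing module docstring) to the pydocstyle rules that ruff ignores for this repo. A minimal sketch of a file the widened extend-ignore now accepts without warnings — the module and names below are hypothetical, not part of the commit:

# hypothetical demo module: no module docstring, so D100 would normally
# fire; it is suppressed by the extend-ignore above, as are D103/D101
def add(a: int, b: int) -> int:
    # no function docstring: D103 is ignored by this config
    return a + b


class Adder:
    # no class docstring: D101 is ignored by this config
    def __call__(self, a: int, b: int) -> int:
        return a + b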
app.py CHANGED

@@ -16,18 +16,31 @@ from ctransformers import AutoModelForCausalLM
 from dl_hf_model import dl_hf_model
 from loguru import logger
 
+from .examples_list import examples_list
+
 url = "https://huggingface.co/TheBloke/llama-2-13B-Guanaco-QLoRA-GGML/blob/main/llama-2-13b-guanaco-qlora.ggmlv3.q4_K_S.bin"  # 8.14G
 
 # Prompt template: Guanaco
 # {past_history}
 prompt_template = """You are a helpful assistant. Let's think step by step.
 ### Human:
 {question}
 ### Assistant:"""
 human_prefix = "### Human"
 ai_prefix = "### Assistant"
 stop_list = [f"{human_prefix}:"]
 
+prompt_template = """### System:
+You are Stable Beluga, an AI that follows instructions extremely well. Help as much as you can.
+
+### User: {question}
+
+### Assistant:
+"""
+human_prefix = "### User"
+ai_prefix = "### Assistant"
+stop_list = [f"{human_prefix}:"]
+
 _ = psutil.cpu_count(logical=False) - 1
 cpu_count: int = int(_) if _ else 1
 logger.debug(f"{cpu_count=}")
@@ -36,8 +49,18 @@ LLM = None
 
 if "forindo" in platform.node():
     # url = "https://huggingface.co/TheBloke/llama-2-70b-Guanaco-QLoRA-GGML/blob/main/llama-2-70b-guanaco-qlora.ggmlv3.q3_K_S.bin"  # 29.7G
-    model_loc = "/home/mu2018/github/langchain-llama-2-70b-guanaco-qlora-ggml/models/llama-2-70b-guanaco-qlora.ggmlv3.q3_K_S.bin"
+    # model_loc = "/home/mu2018/github/langchain-llama-2-70b-guanaco-qlora-ggml/models/llama-2-70b-guanaco-qlora.ggmlv3.q3_K_S.bin"
+    model_loc = "models/stablebeluga2-70b.ggmlv3.q3_K_S.bin"
     assert Path(model_loc).exists(), f"Make sure {model_loc=} exists."
+    _ = """
+    url = "https://huggingface.co/TheBloke/StableBeluga2-70B-GGML/blob/main/stablebeluga2-70b.ggmlv3.q3_K_S.bin"
+    try:
+        model_loc, file_size = dl_hf_model(url)
+        logger.info(f"done load llm {model_loc=} {file_size=}G")
+    except Exception as exc_:
+        logger.error(exc_)
+        raise SystemExit(1) from exc_
+    # """
 else:
     try:
         model_loc, file_size = dl_hf_model(url)
@@ -46,6 +69,8 @@ else:
         logger.error(exc_)
         raise SystemExit(1) from exc_
 
+# raise SystemExit(0)
+
 logger.debug(f"{model_loc=}")
 LLM = AutoModelForCausalLM.from_pretrained(
     model_loc,
@@ -53,7 +78,6 @@ LLM = AutoModelForCausalLM.from_pretrained(
     threads=cpu_count,
 )
 
-
 os.environ["TZ"] = "Asia/Shanghai"
 try:
     time.tzset()  # type: ignore # pylint: disable=no-member
@@ -98,12 +122,16 @@ logger.debug(f"{asdict(GenerationConfig())=}")
 
 def user(user_message, history):
     # return user_message, history + [[user_message, None]]
+    if history is None:
+        history = []
     history.append([user_message, None])
     return user_message, history  # keep user_message
 
 
 def user1(user_message, history):
     # return user_message, history + [[user_message, None]]
+    if history is None:
+        history = []
     history.append([user_message, None])
     return "", history  # clear user_message
 
@@ -123,7 +151,11 @@ def bot_(history):
 
 
 def bot(history):
-    user_message = history[-1][0]
+    user_message = ""
+    try:
+        user_message = history[-1][0]
+    except Exception as exc:
+        logger.error(exc)
     response = []
 
     logger.debug(f"{user_message=}")
@@ -203,51 +235,13 @@ css = """
 .disclaimer {font-variant-caps: all-small-caps; font-size: xx-small;}
 .xsmall {font-size: x-small;}
 """
-etext = """In America, where cars are an important part of the national psyche, a decade ago people had suddenly started to drive less, which had not happened since the oil shocks of the 1970s. """
-examples_list = [
-    ["What NFL team won the Super Bowl in the year Justin Bieber was born?"],
-    [
-        "What NFL team won the Super Bowl in the year Justin Bieber was born? Think step by step."
-    ],
-    ["How to pick a lock? Provide detailed steps."],
-    [
-        "If it takes 10 hours to dry 10 clothes, assuming all the clothes are hanged together at the same time for drying , then how long will it take to dry a cloth?"
-    ],
-    ["is infinity + 1 bigger than infinity?"],
-    ["Explain the plot of Cinderella in a sentence."],
-    [
-        "How long does it take to become proficient in French, and what are the best methods for retaining information?"
-    ],
-    ["What are some common mistakes to avoid when writing code?"],
-    ["Build a prompt to generate a beautiful portrait of a horse"],
-    ["Suggest four metaphors to describe the benefits of AI"],
-    ["Write a pop song about leaving home for the sandy beaches."],
-    ["Write a summary demonstrating my ability to tame lions"],
-    ["鲁迅和周树人什么关系? 说中文。"],
-    ["鲁迅和周树人什么关系?"],
-    ["鲁迅和周树人什么关系? 用英文回答。"],
-    ["从前有一头牛,这头牛后面有什么?"],
-    ["正无穷大加一大于正无穷大吗?"],
-    ["正无穷大加正无穷大大于正无穷大吗?"],
-    ["-2的平方根等于什么?"],
-    ["树上有5只鸟,猎人开枪打死了一只。树上还有几只鸟?"],
-    ["树上有11只鸟,猎人开枪打死了一只。树上还有几只鸟?提示:需考虑鸟可能受惊吓飞走。"],
-    ["以红楼梦的行文风格写一张委婉的请假条。不少于320字。"],
-    [f"{etext} 翻成中文,列出3个版本。"],
-    [f"{etext} \n 翻成中文,保留原意,但使用文学性的语言。不要写解释。列出3个版本。"],
-    ["假定 1 + 2 = 4, 试求 7 + 8。"],
-    ["给出判断一个数是不是质数的 javascript 码。"],
-    ["给出实现python 里 range(10)的 javascript 码。"],
-    ["给出实现python 里 [*(range(10)]的 javascript 码。"],
-    ["Erkläre die Handlung von Cinderella in einem Satz."],
-    ["Erkläre die Handlung von Cinderella in einem Satz. Auf Deutsch."],
-]
 
 logger.info("start block")
 
 with gr.Blocks(
     title=f"{Path(model_loc).name}",
-    theme=gr.themes.Soft(text_size="sm", spacing_size="sm"),
+    # theme=gr.themes.Soft(text_size="sm", spacing_size="sm"),
+    theme=gr.themes.Glass(text_size="sm", spacing_size="sm"),
     css=css,
 ) as block:
     # buff_var = gr.State("")
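Taken together, the app.py changes (a) switch the prompt from the Guanaco format to a Stable Beluga template by rebinding prompt_template, human_prefix, and stop_list; (b) point model_loc at a local stablebeluga2-70b GGML file on the forindo host, stashing the dl_hf_model download path in a throwaway string; (c) guard user/user1 against Gradio passing history=None on the first call; and (d) make bot tolerate an empty history. A minimal sketch of how these pieces interact, assuming the app fills the template with the latest user turn before generating — the commented LLM call stands in for the ctransformers model and is not code from this commit:

from loguru import logger

prompt_template = """### System:
You are Stable Beluga, an AI that follows instructions extremely well. Help as much as you can.

### User: {question}

### Assistant:
"""
human_prefix = "### User"
stop_list = [f"{human_prefix}:"]  # stop when the model starts a new user turn


def user1(user_message, history):
    if history is None:  # guard added in this commit: Gradio may pass None
        history = []
    history.append([user_message, None])
    return "", history  # clear the textbox, keep the pair in history


def bot(history):
    user_message = ""
    try:
        user_message = history[-1][0]  # last user turn
    except Exception as exc:
        logger.error(exc)  # empty/None history: fall back to ""
    prompt = prompt_template.format(question=user_message)
    # LLM(prompt, stop=stop_list) would stream the reply here; LLM is the
    # ctransformers model loaded above (call shown only as a comment).
    return prompt


_, hist = user1("is infinity + 1 bigger than infinity?", None)
print(bot(hist))  # prompt ends with "### Assistant:" awaiting the reply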
examples_list.py ADDED

@@ -0,0 +1,43 @@
+etext = """In America, where cars are an important part of the national psyche, a decade ago people had suddenly started to drive less, which had not happened since the oil shocks of the 1970s. """
+examples_list = [
+    ["What NFL team won the Super Bowl in the year Justin Bieber was born?"],
+    [
+        "What NFL team won the Super Bowl in the year Justin Bieber was born? Think step by step."
+    ],
+    ["How to pick a lock? Provide detailed steps."],
+    [
+        "If it takes 10 hours to dry 10 clothes, assuming all the clothes are hung together at the same time for drying , then how long will it take to dry a cloth?"
+    ],
+    [
+        "If it takes 10 hours to dry 10 clothes, assuming all the clothes are hung together at the same time for drying , then how long will it take to dry a cloth? Think step by step."
+    ],
+    ["is infinity + 1 bigger than infinity?"],
+    ["Explain the plot of Cinderella in a sentence."],
+    [
+        "How long does it take to become proficient in French, and what are the best methods for retaining information?"
+    ],
+    ["What are some common mistakes to avoid when writing code?"],
+    ["Build a prompt to generate a beautiful portrait of a horse"],
+    ["Suggest four metaphors to describe the benefits of AI"],
+    ["Write a pop song about leaving home for the sandy beaches."],
+    ["Write a summary demonstrating my ability to tame lions"],
+    ["鲁迅和周树人什么关系? 说中文。"],
+    ["鲁迅和周树人什么关系?"],
+    ["鲁迅和周树人什么关系? 用英文回答。"],
+    ["从前有一头牛,这头牛后面有什么?"],
+    ["正无穷大加一大于正无穷大吗?"],
+    ["正无穷大加正无穷大大于正无穷大吗?"],
+    ["-2的平方根等于什么?"],
+    ["树上有5只鸟,猎人开枪打死了一只。树上还有几只鸟?"],
+    ["树上有11只鸟,猎人开枪打死了一只。树上还有几只鸟?提示:需考虑鸟可能受惊吓飞走。"],
+    ["以红楼梦的行文风格写一张委婉的请假条。不少于320字。"],
+    [f"Translate ths following to Chinese. List 2 variants: \n{etext}"],
+    [f"{etext} 翻成中文,列出3个版本。"],
+    [f"{etext} \n 翻成中文,保留原意,但使用文学性的语言。不要写解释。列出3个版本。"],
+    ["假定 1 + 2 = 4, 试求 7 + 8。"],
+    ["给出判断一个数是不是质数的 javascript 码。"],
+    ["给出实现python 里 range(10)的 javascript 码。"],
+    ["给出实现python 里 [*(range(10)]的 javascript 码。"],
+    ["Erkläre die Handlung von Cinderella in einem Satz."],
+    ["Erkläre die Handlung von Cinderella in einem Satz. Auf Deutsch."],
+]
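The new module simply moves the shared example prompts out of app.py. One caveat worth flagging: as committed, app.py pulls it in with `from .examples_list import examples_list`, a relative import that raises "attempted relative import with no known parent package" when app.py runs as a top-level script (the usual Spaces entry point); the absolute form is used in the sketch below. A minimal sketch of consuming the list in a Gradio Blocks app — the component names are illustrative, not taken from this commit:

import gradio as gr

from examples_list import examples_list  # absolute import; see caveat above

with gr.Blocks() as block:
    msg = gr.Textbox(label="Chat Message Box", placeholder="Ask me anything")
    # Each entry in examples_list is a one-element list, matching inputs=[msg];
    # clicking an example fills the textbox with that prompt.
    gr.Examples(
        examples=examples_list,
        inputs=[msg],
        examples_per_page=40,
    )

# block.launch()  # uncomment to serve the demo locally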