chansung committed on
Commit
2fad7fa
·
1 Parent(s): efcfdac

Update strings.py

Browse files
Files changed (1) hide show
  1. strings.py +67 -37
strings.py CHANGED
@@ -1,51 +1,81 @@
1
  TITLE = "Alpaca-LoRA Playground"
2
 
3
  ABSTRACT = """
4
- Thanks to [tolen](https://github.com/tloen/alpaca-lora), this simple application runs Alpaca-LoRA which is instruction fine-tuned version of [LLaMA](https://ai.facebook.com/blog/large-language-model-llama-meta-ai/) from Meta AI. Alpaca-LoRA is *Low-Rank LLaMA Instruct-Tuning* which is inspired by [Stanford Alpaca project](https://github.com/tatsu-lab/stanford_alpaca). This demo application currently runs 13B version on a A10 instance.
 
 
5
  """
6
 
7
  BOTTOM_LINE = """
8
-
9
  This demo application runs the open source project, [Alpaca-LoRA-Serve](https://github.com/deep-diver/Alpaca-LoRA-Serve). By default, it runs with streaming mode, but you can also run with dynamic batch generation model. Please visit the repo, find more information, and contribute if you can.
10
 
11
  Alpaca-LoRA is built on the same concept as Standford Alpaca project, but it lets us train and inference on a smaller GPUs such as RTX4090 for 7B version. Also, we could build very small size of checkpoints on top of base models thanks to [🤗 transformers](https://huggingface.co/docs/transformers/index), [🤗 peft](https://github.com/huggingface/peft), and [bitsandbytes](https://github.com/TimDettmers/bitsandbytes/tree/main) libraries.
 
 
12
  """
13
 
14
- DEFAULT_EXAMPLES = [
15
- {
16
- "title": "1️⃣ List all Canadian provinces in alphabetical order.",
17
- "examples": [
18
- ["1", "List all Canadian provinces in alphabetical order."],
19
- ["2", "Which ones are on the east side?"],
20
- ["3", "What foods are famous in each province on the east side?"],
21
- ["4", "What about sightseeing? or landmarks? list one per province"],
22
- ],
23
- },
24
- {
25
- "title": "2️⃣ Tell me about Alpacas.",
26
- "examples": [
27
- ["1", "Tell me about alpacas in two sentences"],
28
- ["2", "What other animals are living in the same area?"],
29
- ["3", "Are they the same species?"],
30
- ["4", "Write a Python program to return those species"],
31
- ],
32
- },
33
- {
34
- "title": "3️⃣ Tell me about the king of France in 2019.",
35
- "examples": [
36
- ["1", "Tell me about the king of France in 2019."],
37
- ["2", "What about before him?"],
38
- ]
39
- },
40
- {
41
- "title": "4️⃣ Write a Python program that prints the first 10 Fibonacci numbers.",
42
- "examples": [
43
- ["1", "Write a Python program that prints the first 10 Fibonacci numbers."],
44
- ["2", "Could you explain how the code works?"],
45
- ["3", "What is recursion?"],
46
- ]
47
- }
48
- ]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
49
 
50
  SPECIAL_STRS = {
51
  "continue": "continue.",
 
1
# User-facing strings for the demo UI.

# Page title shown at the top of the playground.
TITLE = "Alpaca-LoRA Playground"

# Markdown blurb rendered under the title.
# Fix: link text "tolen" did not match the actual GitHub handle "tloen"
# used in the URL; minor grammar cleanup.
ABSTRACT = """
Thanks to [tloen](https://github.com/tloen/alpaca-lora), this application runs Alpaca-LoRA, which is an instruction fine-tuned version of [LLaMA](https://ai.facebook.com/blog/large-language-model-llama-meta-ai/). This demo currently runs the 30B version on a 3*A6000 instance at [Jarvislabs.ai](https://jarvislabs.ai/).

NOTE: Inputs (context, instruction) that are too long are not allowed. Please keep them < 150
"""
8
 
9
# Markdown footer rendered at the bottom of the demo page.
# Fixes: "Standford" -> "Stanford", "a smaller GPUs" agreement error,
# "model" -> "mode" for the batch-generation option, and "the ... who"
# -> ", which" when referring to the company.
BOTTOM_LINE = """
This demo application runs the open source project, [Alpaca-LoRA-Serve](https://github.com/deep-diver/Alpaca-LoRA-Serve). By default, it runs in streaming mode, but you can also run it in dynamic batch generation mode. Please visit the repo, find more information, and contribute if you can.

Alpaca-LoRA is built on the same concept as the Stanford Alpaca project, but it lets us train and run inference on smaller GPUs such as an RTX4090 for the 7B version. Also, we could build very small checkpoints on top of base models thanks to the [🤗 transformers](https://huggingface.co/docs/transformers/index), [🤗 peft](https://github.com/huggingface/peft), and [bitsandbytes](https://github.com/TimDettmers/bitsandbytes/tree/main) libraries.

We are thankful to [Jarvislabs.ai](https://jarvislabs.ai/), which generously provided free GPU instances.
"""
16
 
17
# Canned example conversations shown in the UI, grouped by tab name.
# Each group is a list of scenarios; each scenario carries a display
# "title" and an "examples" list of rows — presumably
# [turn number, prompt] or [turn number, context/title, prompt] as
# consumed by the UI's example component (TODO confirm against caller).
# Fixes: "I'am" -> "I'm" and "Have you ever stucked?" -> grammatical form.
DEFAULT_EXAMPLES = {
    "Typical Questions": [
        {
            "title": "List all Canadian provinces in alphabetical order.",
            "examples": [
                ["1", "List all Canadian provinces in alphabetical order."],
                ["2", "Which ones are on the east side?"],
                ["3", "What foods are famous in each province on the east side?"],
                ["4", "What about sightseeing? or landmarks? list one per province"],
            ],
        },
        {
            "title": "Tell me about Alpacas.",
            "examples": [
                ["1", "Tell me about alpacas in two sentences"],
                ["2", "What other animals are living in the same area?"],
                ["3", "Are they the same species?"],
                ["4", "Write a Python program to return those species"],
            ],
        },
        {
            "title": "Tell me about the king of France in 2019.",
            "examples": [
                ["1", "Tell me about the king of France in 2019."],
                ["2", "What about before him?"],
            ],
        },
        {
            "title": "Write a Python program that prints the first 10 Fibonacci numbers.",
            "examples": [
                ["1", "Write a Python program that prints the first 10 Fibonacci numbers."],
                ["2", "Could you explain how the code works?"],
                ["3", "What is recursion?"],
            ],
        },
    ],
    "Identity": [
        {
            "title": "Conversation with the planet Pluto",
            "examples": [
                ["1", "Conversation with the planet Pluto", "I'm so curious about you"],
                ["2", "Conversation with the planet Pluto", "Tell me what I would see if I visited"],
                ["3", "Conversation with the planet Pluto", "It sounds beautiful"],
                ["4", "Conversation with the planet Pluto", "I'll keep that in mind. Hey I was wondering have you ever had any visitor?"],
                ["5", "Conversation with the planet Pluto", "That must have been exciting"],
                ["6", "Conversation with the planet Pluto", "That's so great. What else do you wish people knew about you?"],
                ["7", "Conversation with the planet Pluto", "Thanks for talking with me"],
            ],
        },
        {
            "title": "Conversation with a paper airplane",
            "examples": [
                ["1", "Conversation with a paper airplane", "What's it like being thrown through the air"],
                ["2", "Conversation with a paper airplane", "What's the worst place you've ever landed"],
                ["3", "Conversation with a paper airplane", "Have you ever gotten stuck?"],
                ["4", "Conversation with a paper airplane", "What's the secret to a really good paper airplane?"],
                ["5", "Conversation with a paper airplane", "What's the farthest you've ever flown?"],
                ["6", "Conversation with a paper airplane", "Good to talk to you!"],
            ],
        },
    ],
}
79
 
80
  SPECIAL_STRS = {
81
  "continue": "continue.",