Add NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO (#700)
Files changed:
- .env.template +27 -0
- PROMPTS.md +6 -0
.env.template CHANGED

@@ -60,6 +60,33 @@ MODELS=`[
       "max_new_tokens": 1024
     }
   },
+  {
+    "name" : "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
+    "description" : "Nous Hermes 2 Mixtral 8x7B DPO is the new flagship Nous Research model trained over the Mixtral 8x7B MoE LLM.",
+    "websiteUrl" : "https://nousresearch.com/",
+    "chatPromptTemplate" : "<|im_start|>system\n{{#if @root.preprompt}}{{@root.preprompt}}<|im_end|>\n{{/if}}{{#each messages}}{{#ifUser}}<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n{{/ifUser}}{{#ifAssistant}}{{content}}<|im_end|>\n{{/ifAssistant}}{{/each}}",
+    "promptExamples": [
+      {
+        "title": "Write an email from bullet list",
+        "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
+      }, {
+        "title": "Code a snake game",
+        "prompt": "Code a basic snake game in python, give explanations for each step."
+      }, {
+        "title": "Assist in a task",
+        "prompt": "How do I make a delicious lemon cheesecake?"
+      }
+    ],
+    "parameters": {
+      "temperature": 0.7,
+      "top_p": 0.95,
+      "repetition_penalty": 1,
+      "top_k": 50,
+      "truncate": 3072,
+      "max_new_tokens": 512,
+      "stop": ["<|im_end|>"]
+    }
+  },
   {
     "name": "codellama/CodeLlama-34b-Instruct-hf",
     "displayName": "codellama/CodeLlama-34b-Instruct-hf",
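For context on how the new entry's settings are consumed downstream, the sketch below sends a ChatML-formatted prompt to a TGI-style `/generate`-compatible endpoint with the generation parameters from the block above. The endpoint URL and `HF_TOKEN` variable are placeholders for illustration; this is a hedged sketch of the parameter mapping, not chat-ui's actual backend code.

```ts
// Illustration only: mapping the new MODELS entry's "parameters" block onto a
// text-generation-inference (TGI) style request. Endpoint URL and HF_TOKEN
// handling are placeholders, not chat-ui's real plumbing.
const ENDPOINT =
  "https://api-inference.huggingface.co/models/NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO";

// A ChatML prompt as the chatPromptTemplate above would produce it.
const prompt =
  "<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n" +
  "<|im_start|>user\nHow do I make a delicious lemon cheesecake?<|im_end|>\n" +
  "<|im_start|>assistant\n";

const response = await fetch(ENDPOINT, {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: `Bearer ${process.env.HF_TOKEN}`, // placeholder token variable
  },
  body: JSON.stringify({
    inputs: prompt,
    parameters: {
      // Same values as the "parameters" block in the new MODELS entry.
      temperature: 0.7,
      top_p: 0.95,
      repetition_penalty: 1,
      top_k: 50,
      truncate: 3072,
      max_new_tokens: 512,
      stop: ["<|im_end|>"], // stop generation at the ChatML end-of-turn marker
    },
  }),
});

console.log(await response.json());
```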
PROMPTS.md CHANGED

@@ -49,3 +49,9 @@ System: {{preprompt}}\nUser:{{#each messages}}{{#ifUser}}{{content}}\nFalcon:{{/
 ```env
 <s> {{#each messages}}{{#ifUser}}[INST]{{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}} {{content}} [/INST]{{/ifUser}}{{#ifAssistant}} {{content}}</s> {{/ifAssistant}}{{/each}}
 ```
+
+## ChatML
+
+```env
+<|im_start|>system\n{{#if @root.preprompt}}{{@root.preprompt}}<|im_end|>\n{{/if}}{{#each messages}}{{#ifUser}}<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n{{/ifUser}}{{#ifAssistant}}{{content}}<|im_end|>\n{{/ifAssistant}}{{/each}}
+```
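To see what the ChatML template expands to, here is a small Handlebars rendering sketch. The `{ from, content }` message shape and the `ifUser`/`ifAssistant` block helpers are assumptions made for this illustration; chat-ui registers its own helper implementations, so treat this as a rendering demo rather than the project's code.

```ts
// Sketch: rendering the ChatML chatPromptTemplate with Handlebars.
// The Message shape and the ifUser/ifAssistant helpers are assumptions
// for illustration; chat-ui ships its own helper definitions.
import Handlebars from "handlebars";

type Message = { from: "user" | "assistant"; content: string };

// Block helpers that emit their body only for the matching message role.
Handlebars.registerHelper("ifUser", function (this: Message, options: Handlebars.HelperOptions) {
  return this.from === "user" ? options.fn(this) : "";
});
Handlebars.registerHelper("ifAssistant", function (this: Message, options: Handlebars.HelperOptions) {
  return this.from === "assistant" ? options.fn(this) : "";
});

const chatPromptTemplate =
  "<|im_start|>system\n{{#if @root.preprompt}}{{@root.preprompt}}<|im_end|>\n{{/if}}" +
  "{{#each messages}}{{#ifUser}}<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n{{/ifUser}}" +
  "{{#ifAssistant}}{{content}}<|im_end|>\n{{/ifAssistant}}{{/each}}";

const render = Handlebars.compile(chatPromptTemplate, { noEscape: true });

const messages: Message[] = [
  { from: "user", content: "How do I make a delicious lemon cheesecake?" },
];

const prompt = render({ preprompt: "You are a helpful assistant.", messages });

console.log(prompt);
// <|im_start|>system
// You are a helpful assistant.<|im_end|>
// <|im_start|>user
// How do I make a delicious lemon cheesecake?<|im_end|>
// <|im_start|>assistant
```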