machineuser committed · Commit 3f534ed · 1 Parent(s): e54c25c

Sync widgets demo
- packages/widgets/src/lib/components/Icons/IconRefresh.svelte  +19 -0
- packages/widgets/src/lib/components/InferenceWidget/shared/WidgetExamples/WidgetExamples.svelte  +2 -1
- packages/widgets/src/lib/components/InferenceWidget/shared/WidgetHeader/WidgetHeader.svelte  +24 -4
- packages/widgets/src/lib/components/InferenceWidget/shared/WidgetOutputConvo/WidgetOutputConvo.svelte  +6 -1
- packages/widgets/src/lib/components/InferenceWidget/shared/WidgetQuickInput/WidgetQuickInput.svelte  +1 -0
- packages/widgets/src/lib/components/InferenceWidget/shared/inputValidation.ts  +1 -1
- packages/widgets/src/lib/components/InferenceWidget/widgets/ConversationalWidget/ConversationalWidget.svelte  +41 -10
- packages/widgets/src/routes/+page.svelte  +7 -5
packages/widgets/src/lib/components/Icons/IconRefresh.svelte
ADDED
@@ -0,0 +1,19 @@
+<script lang="ts">
+	export let classNames = "";
+</script>
+
+<svg
+	class={classNames}
+	xmlns="http://www.w3.org/2000/svg"
+	xmlns:xlink="http://www.w3.org/1999/xlink"
+	aria-hidden="true"
+	role="img"
+	width="1em"
+	height="1em"
+	preserveAspectRatio="xMidYMid meet"
+	viewBox="0 0 32 32"
+	><path
+		d="M25.95 7.65l.005-.004c-.092-.11-.197-.206-.293-.312c-.184-.205-.367-.41-.563-.603c-.139-.136-.286-.262-.43-.391c-.183-.165-.366-.329-.558-.482c-.16-.128-.325-.247-.49-.367c-.192-.14-.385-.277-.585-.406a13.513 13.513 0 0 0-.533-.324q-.308-.179-.625-.341c-.184-.094-.37-.185-.56-.27c-.222-.1-.449-.191-.678-.28c-.19-.072-.378-.145-.571-.208c-.246-.082-.498-.15-.75-.217c-.186-.049-.368-.102-.556-.143c-.29-.063-.587-.107-.883-.15c-.16-.023-.315-.056-.476-.073A12.933 12.933 0 0 0 6 7.703V4H4v8h8v-2H6.811A10.961 10.961 0 0 1 16 5a11.111 11.111 0 0 1 1.189.067c.136.015.268.042.403.061c.25.037.501.075.746.128c.16.035.315.08.472.121c.213.057.425.114.633.183c.164.054.325.116.486.178c.193.074.384.15.57.235c.162.072.32.15.477.23q.268.136.526.286c.153.09.305.18.453.276c.168.11.33.224.492.342c.14.102.282.203.417.312c.162.13.316.268.47.406c.123.11.248.217.365.332c.167.164.323.338.479.512A10.993 10.993 0 1 1 5 16H3a13 13 0 1 0 22.95-8.35z"
+		fill="currentColor"
+	/></svg
+>
packages/widgets/src/lib/components/InferenceWidget/shared/WidgetExamples/WidgetExamples.svelte
CHANGED
@@ -12,6 +12,7 @@
 
 	export let isLoading = false;
 	export let callApiOnMount: WidgetProps["callApiOnMount"];
+	export let classNames: string;
 	export let exampleQueryParams: WidgetExampleAttribute[] = [];
 	export let applyWidgetExample: (sample: TWidgetExample, opts?: ExampleRunOpts) => void;
 
@@ -117,7 +118,7 @@
 
 <svelte:window on:click={onClick} />
 
-<div class=
+<div class={classNames}>
 	<!-- Example Groups -->
 	{#if exampleGroups.length > 1}
 		<WidgetExamplesGroup
packages/widgets/src/lib/components/InferenceWidget/shared/WidgetHeader/WidgetHeader.svelte
CHANGED
@@ -1,13 +1,17 @@
 <script lang="ts" generics="TWidgetExample extends WidgetExample">
+	import { fade } from "svelte/transition";
+
 	import { updateWidgetState } from "../../stores.js";
 	import { TASKS_DATA } from "@huggingface/tasks";
 	import type { WidgetExample, WidgetExampleAttribute } from "@huggingface/tasks";
 	import type { WidgetProps, ExampleRunOpts } from "../types.js";
 	import { getPipelineTask } from "../../../../utils/ViewUtils.js";
 	import IconInfo from "../../..//Icons/IconInfo.svelte";
+	import IconRefresh from "../../..//Icons/IconRefresh.svelte";
 	import IconLightning from "../../..//Icons/IconLightning.svelte";
 	import PipelineTag from "../../../PipelineTag/PipelineTag.svelte";
 	import WidgetExamples from "../WidgetExamples/WidgetExamples.svelte";
+	import { createEventDispatcher } from "svelte";
 
 	export let model: WidgetProps["model"];
 	export let noTitle = false;
@@ -18,7 +22,9 @@
 	export let validateExample: ((sample: WidgetExample) => sample is TWidgetExample) | undefined = undefined;
 	export let callApiOnMount: WidgetProps["callApiOnMount"] = false;
 	export let exampleQueryParams: WidgetExampleAttribute[] = [];
+	export let showReset = false;
 
+	const dispatch = createEventDispatcher<{ reset: void }>();
 	const pipeline = model?.pipeline_tag;
 
 	$: task = pipeline ? getPipelineTask(pipeline) : undefined;
@@ -62,7 +68,7 @@
 		{/if}
 	{/if}
 </div>
-<div class="mb-0.5 flex w-full max-w-full flex-wrap items-center
+<div class="mb-0.5 flex w-full max-w-full flex-wrap items-center text-sm text-gray-500">
 	{#if pipeline && task}
 		<div class="flex gap-4 items-center mb-1.5">
 			<a
@@ -75,7 +81,21 @@
 		</div>
 	{/if}
 
-
-
-
+	<div class="flex gap-2 ml-auto">
+		{#if showReset && !isDisabled}
+			<button class="flex items-center mb-1.5 text-gray-400" on:click={() => dispatch("reset")} transition:fade>
+				<IconRefresh />
+			</button>
+		{/if}
+		{#if validExamples.length && applyWidgetExample}
+			<WidgetExamples
+				classNames="flex gap-x-1 peer:"
+				{validExamples}
+				{isLoading}
+				{applyWidgetExample}
+				{callApiOnMount}
+				{exampleQueryParams}
+			/>
+		{/if}
+	</div>
 </div>
packages/widgets/src/lib/components/InferenceWidget/shared/WidgetOutputConvo/WidgetOutputConvo.svelte
CHANGED
@@ -4,11 +4,13 @@
 	import { isFullyScrolled, scrollToMax } from "../../../../utils/ViewUtils.js";
 	import WidgetOutputConvoBubble from "../WidgetOuputConvoBubble/WidgetOutputConvoBubble.svelte";
 	import type { ChatMessage } from "@huggingface/tasks";
+	import { widgetStates } from "../../stores.js";
 
 	export let modelId: string;
 	export let messages: ChatMessage[];
 
 	let wrapperEl: HTMLElement;
+	$: isMaximized = $widgetStates?.[modelId]?.isMaximized;
 
 	afterUpdate(() => {
 		if (wrapperEl && !isFullyScrolled(wrapperEl)) {
@@ -17,7 +19,10 @@
 	});
 </script>
 
-<div
+<div
+	bind:this={wrapperEl}
+	class="overflow-y-auto rounded-t-lg border border-b-0 leading-tight {isMaximized ? 'flex-1' : 'h-64'}"
+>
 	<div class="p-3 pt-6 text-center text-sm text-gray-400 text-balance">
 		Input a message to start chatting with
 		<strong>{modelId}</strong>.
packages/widgets/src/lib/components/InferenceWidget/shared/WidgetQuickInput/WidgetQuickInput.svelte
CHANGED
@@ -18,6 +18,7 @@
 		required={true}
 		type="text"
 		disabled={isLoading || isDisabled}
+		autocomplete="off"
 	/>
 	<WidgetSubmitBtn
 		classNames="rounded-l-none border-l-0 {flatTop ? 'rounded-t-none' : ''}"
packages/widgets/src/lib/components/InferenceWidget/shared/inputValidation.ts
CHANGED
@@ -14,7 +14,7 @@ import type {
 	WidgetExampleZeroShotTextInput,
 } from "@huggingface/tasks";
 
-function isObject(arg: unknown): arg is Record<string, unknown> {
+export function isObject(arg: unknown): arg is Record<string, unknown> {
 	return !!arg && arg?.constructor === Object;
 }
 function isStrArray(arg: unknown): arg is string[] {
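
Exporting isObject lets other modules reuse the same runtime type guard instead of re-implementing it; the ConversationalWidget diff below imports it. A minimal sketch of that kind of reuse, with a hypothetical looksLikeChatMessage helper that is not part of this commit:

import { isObject } from "./inputValidation.js";

// Hypothetical helper (illustration only): narrow an unknown value to a
// chat-message-like shape before reading its fields.
function looksLikeChatMessage(value: unknown): value is { role: string; content: string } {
	return isObject(value) && typeof value.role === "string" && typeof value.content === "string";
}

console.log(looksLikeChatMessage({ role: "user", content: "Hi" })); // true
console.log(looksLikeChatMessage("not a message")); // false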
packages/widgets/src/lib/components/InferenceWidget/widgets/ConversationalWidget/ConversationalWidget.svelte
CHANGED
@@ -22,9 +22,8 @@
 	import { addInferenceParameters, updateUrl } from "../../shared/helpers.js";
 	import { widgetStates, getTgiSupportedModels } from "../../stores.js";
 	import type { Writable } from "svelte/store";
-	import { isChatInput, isTextInput } from "../../shared/inputValidation.js";
+	import { isChatInput, isObject, isTextInput } from "../../shared/inputValidation.js";
 	import { isValidOutputText } from "../../shared/outputValidation.js";
-	import WidgetExamples from "../../shared/WidgetExamples/WidgetExamples.svelte";
 
 	export let apiToken: WidgetProps["apiToken"];
 	export let apiUrl: WidgetProps["apiUrl"];
@@ -49,6 +48,7 @@
 	let compiledTemplate: Template;
 	let tokenizerConfig: TokenizerConfig;
 	let inferenceClient: HfInference | undefined = undefined;
+	let abort: AbortController | undefined = undefined;
 
 	// Check config and compile template
 	onMount(() => {
@@ -146,7 +146,10 @@
 		};
 		addInferenceParameters(input, model);
 
+		isLoading = true;
+		abort = new AbortController();
 		text = "";
+		error = "";
 		try {
 			if ($tgiSupportedModels?.has(model.id)) {
 				console.debug("Starting text generation using the TGI streaming API");
@@ -155,11 +158,14 @@
 					content: "",
 				} satisfies ChatMessage;
 				const previousMessages = [...messages];
-				const tokenStream = inferenceClient.textGenerationStream(
-
-
-
-
+				const tokenStream = inferenceClient.textGenerationStream(
+					{
+						...input,
+						model: model.id,
+						accessToken: apiToken,
+					},
+					{ signal: abort?.signal }
+				);
 				for await (const newToken of tokenStream) {
 					if (newToken.token.special) continue;
 					newMessage.content = newMessage.content + newToken.token.text;
@@ -171,13 +177,20 @@
 				input.parameters.max_new_tokens = 100;
 				const output = await inferenceClient.textGeneration(
 					{ ...input, model: model.id, accessToken: apiToken },
-					{ includeCredentials, dont_load_model: !withModelLoading }
+					{ includeCredentials, dont_load_model: !withModelLoading, signal: abort?.signal }
 				);
 				messages = [...messages, { role: "assistant", content: output.generated_text }];
 				await tick();
 			}
 		} catch (e) {
-
+			if (!!e && typeof e === "object" && "message" in e && typeof e.message === "string") {
+				error = e.message;
+			} else {
+				error = `Something went wrong with the request.`;
+			}
+		} finally {
+			isLoading = false;
+			abort = undefined;
 		}
 	}
 
@@ -218,10 +231,28 @@
 	function validateExample(sample: WidgetExample): sample is Example {
 		return (isTextInput(sample) || isChatInput(sample)) && (!sample.output || isValidOutputText(sample.output));
 	}
+
+	async function clearConversation() {
+		error = "";
+		abort?.abort();
+		messages = [];
+		text = "";
+		await tick();
+	}
 </script>
 
 <WidgetWrapper {apiUrl} {includeCredentials} {model} let:WidgetInfo let:WidgetHeader let:WidgetFooter>
-	<WidgetHeader
+	<WidgetHeader
+		{noTitle}
+		{model}
+		{isLoading}
+		{isDisabled}
+		{callApiOnMount}
+		{applyWidgetExample}
+		{validateExample}
+		on:reset={clearConversation}
+		showReset={!!messages.length}
+	/>
 	<WidgetOutputConvo modelId={model.id} {messages} />
 
 	<WidgetQuickInput
packages/widgets/src/routes/+page.svelte
CHANGED
@@ -31,19 +31,21 @@
 
 	const models: ModelData[] = [
 		{
-			id: "
+			id: "mistralai/Mistral-7B-Instruct-v0.2",
 			pipeline_tag: "text-generation",
 			tags: ["conversational"],
 			inference: InferenceDisplayability.Yes,
 			config: {
+				architectures: ["MistralForCausalLM"],
+				model_type: "mistral",
 				tokenizer_config: {
-					bos_token: "<s>",
 					chat_template:
-						"{% for message in messages %}
+						"{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
+					use_default_system_prompt: false,
+					bos_token: "<s>",
 					eos_token: "</s>",
-					pad_token: "</s>",
 					unk_token: "<unk>",
-
+					pad_token: undefined,
 				},
 			},
 			widgetData: [
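
The demo page now passes a full Mistral-style chat_template to the widget. For reference, a template like this can be rendered with the Template class from @huggingface/jinja, the same class the ConversationalWidget compiles its template with; the snippet below is a sketch with the template abbreviated to its user/assistant branches, not the exact string above:

import { Template } from "@huggingface/jinja";

// Abbreviated template: only the user/assistant branches, without the role-alternation checks.
const chatTemplate =
	"{{ bos_token }}{% for message in messages %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% else %}{{ message['content'] + eos_token }}{% endif %}{% endfor %}";

const template = new Template(chatTemplate);
const prompt = template.render({
	messages: [{ role: "user", content: "Hello!" }],
	bos_token: "<s>",
	eos_token: "</s>",
});

console.log(prompt); // "<s>[INST] Hello! [/INST]"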