spacing
src/lib/components/InferencePlayground/inferencePlaygroundUtils.ts CHANGED

@@ -2,10 +2,12 @@ import { type ChatCompletionInputMessage } from "@huggingface/tasks";
 import { HfInference } from "@huggingface/inference";
 import type { Conversation, ModelEntryWithTokenizer } from "$lib/types";
 
+
 export function createHfInference(token: string): HfInference {
 	return new HfInference(token);
 }
 
+
 export async function handleStreamingResponse(
 	hf: HfInference,
 	conversation: Conversation,
@@ -34,6 +36,7 @@ export async function handleStreamingResponse(
 	}
 }
 
+
 export async function handleNonStreamingResponse(
 	hf: HfInference,
 	conversation: Conversation,
@@ -57,6 +60,7 @@ export async function handleNonStreamingResponse(
 	throw new Error("No response from the model");
 }
 
+
 export function isSystemPromptSupported(model: ModelEntryWithTokenizer) {
 	return model.tokenizerConfig?.chat_template?.includes("system");
 }
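
For orientation, a minimal, hypothetical usage sketch of the two helpers whose signatures are fully visible in the hunks above (createHfInference and isSystemPromptSupported). The token string and the model argument are placeholders, and handleStreamingResponse / handleNonStreamingResponse are not called here because their full parameter lists are truncated in this diff.

import { HfInference } from "@huggingface/inference";
import type { ModelEntryWithTokenizer } from "$lib/types";
import { createHfInference, isSystemPromptSupported } from "./inferencePlaygroundUtils";

// Placeholder token; in the playground this would be a user-supplied HF access token.
const hf: HfInference = createHfInference("hf_...");

function checkModel(model: ModelEntryWithTokenizer): void {
	// isSystemPromptSupported is a heuristic: it looks for the literal
	// substring "system" in the model's chat_template.
	if (isSystemPromptSupported(model)) {
		console.log("Model accepts a system prompt");
	} else {
		console.log("No system role found in the model's chat template");
	}
}

Note that checking chat_template for the substring "system" is a cheap proxy rather than a full template parse, which is consistent with the optional-chaining, best-effort style of the helper itself.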