:recycle: [Refactor] ChatCompletionsRequester: post
- networks/llm_requester.js +22 -22
- server.js +0 -2
networks/llm_requester.js

@@ -74,29 +74,29 @@ export class ChatCompletionsRequester {
         create_messager("user", this.prompt);
         create_messager("assistant", "", this.model, this.temperature);
     }
-    post() {
+    async post() {
         this.construct_request_params();
+        const response = await fetch(
+            this.backend_request_endpoint,
+            this.backend_request_params
+        );
+        const reader = response.body.getReader();
+        let buffer = "";
+        return reader.read().then(function process({ done, value }) {
+            if (done) {
+                return;
+            }
+            buffer += stringify_stream_bytes(value);
+            let boundary = buffer.lastIndexOf("\n");
+            if (boundary !== -1) {
+                let input = buffer.substring(0, boundary);
+                buffer = buffer.substring(boundary + 1);
+                let json_chunks = jsonize_stream_data(input);
+                console.log(json_chunks);
+                update_message(json_chunks);
+            }
+            return reader.read().then(process);
+        });
     }
     stop() {
         this.controller.abort();
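The new async post() consumes the fetch response with a recursive reader.read().then(process) loop: each chunk is appended to buffer, and only the text up to the last "\n" is parsed, so a JSON line split across two chunks is held back until it is complete. A minimal standalone sketch of that pattern, assuming a hypothetical handle_line in place of the repo's jsonize_stream_data/update_message helpers and a TextDecoder in place of stringify_stream_bytes:

// Minimal sketch of the newline-boundary streaming pattern used by post().
// `url`, `params`, and `handle_line` are placeholders, not names from the repo.
async function read_stream(url, params) {
    const response = await fetch(url, params);
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    let buffer = "";
    return reader.read().then(function process({ done, value }) {
        if (done) {
            // Flush a trailing partial line, if any.
            if (buffer) {
                handle_line(buffer);
            }
            return;
        }
        buffer += decoder.decode(value, { stream: true });
        // Parse only up to the last complete line; keep the partial tail.
        const boundary = buffer.lastIndexOf("\n");
        if (boundary !== -1) {
            const complete = buffer.substring(0, boundary);
            buffer = buffer.substring(boundary + 1);
            complete.split("\n").forEach(handle_line);
        }
        return reader.read().then(process);
    });
}

One thing the diff leaves implicit: for stop() to cancel the stream, backend_request_params presumably includes signal: this.controller.signal, so that this.controller.abort() rejects the pending fetch/read() with an AbortError.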
server.js

@@ -28,7 +28,6 @@ app.post("/chat/completions", async (req, res) => {
             headers: openai_request_headers,
             responseType: "stream",
         });
-
         response.data.pipe(res);
     } catch (error) {
         console.error(error);

@@ -50,7 +49,6 @@ app.post("/models", async (req, res) => {
             headers: openai_request_headers,
         });
         res.json(response.data);
-
     } catch (error) {
        console.error(error);
        res.status(500).json({ error: "Failed to request OpenAI Endpoint" });
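The server.js side is untouched apart from two deleted blank lines; both hunks sit inside proxy routes that forward requests to OpenAI. For orientation, a hedged sketch of the streaming route around the first hunk, where the axios.post call shape and the chat_completions_url constant are assumptions; only the headers, responseType, pipe, and error handling appear in the diff itself:

// Hedged reconstruction of the /chat/completions proxy route.
// `chat_completions_url` is a placeholder; the options object and the
// error handling are the parts actually shown in the hunk above.
app.post("/chat/completions", async (req, res) => {
    try {
        const response = await axios.post(chat_completions_url, req.body, {
            headers: openai_request_headers,
            responseType: "stream",
        });
        // Pipe the upstream stream straight through to the client,
        // which post() in llm_requester.js then reads incrementally.
        response.data.pipe(res);
    } catch (error) {
        console.error(error);
        res.status(500).json({ error: "Failed to request OpenAI Endpoint" });
    }
});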