engineofperplexity committed on
Commit
6aaf221
·
1 Parent(s): 13afdbf

first upload

Files changed (8)
  1. .gitignore +0 -0
  2. package-lock.json +0 -0
  3. package.json +20 -0
  4. src/chatwrapper.ts +69 -0
  5. src/mapper.ts +124 -0
  6. src/remoteimage.ts +9 -0
  7. src/server.ts +97 -0
  8. tsconfig.json +17 -0
.gitignore ADDED
Binary file (32 Bytes).
 
package-lock.json ADDED
The diff for this file is too large to render.
 
package.json ADDED
@@ -0,0 +1,20 @@
+ {
+   "name": "gcli_oai_proxy",
+   "version": "1.0.0",
+   "main": "index.js",
+   "scripts": {
+     "test": "echo \"Error: no test specified\" && exit 1"
+   },
+   "keywords": [],
+   "author": "",
+   "license": "ISC",
+   "description": "",
+   "dependencies": {
+     "@google/gemini-cli": "^0.1.2"
+   },
+   "devDependencies": {
+     "@types/node": "^24.0.4",
+     "ts-node": "^10.9.2",
+     "typescript": "^5.8.3"
+   }
+ }

src/chatwrapper.ts ADDED
@@ -0,0 +1,69 @@
+ // src/chatwrapper.ts
+ import {
+   AuthType,
+   createContentGeneratorConfig,
+   createContentGenerator,
+ } from '@google/gemini-cli-core/dist/src/core/contentGenerator.js';
+
+ /* ------------------------------------------------------------------ */
+ /* 1. Build the ContentGenerator exactly like the CLI does             */
+ /* ------------------------------------------------------------------ */
+ let modelName: string;                     // we'll fill this once
+ const generatorPromise = (async () => {
+   // Pass undefined for model so the helper falls back to DEFAULT_GEMINI_MODEL
+   const cfg = await createContentGeneratorConfig(
+     undefined,           // let helper pick default (Gemini-2.5-Pro)
+     AuthType.USE_GEMINI  // same mode the CLI defaults to
+   );
+   modelName = cfg.model; // remember the actual model string
+   return await createContentGenerator(cfg);
+ })();
+
+ /* ------------------------------------------------------------------ */
+ /* 2. Helpers consumed by server.ts                                    */
+ /* ------------------------------------------------------------------ */
+ type GenConfig = Record<string, unknown>;
+
+ export async function sendChat({
+   contents,
+   generationConfig = {},
+ }: {
+   contents: any[];
+   generationConfig?: GenConfig;
+   tools?: unknown;       // accepted but ignored for now
+ }) {
+   const generator: any = await generatorPromise;
+   return await generator.generateContent({
+     model: modelName,
+     contents,
+     config: generationConfig,
+   });
+ }
+
+ export async function* sendChatStream({
+   contents,
+   generationConfig = {},
+ }: {
+   contents: any[];
+   generationConfig?: GenConfig;
+   tools?: unknown;
+ }) {
+   const generator: any = await generatorPromise;
+   const stream = await generator.generateContentStream({
+     model: modelName,
+     contents,
+     config: generationConfig,
+   });
+   for await (const chunk of stream) yield chunk;
+ }
+
+ /* ------------------------------------------------------------------ */
+ /* 3. Minimal stubs so server.ts compiles (extend later)               */
+ /* ------------------------------------------------------------------ */
+ export function listModels() {
+   return [{ id: modelName }];
+ }
+
+ export async function embed(_input: unknown) {
+   throw new Error('Embeddings endpoint not implemented yet.');
+ }

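Usage sketch (not part of the commit): a minimal, illustrative way to drive the two helpers above, assuming the Gemini CLI credentials are already configured the way AuthType.USE_GEMINI expects (e.g. a GEMINI_API_KEY in the environment). The file name and prompts are hypothetical; the message shape mirrors what mapper.ts builds.

// usage-sketch.ts (hypothetical) – exercises chatwrapper.ts directly
import { sendChat, sendChatStream } from './chatwrapper';

async function demo() {
  // Non-streaming call: one user turn with a plain text part
  const resp = await sendChat({
    contents: [{ role: 'user', parts: [{ text: 'Hello, Gemini!' }] }],
    generationConfig: { temperature: 0.7, maxOutputTokens: 256 },
  });
  console.log(JSON.stringify(resp, null, 2));

  // Streaming call: chunks are yielded as they arrive
  for await (const chunk of sendChatStream({
    contents: [{ role: 'user', parts: [{ text: 'Stream a short poem.' }] }],
  })) {
    process.stdout.write(chunk?.candidates?.[0]?.content?.parts?.[0]?.text ?? '');
  }
}

demo().catch(console.error);
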
src/mapper.ts ADDED
@@ -0,0 +1,124 @@
+ /* ------------------------------------------------------------------ */
+ /* mapper.ts – OpenAI ⇆ Gemini (with reasoning/1 M context)            */
+ /* ------------------------------------------------------------------ */
+ import { fetchAndEncode } from './remoteimage';
+ import { z } from 'zod';
+ import { ToolRegistry } from '@google/gemini-cli-core/dist/src/tools/tool-registry.js';
+
+ /* ------------------------------------------------------------------ */
+ type Part = { text?: string; inlineData?: { mimeType: string; data: string } };
+
+ /* ------------------------------------------------------------------ */
+ async function callLocalFunction(_name: string, _args: unknown) {
+   return { ok: true };
+ }
+
+ /* ================================================================== */
+ /* Request mapper: OpenAI ➞ Gemini                                     */
+ /* ================================================================== */
+ export async function mapRequest(body: any) {
+   const parts: Part[] = [];
+
+   /* ---- convert messages & vision --------------------------------- */
+   for (const m of body.messages) {
+     if (Array.isArray(m.content)) {
+       for (const item of m.content) {
+         if (item.type === 'image_url') {
+           parts.push({ inlineData: await fetchAndEncode(item.image_url.url) });
+         } else if (item.type === 'text') {
+           parts.push({ text: item.text });
+         }
+       }
+     } else {
+       parts.push({ text: m.content });
+     }
+   }
+
+   /* ---- base generationConfig ------------------------------------- */
+   const generationConfig: Record<string, unknown> = {
+     temperature: body.temperature,
+     maxOutputTokens: body.max_tokens,
+     topP: body.top_p,
+     ...(body.generationConfig ?? {}),          // copy anything ST already merged
+   };
+   if (body.include_reasoning === true) {
+     generationConfig.enable_thoughts = true;   // ← current flag
+     generationConfig.thinking_budget ??= 2048; // optional limit
+   }
+
+   /* ---- auto-enable reasoning & 1 M context ----------------------- */
+   if (body.include_reasoning === true && generationConfig.thinking !== true) {
+     generationConfig.thinking = true;
+     generationConfig.thinking_budget ??= 2048;
+   }
+   generationConfig.maxInputTokens ??= 1_000_000; // lift context cap
+
+   const geminiReq = {
+     contents: [{ role: 'user', parts }],
+     generationConfig,
+     stream: body.stream,
+   };
+
+   /* ---- Tool / function mapping ----------------------------------- */
+   const tools = new ToolRegistry({} as any);
+
+   if (body.functions?.length) {
+     const reg = tools as any;
+     body.functions.forEach((fn: any) =>
+       reg.registerTool(
+         fn.name,
+         {
+           title: fn.name,
+           description: fn.description ?? '',
+           inputSchema: z.object(fn.parameters?.properties ?? {}),
+         },
+         async (args: unknown) => callLocalFunction(fn.name, args),
+       ),
+     );
+   }
+
+   return { geminiReq, tools };
+ }
+
+ /* ================================================================== */
+ /* Non-stream response: Gemini ➞ OpenAI                                */
+ /* ================================================================== */
+ export function mapResponse(gResp: any) {
+   const usage = gResp.usageMetadata ?? {};
+   return {
+     id: `chatcmpl-${Date.now()}`,
+     object: 'chat.completion',
+     created: Math.floor(Date.now() / 1000),
+     model: 'gemini-2.5-pro-latest',
+     choices: [
+       {
+         index: 0,
+         message: { role: 'assistant', content: gResp.text },
+         finish_reason: 'stop',
+       },
+     ],
+     usage: {
+       prompt_tokens: usage.promptTokens ?? 0,
+       completion_tokens: usage.candidatesTokens ?? 0,
+       total_tokens: usage.totalTokens ?? 0,
+     },
+   };
+ }
+
+ /* ================================================================== */
+ /* Stream chunk mapper: Gemini ➞ OpenAI                                */
+ /* ================================================================== */
+
+ export function mapStreamChunk(chunk: any) {
+   const part = chunk?.candidates?.[0]?.content?.parts?.[0] ?? {};
+   const delta: any = { role: 'assistant' };
+
+   if (part.thought === true) {
+     delta.content = `<think>${part.text ?? ''}`; // ST renders grey bubble
+   } else if (typeof part.text === 'string') {
+     delta.content = part.text;
+   }
+   return { choices: [{ delta, index: 0 }] };
+ }
+
+

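Illustrative round-trip sketch (not part of the commit), assuming the shapes shown in mapper.ts above; the inputs and file name are hypothetical. It shows how an OpenAI-style body with include_reasoning becomes a Gemini request, and how a Gemini "thought" chunk becomes a <think>-prefixed delta.

// mapper-sketch.ts (hypothetical) – shapes follow mapper.ts above
import { mapRequest, mapStreamChunk } from './mapper';

async function sketch() {
  // OpenAI-style chat body with reasoning requested
  const { geminiReq } = await mapRequest({
    messages: [{ role: 'user', content: 'Summarise this repo.' }],
    temperature: 0.5,
    max_tokens: 512,
    include_reasoning: true,
    stream: false,
  });
  console.log(geminiReq.generationConfig); // thinking flags + 1M maxInputTokens

  // A Gemini stream chunk whose first part is a thought
  const mapped = mapStreamChunk({
    candidates: [{ content: { parts: [{ thought: true, text: 'planning…' }] } }],
  });
  console.log(mapped.choices[0].delta.content); // "<think>planning…"
}

sketch().catch(console.error);
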
src/remoteimage.ts ADDED
@@ -0,0 +1,9 @@
+ import { fetch } from 'undici'; // Node ≥ 18 has global fetch; otherwise add undici
+
+ export async function fetchAndEncode(url: string) {
+   const res = await fetch(url);
+   if (!res.ok) throw new Error(`Failed to fetch image: ${url}`);
+   const buf = Buffer.from(await res.arrayBuffer());
+   const mimeType = res.headers.get('content-type') || 'image/png';
+   return { mimeType, data: buf.toString('base64') };
+ }

src/server.ts ADDED
@@ -0,0 +1,97 @@
+ import http from 'http';
+ import { sendChat, sendChatStream } from './chatwrapper';
+ import { mapRequest, mapResponse, mapStreamChunk } from './mapper';
+
+ /* ── basic config ─────────────────────────────────────────────────── */
+ const PORT = Number(process.env.PORT ?? 11434);
+
+ /* ── CORS helper ──────────────────────────────────────────────────── */
+ function allowCors(res: http.ServerResponse) {
+   res.setHeader('Access-Control-Allow-Origin', '*');
+   res.setHeader('Access-Control-Allow-Headers', '*');
+   res.setHeader('Access-Control-Allow-Methods', 'GET,POST,OPTIONS');
+ }
+
+ /* ── JSON body helper ─────────────────────────────────────────────── */
+ function readJSON(
+   req: http.IncomingMessage,
+   res: http.ServerResponse,
+ ): Promise<any | null> {
+   return new Promise((resolve) => {
+     let data = '';
+     req.on('data', (c) => (data += c));
+     req.on('end', () => {
+       try {
+         resolve(data ? JSON.parse(data) : {});
+       } catch {
+         res.writeHead(400).end(); // malformed JSON
+         resolve(null);
+       }
+     });
+   });
+ }
+
+ /* ── server ───────────────────────────────────────────────────────── */
+ http
+   .createServer(async (req, res) => {
+     allowCors(res);
+
+     /* -------- pre-flight ---------- */
+     if (req.method === 'OPTIONS') {
+       res.writeHead(204).end();
+       return;
+     }
+
+     /* -------- /v1/models ---------- */
+     if (req.url === '/v1/models') {
+       res.writeHead(200, { 'Content-Type': 'application/json' });
+       res.end(
+         JSON.stringify({
+           data: [
+             {
+               id: 'gemini-2.5-pro-latest',
+               object: 'model',
+               owned_by: 'google',
+             },
+           ],
+         }),
+       );
+       return;
+     }
+
+     /* ---- /v1/chat/completions ---- */
+     if (req.url === '/v1/chat/completions' && req.method === 'POST') {
+       const body = await readJSON(req, res);
+       if (!body) return;
+
+       try {
+         const { geminiReq, tools } = await mapRequest(body);
+
+         if (body.stream) {
+           res.writeHead(200, {
+             'Content-Type': 'text/event-stream',
+             'Cache-Control': 'no-cache',
+             Connection: 'keep-alive',
+           });
+
+           for await (const chunk of sendChatStream({ ...geminiReq, tools })) {
+             res.write(`data: ${JSON.stringify(mapStreamChunk(chunk))}\n\n`);
+           }
+           res.end('data: [DONE]\n\n');
+         } else {
+           const gResp = await sendChat({ ...geminiReq, tools });
+           res.writeHead(200, { 'Content-Type': 'application/json' });
+           res.end(JSON.stringify(mapResponse(gResp)));
+         }
+       } catch (err: any) {
+         console.error('Proxy error ➜', err);
+         res.writeHead(500, { 'Content-Type': 'application/json' });
+         res.end(JSON.stringify({ error: { message: err.message } }));
+       }
+       return;
+     }
+
+     /* ---- anything else ---------- */
+     res.writeHead(404).end();
+   })
+   .listen(PORT, () => console.log(`OpenAI proxy on :${PORT}`));

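Illustrative client sketch (not part of the commit): once the proxy is listening on :11434, any OpenAI-compatible client can point at it. A minimal hand-rolled call using Node 18+'s global fetch, with a hypothetical file name and prompt, matching the response shape produced by mapResponse:

// client-sketch.ts (hypothetical) – calls the running proxy
async function main() {
  const res = await fetch('http://localhost:11434/v1/chat/completions', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      model: 'gemini-2.5-pro-latest',
      messages: [{ role: 'user', content: 'Say hello in five words.' }],
      stream: false,
    }),
  });
  const data: any = await res.json();
  console.log(data.choices[0].message.content);
}

main().catch(console.error);
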
tsconfig.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "compilerOptions": {
+     /* ---- target & module ---- */
+     "target": "ES2020",
+     "module": "commonjs",
+     "moduleResolution": "node",
+
+     /* ---- quality-of-life ---- */
+     "esModuleInterop": true,
+     "skipLibCheck": true,
+     "forceConsistentCasingInFileNames": true,
+
+     /* ---- output dir ---- */
+     "outDir": "dist"
+   },
+   "include": ["src/**/*"]
+ }