noobydp committed on
Commit 94fa108 · unverified · 2 Parent(s): dd4dd2c 05c0137

Merge pull request #1 from ZerxZ/main

chore: Add environment variables for OpenAI Like integration

.env.example CHANGED
@@ -29,5 +29,11 @@ GOOGLE_GENERATIVE_AI_API_KEY=
 # EXAMPLE http://localhost:11434
 OLLAMA_API_BASE_URL=
 
+# You only need this environment variable set if you want to use OpenAI Like models
+OPENAI_LIKE_API_BASE_URL=
+
+# Get your OpenAI Like API Key
+OPENAI_LIKE_API_KEY=
+
 # Include this environment variable if you want more logging for debugging locally
 VITE_LOG_LEVEL=debug
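
For reference, a filled-in .env pointing at a self-hosted OpenAI-compatible server might look like the following; the URL and key are placeholders, not values from this commit.

# Example only: any OpenAI-compatible endpoint works; the base URL is what /models gets appended to
OPENAI_LIKE_API_BASE_URL=http://localhost:8000/v1
OPENAI_LIKE_API_KEY=sk-placeholder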
app/components/chat/BaseChat.tsx CHANGED
@@ -28,7 +28,7 @@ const ModelSelector = ({ model, setModel, modelList, providerList }) => {
   const [provider, setProvider] = useState(DEFAULT_PROVIDER);
   return (
     <div className="mb-2">
-      <select
+      <select
         value={provider}
         onChange={(e) => {
           setProvider(e.target.value);
@@ -42,9 +42,12 @@ const ModelSelector = ({ model, setModel, modelList, providerList }) => {
             {provider}
           </option>
         ))}
-        <option key="Ollama" value="Ollama">
-          Ollama
-        </option>
+        <option key="Ollama" value="Ollama">
+          Ollama
+        </option>
+        <option key="OpenAILike" value="OpenAILike">
+          OpenAILike
+        </option>
       </select>
       <select
         value={model}
@@ -263,4 +266,4 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
       </div>
     );
   },
-);
+);
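
For context, the second dropdown is typically populated by filtering MODEL_LIST on the selected provider, so choosing "OpenAILike" surfaces the entries returned by getOpenAILikeModels(). A minimal sketch of that filtering, assuming the ModelInfo shape from app/utils/constants.ts (illustrative, not part of this diff):

// Illustrative only: show just the models whose provider matches the dropdown selection.
const visibleModels = modelList.filter((m: ModelInfo) => m.provider === provider);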
app/lib/.server/llm/api-key.ts CHANGED
@@ -19,6 +19,17 @@ export function getAPIKey(cloudflareEnv: Env, provider: string) {
       return env.GROQ_API_KEY || cloudflareEnv.GROQ_API_KEY;
     case 'OpenRouter':
       return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY;
+    case "OpenAILike":
+      return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY;
+    default:
+      return "";
+  }
+}
+
+export function getBaseURL(cloudflareEnv: Env, provider: string) {
+  switch (provider) {
+    case 'OpenAILike':
+      return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
     default:
       return "";
   }
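
Both helpers follow the same precedence as the existing providers: the Node-side env value wins, with the Cloudflare binding as fallback (env.X || cloudflareEnv.X). A hypothetical call for the new provider, for illustration only:

// Illustrative only: resolve the OpenAI-Like settings added in this commit.
const apiKey = getAPIKey(env, 'OpenAILike');   // -> OPENAI_LIKE_API_KEY
const baseURL = getBaseURL(env, 'OpenAILike'); // -> OPENAI_LIKE_API_BASE_URL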
app/lib/.server/llm/model.ts CHANGED
@@ -1,6 +1,6 @@
 // @ts-nocheck
 // Preventing TS checks with files presented in the video for a better presentation.
-import { getAPIKey } from '~/lib/.server/llm/api-key';
+import { getAPIKey, getBaseURL } from '~/lib/.server/llm/api-key';
 import { createAnthropic } from '@ai-sdk/anthropic';
 import { createOpenAI } from '@ai-sdk/openai';
 import { createGoogleGenerativeAI } from '@ai-sdk/google';
@@ -14,7 +14,14 @@ export function getAnthropicModel(apiKey: string, model: string) {
 
   return anthropic(model);
 }
+export function getOpenAILikeModel(baseURL:string,apiKey: string, model: string) {
+  const openai = createOpenAI({
+    baseURL,
+    apiKey,
+  });
 
+  return openai(model);
+}
 export function getOpenAIModel(apiKey: string, model: string) {
   const openai = createOpenAI({
     apiKey,
@@ -54,7 +61,7 @@ export function getOpenRouterModel(apiKey: string, model: string) {
 
 export function getModel(provider: string, model: string, env: Env) {
   const apiKey = getAPIKey(env, provider);
-
+  const baseURL = getBaseURL(env, provider);
 
   switch (provider) {
     case 'Anthropic':
@@ -67,6 +74,8 @@ export function getModel(provider: string, model: string, env: Env) {
       return getOpenRouterModel(apiKey, model);
     case 'Google':
       return getGoogleModel(apiKey, model)
+    case 'OpenAILike':
+      return getOpenAILikeModel(baseURL,apiKey, model);
     default:
       return getOllamaModel(model);
   }
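
For clarity, getOpenAILikeModel simply points the AI SDK's OpenAI provider at a custom base URL. A standalone sketch of the same idea (endpoint and model id below are placeholders, not values from this commit):

import { createOpenAI } from '@ai-sdk/openai';

// Sketch: any server that speaks the OpenAI API can be targeted by overriding baseURL;
// the key may be a dummy value for servers that don't enforce authentication.
const openaiLike = createOpenAI({
  baseURL: 'http://localhost:8000/v1', // placeholder OpenAI-compatible endpoint
  apiKey: 'sk-placeholder',
});
const model = openaiLike('my-local-model'); // placeholder model id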
app/utils/constants.ts CHANGED
@@ -36,7 +36,9 @@ export let MODEL_LIST: ModelInfo[] = [...staticModels];
 
 async function getOllamaModels(): Promise<ModelInfo[]> {
   try {
-    const response = await fetch(`http://localhost:11434/api/tags`);
+    const base_url =import.meta.env.OLLAMA_API_BASE_URL || "http://localhost:11434";
+    const url = new URL(base_url).toString();
+    const response = await fetch(`${url}/api/tags`);
     const data = await response.json();
 
     return data.models.map((model: any) => ({
@@ -49,9 +51,36 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
   }
 }
 
+async function getOpenAILikeModels(): Promise<ModelInfo[]> {
+
+  try {
+    const base_url =import.meta.env.OPENAI_LIKE_API_BASE_URL || "";
+    if (!base_url) {
+      return [];
+    }
+    const url = new URL(base_url).toString();
+    const api_key = import.meta.env.OPENAI_LIKE_API_KEY ?? "";
+    const response = await fetch(`${url}/models`, {
+      headers: {
+        Authorization: `Bearer ${api_key}`,
+      }
+    });
+    const res = await response.json();
+    return res.data.map((model: any) => ({
+      name: model.id,
+      label: model.id,
+      provider: 'OpenAILike',
+    }));
+  }catch (e) {
+    return []
+  }
+
+}
 async function initializeModelList(): Promise<void> {
   const ollamaModels = await getOllamaModels();
-  MODEL_LIST = [...ollamaModels, ...staticModels];
+  const openAiLikeModels = await getOpenAILikeModels();
+  console.log(openAiLikeModels);
+  MODEL_LIST = [...ollamaModels,...openAiLikeModels, ...staticModels];
 }
 initializeModelList().then();
 export { getOllamaModels, initializeModelList };
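
getOpenAILikeModels assumes the endpoint implements the OpenAI-style GET /models listing; roughly the following response shape is expected (field names per the OpenAI list envelope, ids illustrative):

// Illustrative response envelope read by getOpenAILikeModels():
// each res.data[i].id becomes both the model name and its label.
type OpenAILikeModelsResponse = {
  object: 'list';
  data: { id: string; object?: string; owned_by?: string }[];
};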
vite.config.ts CHANGED
@@ -27,6 +27,7 @@ export default defineConfig((config) => {
       chrome129IssuePlugin(),
       config.mode === 'production' && optimizeCssModules({ apply: 'build' }),
     ],
+    envPrefix:["VITE_","OPENAI_LIKE_API_","OLLAMA_API_BASE_URL"],
   };
 });
 
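Vite only exposes environment variables whose names match envPrefix on import.meta.env, so this entry is what makes the OPENAI_LIKE_API_* and OLLAMA_API_BASE_URL reads in app/utils/constants.ts resolve. A quick illustrative check:

// Illustrative only: after the envPrefix change these are readable via import.meta.env.
console.log(import.meta.env.OPENAI_LIKE_API_BASE_URL);
console.log(import.meta.env.OLLAMA_API_BASE_URL);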
worker-configuration.d.ts CHANGED
@@ -4,4 +4,6 @@ interface Env {
   GROQ_API_KEY: string;
   OPEN_ROUTER_API_KEY: string;
   OLLAMA_API_BASE_URL: string;
+  OPENAI_LIKE_API_KEY: string;
+  OPENAI_LIKE_API_BASE_URL: string;
 }
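
These Env fields are what getAPIKey and getBaseURL read from the Cloudflare-provided environment; how the values are supplied on the Cloudflare side is deployment-specific and not part of this commit. A hypothetical typed consumer, for illustration only:

// Illustrative only: the interface extension lets typed code read the new bindings.
function hasOpenAILikeConfig(env: Env): boolean {
  return Boolean(env.OPENAI_LIKE_API_BASE_URL && env.OPENAI_LIKE_API_KEY);
}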