Commit d239855 · Parent(s): e02039d
removed usable models

1 changed file: src/config.py (+4 -69)

src/config.py CHANGED
@@ -37,39 +37,13 @@ DEFAULT_SAFETY_SETTINGS = [
 
 # Supported Models (for /v1beta/models endpoint)
 SUPPORTED_MODELS = [
-    {
-        "name": "models/gemini-1.5-pro",
-        "version": "001",
-        "displayName": "Gemini 1.5 Pro",
-        "description": "Mid-size multimodal model that supports up to 2 million tokens",
-        "inputTokenLimit": 2097152,
-        "outputTokenLimit": 8192,
-        "supportedGenerationMethods": ["generateContent", "streamGenerateContent"],
-        "temperature": 1.0,
-        "maxTemperature": 2.0,
-        "topP": 0.95,
-        "topK": 64
-    },
-    {
-        "name": "models/gemini-1.5-flash",
-        "version": "001",
-        "displayName": "Gemini 1.5 Flash",
-        "description": "Fast and versatile multimodal model for scaling across diverse tasks",
-        "inputTokenLimit": 1048576,
-        "outputTokenLimit": 8192,
-        "supportedGenerationMethods": ["generateContent", "streamGenerateContent"],
-        "temperature": 1.0,
-        "maxTemperature": 2.0,
-        "topP": 0.95,
-        "topK": 64
-    },
     {
         "name": "models/gemini-2.5-pro-preview-05-06",
         "version": "001",
         "displayName": "Gemini 2.5 Pro Preview 05-06",
         "description": "Preview version of Gemini 2.5 Pro from May 6th",
         "inputTokenLimit": 1048576,
-        "outputTokenLimit":
+        "outputTokenLimit": 65535,
         "supportedGenerationMethods": ["generateContent", "streamGenerateContent"],
         "temperature": 1.0,
         "maxTemperature": 2.0,
@@ -82,7 +56,7 @@ SUPPORTED_MODELS = [
         "displayName": "Gemini 2.5 Pro Preview 06-05",
         "description": "Preview version of Gemini 2.5 Pro from June 5th",
         "inputTokenLimit": 1048576,
-        "outputTokenLimit":
+        "outputTokenLimit": 65535,
         "supportedGenerationMethods": ["generateContent", "streamGenerateContent"],
         "temperature": 1.0,
         "maxTemperature": 2.0,
@@ -108,7 +82,7 @@ SUPPORTED_MODELS = [
         "displayName": "Gemini 2.5 Flash Preview 05-20",
         "description": "Preview version of Gemini 2.5 Flash from May 20th",
         "inputTokenLimit": 1048576,
-        "outputTokenLimit":
+        "outputTokenLimit": 65535,
         "supportedGenerationMethods": ["generateContent", "streamGenerateContent"],
         "temperature": 1.0,
         "maxTemperature": 2.0,
@@ -121,50 +95,11 @@ SUPPORTED_MODELS = [
         "displayName": "Gemini 2.5 Flash",
         "description": "Fast and efficient multimodal model with latest improvements",
         "inputTokenLimit": 1048576,
-        "outputTokenLimit":
+        "outputTokenLimit": 65535,
         "supportedGenerationMethods": ["generateContent", "streamGenerateContent"],
         "temperature": 1.0,
         "maxTemperature": 2.0,
         "topP": 0.95,
         "topK": 64
-    },
-    {
-        "name": "models/gemini-2.0-flash",
-        "version": "001",
-        "displayName": "Gemini 2.0 Flash",
-        "description": "Latest generation fast multimodal model",
-        "inputTokenLimit": 1048576,
-        "outputTokenLimit": 8192,
-        "supportedGenerationMethods": ["generateContent", "streamGenerateContent"],
-        "temperature": 1.0,
-        "maxTemperature": 2.0,
-        "topP": 0.95,
-        "topK": 64
-    },
-    {
-        "name": "models/gemini-2.0-flash-preview-image-generation",
-        "version": "001",
-        "displayName": "Gemini 2.0 Flash Preview Image Generation",
-        "description": "Preview version with image generation capabilities",
-        "inputTokenLimit": 32000,
-        "outputTokenLimit": 8192,
-        "supportedGenerationMethods": ["generateContent", "streamGenerateContent"],
-        "temperature": 1.0,
-        "maxTemperature": 2.0,
-        "topP": 0.95,
-        "topK": 64
-    },
-    {
-        "name": "models/gemini-embedding-001",
-        "version": "001",
-        "displayName": "Gemini Embedding 001",
-        "description": "Text embedding model for semantic similarity and search",
-        "inputTokenLimit": 2048,
-        "outputTokenLimit": 1,
-        "supportedGenerationMethods": ["embedContent"],
-        "temperature": 0.0,
-        "maxTemperature": 0.0,
-        "topP": 1.0,
-        "topK": 1
     }
 ]
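
The comment in the diff notes that SUPPORTED_MODELS backs the /v1beta/models endpoint. The route handler itself is not part of this commit, so the following is only a minimal sketch of how such an endpoint might serve the list, assuming a FastAPI app; the handler name and response shape mirror the public Gemini ListModels format and are not taken from this repository.

    # Minimal sketch of a /v1beta/models handler, assuming FastAPI.
    # Only SUPPORTED_MODELS comes from src/config.py; the app object,
    # route registration and response shape are illustrative.
    from fastapi import FastAPI

    from src.config import SUPPORTED_MODELS

    app = FastAPI()

    @app.get("/v1beta/models")
    def list_models():
        # Gemini-style ListModels response: a top-level "models" array.
        return {"models": SUPPORTED_MODELS}

A client pointed at such an endpoint would, after this commit, see only the four Gemini 2.5 entries when it lists models.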