Rooni committed on
Commit
93e1e4e
·
verified ·
1 Parent(s): dedba9f

Update server.js

Browse files
Files changed (1) hide show
  1. server.js +31 -24
server.js CHANGED
@@ -1,7 +1,7 @@
1
  require('dotenv').config();
2
  const express = require('express');
3
  const rateLimit = require('express-rate-limit');
4
- const { Configuration, OpenAIApi } = require('openai');
5
 
6
  const app = express();
7
  app.use(express.json());
@@ -37,37 +37,44 @@ app.post('/update', async (req, res) => {
37
 
38
  async function sendRequest(prompt, prs) {
39
  const apiKey = getRandomApiKey();
40
-
41
- const configuration = new Configuration({
42
- apiKey: apiKey,
43
- basePath: process.env.BASE_URL || undefined,
44
- });
45
-
46
- const openai = new OpenAIApi(configuration);
47
 
48
  try {
49
- const response = await openai.createChatCompletion({
50
- model: process.env.MODEL_NAME || 'gpt-3.5-turbo',
51
- messages: [
52
- {
53
- role: 'system',
54
- content: prs
55
- },
56
- {
57
- role: 'user',
58
- content: prompt
 
 
 
 
 
59
  }
60
- ],
61
- max_tokens: 1200,
62
- });
63
-
64
- if (response.data && response.data.choices && response.data.choices.length > 0) {
 
 
 
65
  return response.data.choices[0].message.content.trim();
66
  } else {
67
  throw new Error('Ошибка прочтения ответа');
68
  }
69
  } catch (error) {
70
- console.error('Ошибка при обращении к OpenAI:', error.message);
 
 
 
 
71
  throw new Error('Ошибка при генерации');
72
  }
73
  }
 
1
  require('dotenv').config();
2
  const express = require('express');
3
  const rateLimit = require('express-rate-limit');
4
+ const axios = require('axios');
5
 
6
  const app = express();
7
  app.use(express.json());
 
37
 
38
/**
 * Sends a chat-completion request to the configured model endpoint and
 * returns the generated text.
 *
 * @param {string} prompt - The user message content.
 * @param {string} prs - The system message content (instructions/persona).
 * @returns {Promise<string>} Trimmed text of the first completion choice.
 * @throws {Error} 'Ошибка при генерации' on any request or response-parsing failure.
 */
async function sendRequest(prompt, prs) {
  const apiKey = getRandomApiKey();
  // NOTE(review): BASE_URL is treated here as the FULL completions endpoint
  // (not a base path as in the previous openai-SDK version) — confirm
  // deployments set it accordingly.
  const baseUrl = process.env.BASE_URL || 'https://api.openai.com/v1/chat/completions';
  const modelName = process.env.MODEL_NAME || 'gpt-3.5-turbo';

  const payload = {
    model: modelName,
    messages: [
      { role: 'system', content: prs },
      { role: 'user', content: prompt }
    ],
    max_tokens: 1200,
    temperature: 0.7
  };

  const requestConfig = {
    headers: {
      'Authorization': `Bearer ${apiKey}`,
      'Content-Type': 'application/json'
    }
  };

  try {
    const response = await axios.post(baseUrl, payload, requestConfig);
    const choices = response.data && response.data.choices;

    // The API answered, but the body is not in the expected shape.
    if (!choices || choices.length === 0) {
      throw new Error('Ошибка прочтения ответа');
    }

    return choices[0].message.content.trim();
  } catch (error) {
    // Log details (including HTTP status/body when axios attached a
    // response), then surface a single generic error to the caller.
    console.error('Ошибка при обращении к модели:', error.message);
    if (error.response) {
      console.error('Статус ошибки:', error.response.status);
      console.error('Тело ошибки:', error.response.data);
    }
    throw new Error('Ошибка при генерации');
  }
}