import express from 'express';
import {
  getAvailableChatModelProviders,
  getAvailableEmbeddingModelProviders,
} from '../lib/providers';
import {
  getGroqApiKey,
  getOllamaApiEndpoint,
  getOpenaiApiKey,
  updateConfig,
} from '../config';
const router = express.Router();
// GET /: report the available chat and embedding model providers along with
// the currently configured API keys and endpoints.
router.get('/', async (_, res) => {
  const config = {};

  const [chatModelProviders, embeddingModelProviders] = await Promise.all([
    getAvailableChatModelProviders(),
    getAvailableEmbeddingModelProviders(),
  ]);

  config['chatModelProviders'] = {};
  config['embeddingModelProviders'] = {};

  // Expose only the model names for each provider, not the model objects themselves.
  for (const provider in chatModelProviders) {
    config['chatModelProviders'][provider] = Object.keys(
      chatModelProviders[provider],
    );
  }

  for (const provider in embeddingModelProviders) {
    config['embeddingModelProviders'][provider] = Object.keys(
      embeddingModelProviders[provider],
    );
  }

  config['openaiApiKey'] = getOpenaiApiKey();
  config['ollamaApiUrl'] = getOllamaApiEndpoint();
  config['groqApiKey'] = getGroqApiKey();

  res.status(200).json(config);
});
// POST /: persist updated API keys and endpoints from the JSON request body.
router.post('/', async (req, res) => {
  const config = req.body;

  const updatedConfig = {
    API_KEYS: {
      OPENAI: config.openaiApiKey,
      GROQ: config.groqApiKey,
    },
    API_ENDPOINTS: {
      OLLAMA: config.ollamaApiUrl,
    },
  };

  updateConfig(updatedConfig);

  res.status(200).json({ message: 'Config updated' });
});
export default router;
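
// Usage sketch (illustrative only, not part of the original file): assuming this
// router lives at routes/config.ts and the app entrypoint mounts it under
// /api/config, wiring it up might look like the following. The '/api/config'
// path, the file layout, and the port are assumptions for illustration.
//
//   import express from 'express';
//   import configRouter from './routes/config';
//
//   const app = express();
//   app.use(express.json()); // parse JSON bodies so req.body is populated for the POST handler
//   app.use('/api/config', configRouter);
//   app.listen(3001);
//
// A client could then read the settings with `GET /api/config` and update them
// with `POST /api/config`, sending JSON such as:
//   { "openaiApiKey": "...", "groqApiKey": "...", "ollamaApiUrl": "http://localhost:11434" }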