matt HOFFNER committed
Commit f12f03e · 1 Parent(s): 5cfea3f
pages/api/chat.ts CHANGED
@@ -1,5 +1,5 @@
  import { DEFAULT_SYSTEM_PROMPT, DEFAULT_TEMPERATURE } from '@/utils/app/const';
- import { OpenAIError, OpenAIStream } from '@/utils/server';
+ import { LLMError, LLMStream } from '@/utils/server';

  import { ChatBody, Message } from '@/types/chat';

@@ -52,12 +52,12 @@ const handler = async (req: Request): Promise<Response> => {

    encoding.free();

-   const stream = await OpenAIStream(model, promptToSend, temperatureToUse, key, messagesToSend);
+   const stream = await LLMStream(model, promptToSend, temperatureToUse, key, messagesToSend);

    return new Response(stream);
  } catch (error) {
    console.error(error);
-   if (error instanceof OpenAIError) {
+   if (error instanceof LLMError) {
      return new Response('Error', { status: 500, statusText: error.message });
    } else {
      return new Response('Error', { status: 500 });
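
For orientation, here is a minimal sketch of the call site after the rename. It is not the full handler: the tiktoken encoding/encoding.free() bookkeeping shown in the hunk context is omitted, and the ChatBody field names below are assumptions for illustration, not something this diff shows.

```ts
import { DEFAULT_SYSTEM_PROMPT, DEFAULT_TEMPERATURE } from '@/utils/app/const';
import { LLMError, LLMStream } from '@/utils/server';
import { ChatBody } from '@/types/chat';

const handler = async (req: Request): Promise<Response> => {
  try {
    // Field names assumed; only model, key, promptToSend, temperatureToUse and
    // messagesToSend appear in the diff above.
    const { model, messages, key, prompt, temperature } = (await req.json()) as ChatBody;

    const promptToSend = prompt || DEFAULT_SYSTEM_PROMPT;
    const temperatureToUse = temperature ?? DEFAULT_TEMPERATURE;

    // Same argument order as the old OpenAIStream.
    const stream = await LLMStream(model, promptToSend, temperatureToUse, key, messages);
    return new Response(stream);
  } catch (error) {
    console.error(error);
    if (error instanceof LLMError) {
      return new Response('Error', { status: 500, statusText: error.message });
    }
    return new Response('Error', { status: 500 });
  }
};

export default handler;
```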
pages/api/google.ts CHANGED
@@ -1,7 +1,6 @@
  import { NextApiRequest, NextApiResponse } from 'next';

  import { OPENAI_API_HOST } from '@/utils/app/const';
- import { cleanSourceText } from '@/utils/server/google';

  import { Message } from '@/types/chat';
  import { GoogleBody, GoogleSource } from '@/types/google';
@@ -64,7 +63,7 @@ const handler = async (req: NextApiRequest, res: NextApiResponse<any>) => {
    const parsed = new Readability(doc).parse();

    if (parsed) {
-     let sourceText = cleanSourceText(parsed.textContent);
+     let sourceText = parsed.textContent;

      return {
        ...source,
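
The effective change in this file is that Readability's raw textContent now flows through without the whitespace normalization cleanSourceText used to apply. A minimal sketch of the changed branch, with the returned field name assumed (only ...source is visible in the hunk):

```ts
const parsed = new Readability(doc).parse();

if (parsed) {
  // No normalization any more: runs of '\n', '\t' and repeated spaces survive.
  let sourceText = parsed.textContent;

  return {
    ...source,
    text: sourceText, // field name assumed for illustration
  };
}
```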
utils/server/google.ts DELETED
@@ -1,9 +0,0 @@
- export const cleanSourceText = (text: string) => {
-   return text
-     .trim()
-     .replace(/(\n){4,}/g, '\n\n\n')
-     .replace(/\n\n/g, ' ')
-     .replace(/ {3,}/g, ' ')
-     .replace(/\t/g, '')
-     .replace(/\n+(\s*\n)*/g, '\n');
- };
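
For reference, this is what the deleted helper did, step by step. The code below is a commented copy of the same regex chain, plus a hand-traced example (not output taken from the repo):

```ts
// Commented copy of the removed helper; behavior identical to the deleted code.
export const cleanSourceText = (text: string) =>
  text
    .trim()
    .replace(/(\n){4,}/g, '\n\n\n') // cap runs of 4+ newlines at three
    .replace(/\n\n/g, ' ')          // turn remaining double newlines into a space
    .replace(/ {3,}/g, ' ')         // collapse 3+ consecutive spaces to one
    .replace(/\t/g, '')             // drop tabs entirely
    .replace(/\n+(\s*\n)*/g, '\n'); // squash any leftover newline runs to one

// Example, traced by hand:
// cleanSourceText('  Title\n\n\n\n\nFirst   paragraph.\n\nSecond paragraph.  ')
//   === 'Title \nFirst paragraph. Second paragraph.'
```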
utils/server/index.ts CHANGED
@@ -1,7 +1,7 @@
  import { Message } from '@/types/chat';
  import { OpenAIModel } from '@/types/openai';

- import { AZURE_DEPLOYMENT_ID, OPENAI_API_HOST, OPENAI_API_TYPE, OPENAI_API_VERSION, OPENAI_ORGANIZATION } from '../app/const';
+ import { OPENAI_API_HOST } from '../app/const';

  import {
    ParsedEvent,
@@ -9,21 +9,21 @@ import {
    createParser,
  } from 'eventsource-parser';

- export class OpenAIError extends Error {
+ export class LLMError extends Error {
    type: string;
    param: string;
    code: string;

    constructor(message: string, type: string, param: string, code: string) {
      super(message);
-     this.name = 'OpenAIError';
+     this.name = 'LLMError';
      this.type = type;
      this.param = param;
      this.code = code;
    }
  }

- export const OpenAIStream = async (
+ export const LLMStream = async (
    model: OpenAIModel,
    systemPrompt: string,
    temperature : number,
@@ -31,25 +31,12 @@ export const OpenAIStream = async (
    messages: Message[],
  ) => {
    let url = `${OPENAI_API_HOST}/v1/chat/completions`;
-   if (OPENAI_API_TYPE === 'azure') {
-     url = `${OPENAI_API_HOST}/openai/deployments/${AZURE_DEPLOYMENT_ID}/chat/completions?api-version=${OPENAI_API_VERSION}`;
-   }
    const res = await fetch(url, {
      headers: {
-       'Content-Type': 'application/json',
-       ...(OPENAI_API_TYPE === 'openai' && {
-         Authorization: `Bearer ${key ? key : process.env.OPENAI_API_KEY}`
-       }),
-       ...(OPENAI_API_TYPE === 'azure' && {
-         'api-key': `${key ? key : process.env.OPENAI_API_KEY}`
-       }),
-       ...((OPENAI_API_TYPE === 'openai' && OPENAI_ORGANIZATION) && {
-         'OpenAI-Organization': OPENAI_ORGANIZATION,
-       }),
+       'Content-Type': 'application/json'
      },
      method: 'POST',
      body: JSON.stringify({
-       ...(OPENAI_API_TYPE === 'openai' && {model: model.id}),
        messages: [
          {
            role: 'system',
@@ -69,7 +56,7 @@ export const OpenAIStream = async (
    if (res.status !== 200) {
      const result = await res.json();
      if (result.error) {
-       throw new OpenAIError(
+       throw new LLMError(
          result.error.message,
          result.error.type,
          result.error.param,
@@ -77,7 +64,7 @@ export const OpenAIStream = async (
        );
      } else {
        throw new Error(
-         `OpenAI API returned an error: ${
+         `LLM API returned an error: ${
            decoder.decode(result?.value) || result.statusText
          }`,
        );
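
The streaming half of LLMStream is untouched by this commit, but since the file imports ParsedEvent and createParser from eventsource-parser, the usual pattern those are imported for looks roughly like the sketch below. This is a generic illustration assuming an OpenAI-style SSE body with choices[0].delta.content, not the exact code in this file.

```ts
import { createParser, ParsedEvent, ReconnectInterval } from 'eventsource-parser';

// `res` stands in for the fetch() response made inside LLMStream above.
declare const res: Response;

const encoder = new TextEncoder();
const decoder = new TextDecoder();

const stream = new ReadableStream({
  async start(controller) {
    const onParse = (event: ParsedEvent | ReconnectInterval) => {
      if (event.type !== 'event') return;   // ignore reconnect-interval events
      if (event.data === '[DONE]') {        // OpenAI-style end-of-stream marker
        controller.close();
        return;
      }
      try {
        const json = JSON.parse(event.data);
        const text = json.choices[0]?.delta?.content ?? '';
        controller.enqueue(encoder.encode(text)); // forward just the token text
      } catch (e) {
        controller.error(e);
      }
    };

    const parser = createParser(onParse);
    for await (const chunk of res.body as any) {
      parser.feed(decoder.decode(chunk));
    }
  },
});
```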