
Commit a86112d: Add logging
KenzoBenzo committed May 21, 2023
1 parent e23a0f7
Showing 3 changed files with 24 additions and 21 deletions.
.env.example (1 change: 1 addition & 0 deletions)

@@ -1,4 +1,5 @@
 OPENAI_API_KEY=
+REPORT_KEY=
 STRIPE_SECRET_KEY=
 NEXT_PUBLIC_SUPABASE_URL=
 NEXT_PUBLIC_SUPABASE_ANON_KEY=
pages/api/get-type.ts (15 changes: 8 additions & 7 deletions)

@@ -1,6 +1,6 @@
-import { NextApiRequest, NextApiResponse } from "next";
+import { NextApiRequest, NextApiResponse } from 'next';
 
-const OPENAI_API_URL = "https://api.openai.com/v1/chat/completions";
+const OPENAI_API_URL = 'https://api.openai.withlogging.com/v1/chat/completions';
 
 export default async function handler(
   req: NextApiRequest,
@@ -12,17 +12,18 @@ export default async function handler(
   `;
 
   const response = await fetch(OPENAI_API_URL, {
-    method: "POST",
+    method: 'POST',
     headers: {
-      "Content-Type": "application/json",
+      'Content-Type': 'application/json',
       Authorization: `Bearer ${process.env.OPENAI_API_KEY}`,
+      'X-Api-Key': `Bearer ${process.env.REPORT_KEY}`,
     },
     body: JSON.stringify({
-      messages: [{ role: "user", content: prompt }],
+      messages: [{ role: 'user', content: prompt }],
       temperature: 0.5,
       max_tokens: 10,
       n: 1,
-      model: "gpt-3.5-turbo",
+      model: 'gpt-3.5-turbo',
       frequency_penalty: 0.5,
       presence_penalty: 0.5,
     }),
@@ -35,6 +36,6 @@ export default async function handler(
     res.status(200).json(chartType);
   } catch (error) {
     console.error(error);
-    res.status(500).send("Internal Server Error");
+    res.status(500).send('Internal Server Error');
   }
 }

Review comment from @wertycn (Jun 12, 2023) on the added 'X-Api-Key' header line:

I really appreciate the author's contribution. This is a great project! But I have some concerns. It seems that 'withlogging' is used for logging, and deploying the project with default settings may result in API key leakage. Is it more reasonable to use other ways to record logs?
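To make the concern concrete: the commit points both routes at api.openai.withlogging.com and forwards REPORT_KEY in an 'X-Api-Key' header, so the logging proxy sits between the app and OpenAI and also receives the Authorization header. Below is a minimal sketch of the kind of alternative the reviewer hints at; it is not part of this commit, it keeps the request on the official endpoint, and it logs inside the route handler itself. The log fields and the trimmed-down request body are illustrative assumptions.

// Hypothetical alternative, not part of this commit: keep the official OpenAI
// endpoint and log inside the route handler, so no third-party proxy receives
// the Authorization header. Field names in the log entry are made up.
import { NextApiRequest, NextApiResponse } from 'next';

const OPENAI_API_URL = 'https://api.openai.com/v1/chat/completions';

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
): Promise<void> {
  const { prompt } = req.body;
  const startedAt = Date.now();

  const response = await fetch(OPENAI_API_URL, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${process.env.OPENAI_API_KEY}`,
    },
    body: JSON.stringify({
      messages: [{ role: 'user', content: prompt }],
      model: 'gpt-3.5-turbo',
      temperature: 0.5,
      max_tokens: 10,
    }),
  });

  // Record the request locally (or ship it to a log drain you control) instead
  // of routing the call through a logging proxy.
  console.log(
    JSON.stringify({
      route: 'get-type',
      status: response.status,
      durationMs: Date.now() - startedAt,
    })
  );

  const data = await response.json();
  res.status(response.status).json(data);
}

The same pattern would apply to parse-graph.ts; whether console output alone is sufficient depends on the hosting platform's log retention.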
pages/api/parse-graph.ts (29 changes: 15 additions & 14 deletions)

@@ -1,38 +1,39 @@
-import { NextApiRequest, NextApiResponse } from "next";
+import { NextApiRequest, NextApiResponse } from 'next';
 
-const OPENAI_API_URL = "https://api.openai.com/v1/chat/completions";
+const OPENAI_API_URL = 'https://api.openai.withlogging.com/v1/chat/completions';
 
 export default async function handler(
   req: NextApiRequest,
   res: NextApiResponse
 ): Promise<void> {
-  if (req.method !== "POST") {
-    res.status(405).json({ error: "Method not allowed" });
+  if (req.method !== 'POST') {
+    res.status(405).json({ error: 'Method not allowed' });
     return;
   }
 
   const { prompt } = req.body;
-  console.log("Prompt: " + prompt);
+  console.log('Prompt: ' + prompt);
   try {
     const response = await fetch(OPENAI_API_URL, {
-      method: "POST",
+      method: 'POST',
       headers: {
-        "Content-Type": "application/json",
+        'Content-Type': 'application/json',
         Authorization: `Bearer ${process.env.OPENAI_API_KEY}`,
+        'X-Api-Key': `Bearer ${process.env.REPORT_KEY}`,
       },
       body: JSON.stringify({
-        messages: [{ role: "user", content: prompt }],
+        messages: [{ role: 'user', content: prompt }],
         temperature: 0.5,
        max_tokens: 1000,
         n: 1,
-        model: "gpt-3.5-turbo",
+        model: 'gpt-3.5-turbo',
         frequency_penalty: 0.5,
         presence_penalty: 0.5,
       }),
     });
 
     if (!response.ok) {
-      throw new Error("OpenAI API request failed");
+      throw new Error('OpenAI API request failed');
     }
 
     const data = await response.json();
@@ -41,13 +42,13 @@ export default async function handler(
       ? data.choices[0].message.content.trim()
       : null;
     if (!graphData) {
-      throw new Error("Failed to generate graph data");
+      throw new Error('Failed to generate graph data');
     }
     const stringifiedData = graphData.replace(/'/g, '"');
-    console.log("Data: " + stringifiedData);
+    console.log('Data: ' + stringifiedData);
     res.status(200).json(stringifiedData);
   } catch (error) {
     console.error(error);
-    res.status(500).json({ error: "Failed to process the input" });
+    res.status(500).json({ error: 'Failed to process the input' });
   }
 }
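As a side note (not part of the diff), the graphData.replace(/'/g, '"') line in parse-graph.ts is easy to miss. A tiny sketch of why that swap happens before the data is returned; the sample model output is invented.

// Invented example of model output: pseudo-JSON with single quotes.
const raw = "[{'month': 'Jan', 'value': 10}]";

let parsed: unknown;
try {
  parsed = JSON.parse(raw); // throws: JSON requires double-quoted strings
} catch {
  // Same transformation the handler applies before res.json(...)
  parsed = JSON.parse(raw.replace(/'/g, '"'));
}
console.log(parsed); // [ { month: 'Jan', value: 10 } ]

Note that a blanket quote swap like this mangles apostrophes inside string values, so it only holds up for simple model outputs.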

1 comment on commit a86112d

@vercel (bot) commented on a86112d, May 21, 2023.