Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Support multiple resources #72

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
237 changes: 171 additions & 66 deletions cf-openai-azure-proxy.js
Original file line number Diff line number Diff line change
@@ -1,20 +1,49 @@
// The name of your Azure OpenAI Resource.
const resourceName=RESOURCE_NAME
const resourceNames = {
'gpt-4-1106-preview': RESOURCE_NAME_GPT4,
'gpt-4-0613': RESOURCE_NAME_GPT4,
'gpt-4-32k-0613': RESOURCE_NAME_GPT4,
'gpt-4-0314': RESOURCE_NAME_GPT4,
'gpt-4-32k-0314': RESOURCE_NAME_GPT4,
'gpt-4-vision-preview': RESOURCE_NAME_GPT4,
'gpt-3.5-turbo-1106': RESOURCE_NAME_GPT35,
'gpt-3.5-instruct-0914': RESOURCE_NAME_GPT35,
'gpt-3.5-turbo-0613': RESOURCE_NAME_GPT35,
'gpt-3.5-turbo-16k-0613': RESOURCE_NAME_GPT35,
'gpt-3.5-turbo-0301': RESOURCE_NAME_GPT35,
'dall-e-3': RESOURCE_NAME_DALLE3,
'gpt-4': RESOURCE_NAME_GPT4,
'gpt-3.5-turbo': RESOURCE_NAME_GPT35,
};

// The resourceKey when you chose your Azure OpenAI Resource
const resourceKeys = {
RESOURCE_NAME_GPT4: RESOURCE_KEY_GPT4,
RESOURCE_NAME_GPT35: RESOURCE_KEY_GPT35,
RESOURCE_NAME_DALLE3: RESOURCE_KEY_DALLE3,
};

// The deployment name you chose when you deployed the model.
const mapper = {
'gpt-3.5-turbo': DEPLOY_NAME_GPT35,
'gpt-3.5-turbo-0613': DEPLOY_NAME_GPT35,
'gpt-3.5-turbo-1106': DEPLOY_NAME_GPT35,
'gpt-3.5-turbo-16k': DEPLOY_NAME_GPT35,
'gpt-4': DEPLOY_NAME_GPT4,
'gpt-4-0613': DEPLOY_NAME_GPT4,
'gpt-4-1106-preview': DEPLOY_NAME_GPT4,
'gpt-4-32k': DEPLOY_NAME_GPT4,
'dall-e-3': typeof DEPLOY_NAME_DALLE3 !== 'undefined' ? DEPLOY_NAME_DALLE3 : "dalle3",
'gpt-4-1106-preview': DEPOLY_NAME_GPT4,
'gpt-4-0613': DEPOLY_NAME_GPT4,
'gpt-4-32k-0613': DEPOLY_NAME_GPT4,
'gpt-4-0314': DEPOLY_NAME_GPT4,
'gpt-4-32k-0314': DEPOLY_NAME_GPT4,
'gpt-4-vision-preview': DEPOLY_NAME_GPT4,
'gpt-3.5-turbo-1106': DEPOLY_NAME_GPT35,
'gpt-3.5-instruct-0914': DEPOLY_NAME_GPT35,
'gpt-3.5-turbo-0613': DEPOLY_NAME_GPT35,
'gpt-3.5-turbo-16k-0613': DEPOLY_NAME_GPT35,
'gpt-3.5-turbo-0301': DEPOLY_NAME_GPT35,
'dall-e-3': typeof DEPOLY_NAME_DALLE3 !== 'undefined' ? DEPOLY_NAME_DALLE3 : "dalle3",
'gpt-4': DEPOLY_NAME_GPT4,
'gpt-3.5-turbo': DEPOLY_NAME_GPT35,
};

const apiVersion="2023-12-01-preview"
const secretKey = SECRET_KEY; // Set your own secret key here

const apiVersion = "2023-12-01-preview";

addEventListener("fetch", (event) => {
event.respondWith(handleRequest(event.request));
Expand All @@ -27,14 +56,12 @@ async function handleRequest(request) {

const url = new URL(request.url);
if (url.pathname.startsWith("//")) {
url.pathname = url.pathname.replace('/',"")
url.pathname = url.pathname.replace('/', "")
}
if (url.pathname === '/v1/chat/completions') {
var path="chat/completions"
} else if (url.pathname === '/v1/images/generations') {
var path="images/generations"
var path = "chat/completions"
} else if (url.pathname === '/v1/completions') {
var path="completions"
var path = "completions"
} else if (url.pathname === '/v1/models') {
return handleModels(request)
} else {
Expand All @@ -46,18 +73,51 @@ async function handleRequest(request) {
body = await request.json();
}

const modelName = body?.model;
const deployName = mapper[modelName] || ''
const modelName = body?.model;
const deployName = mapper[modelName] || '';
const resourceName = resourceNames[modelName] || '';

// (optional) Convert image URL to Base64
if (modelName == "gpt-4-vision-preview") {
for (let message of body.messages) {
for (let content of message.content) {
if (content.type === 'image_url') {
const imageUrl = content.image_url.url;

if (deployName === '') {
return new Response('Missing model mapper', {
status: 403
if (!imageUrl.startsWith('data:')) {
continue;
}

try {
console.log(`Fetching image from URL: ${imageUrl}`);
const imageResponse = await fetch(imageUrl);
console.log(`Received response with status: ${imageResponse.status}`);

if (imageResponse.ok) {
const arrayBuffer = await imageResponse.arrayBuffer();
console.log(`Image loaded, converting to Base64...`);
const base64String = bufferToBase64(arrayBuffer);
console.log(`Image converted to Base64: data:${imageResponse.headers.get('content-type')};base64,...`);
content.image_url.url = `data:${imageResponse.headers.get('content-type')};base64,${base64String}`;
} else {
console.error(`Failed to fetch image. Status: ${imageResponse.status}`);
}
} catch (error) {
console.error(`Error fetching image: ${error}`);
}
}
}
}
}

if (deployName === '' || resourceName === '') {
return new Response('Missing model mapper or resource name', {
status: 403
});
}
const fetchAPI = `https://${resourceName}.openai.azure.com/openai/deployments/${deployName}/${path}?api-version=${apiVersion}`

const authKey = request.headers.get('Authorization');
if (!authKey) {
if (!authKey || authKey !== 'Bearer ' + secretKey) {
return new Response("Not allowed", {
status: 403
});
Expand All @@ -67,71 +127,117 @@ async function handleRequest(request) {
method: request.method,
headers: {
"Content-Type": "application/json",
"api-key": authKey.replace('Bearer ', ''),
"api-key": resourceKeys[resourceName],
},
body: typeof body === 'object' ? JSON.stringify(body) : '{}',
};

let response = await fetch(fetchAPI, payload);
response = new Response(response.body, response);
response.headers.set("Access-Control-Allow-Origin", "*");
let response;
try {
response = await fetch(fetchAPI, payload);
console.log(response);
} catch (error) {
return new Response('Error fetching API', { status: 500 });
}

if (body?.stream != true){
return response
}
if (body?.stream !== true) {
return new Response(response.body, {
status: response.status,
headers: { 'Access-Control-Allow-Origin': '*' }
});
}

let { readable, writable } = new TransformStream()
stream(response.body, writable);
return new Response(readable, response);
let { readable, writable } = new TransformStream();
stream(response.body, writable, body).catch((error) => {
console.error('Stream error:', error);
});
return new Response(readable, {
headers: { 'Access-Control-Allow-Origin': '*' }
});
}

// Encode an ArrayBuffer as a Base64 string.
// Builds a one-char-per-byte binary string, then defers to btoa() for the
// actual Base64 encoding (sufficient here since each char is < U+0100).
function bufferToBase64(buffer) {
  const bytes = new Uint8Array(buffer);
  const binary = Array.from(bytes, (b) => String.fromCharCode(b)).join('');
  return btoa(binary);
}

// Resolve after `ms` milliseconds; used to pace streamed SSE chunks.
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}

// support printer mode and add newline
async function stream(readable, writable) {
async function stream(readable, writable, requestData) {
const reader = readable.getReader();
const writer = writable.getWriter();

// const decoder = new TextDecoder();
const encoder = new TextEncoder();
const decoder = new TextDecoder();
// let decodedValue = decoder.decode(value);
const newline = "\n";
const delimiter = "\n\n"
const encodedNewline = encoder.encode(newline);

const decoder = new TextDecoder("utf-8");
let buffer = "";
while (true) {
let { value, done } = await reader.read();

console.log("Start streaming...");

async function push() {
const { done, value } = await reader.read();
if (done) {
break;
if (buffer) {
await processBuffer();
}
await writer.close();
return;
}
buffer += decoder.decode(value, { stream: true }); // stream: true is important here,fix the bug of incomplete line
let lines = buffer.split(delimiter);
const chunk = decoder.decode(value, { stream: true });
buffer += chunk;
await processBuffer();
push();
}

// Loop through all but the last line, which may be incomplete.
async function processBuffer() {
const delimiter = "\n\n";
const lines = buffer.split(delimiter);
for (let i = 0; i < lines.length - 1; i++) {
await writer.write(encoder.encode(lines[i] + delimiter));
await sleep(20);
await processLine(lines[i]);
}

buffer = lines[lines.length - 1];
}

if (buffer) {
await writer.write(encoder.encode(buffer));
// Refine the data to match the OpenAI API
async function processLine(line) {
if (!line.startsWith("data: ")) return;
const raw = line.substring(6);
if (raw.startsWith("[DONE]")) {
await writer.write(encoder.encode("data: [DONE]\n\n"));
return;
}
try {
let data = JSON.parse(raw);
if (data.choices.length === 0 || !data.choices[0].delta)
data.choices[0] = { delta: {} };
if (data.choices[0].delta?.content === null)
data.choices[0].delta.content = "";
if (data.choices[0].finish_reason === undefined)
data.choices[0].finish_reason = null;
if (data.model === undefined && requestData.model !== undefined)
data.model = requestData.model;
if (data.object === undefined)
data.object = "chat.completion.chunk";
const to_send = `data: ${JSON.stringify(data)}\n\n`;
console.log("Sending data: ", to_send);
await writer.write(encoder.encode(to_send));
await sleep(20);
} catch (e) {
console.error("Failed to parse JSON:", e);
}
}
await writer.write(encodedNewline)
await writer.close();

push();
}

async function handleModels(request) {
const data = {
"object": "list",
"data": []
"data": []
};

for (let key in mapper) {
Expand All @@ -156,7 +262,7 @@ async function handleModels(request) {
}],
"root": key,
"parent": null
});
});
}

const json = JSON.stringify(data, null, 2);
Expand All @@ -166,12 +272,11 @@ async function handleModels(request) {
}

// Respond to CORS preflight (OPTIONS) requests: permit any origin, method,
// and header so browser clients can call the proxy directly.
// Note: the pasted diff left two copies of this return statement (the second
// unreachable); a single response is the intended behavior.
async function handleOPTIONS(request) {
  return new Response(null, {
    headers: {
      'Access-Control-Allow-Origin': '*',
      'Access-Control-Allow-Methods': '*',
      'Access-Control-Allow-Headers': '*'
    }
  });
}