Skip to content

Commit

Permalink
Merge pull request #4072 from BerriAI/litellm_allow_custom_logout_url
Browse files Browse the repository at this point in the history
[Fix] UI - Allow custom logout url and show proxy base url on API Ref Page
  • Loading branch information
ishaan-jaff committed Jun 8, 2024
2 parents 366fc5e + c0d8c13 commit 4753813
Show file tree
Hide file tree
Showing 6 changed files with 114 additions and 10 deletions.
13 changes: 13 additions & 0 deletions litellm/proxy/proxy_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -12568,6 +12568,19 @@ async def login(request: Request):
)


@app.get(
    "/sso/get/logout_url",
    tags=["experimental"],
    include_in_schema=False,
    dependencies=[Depends(user_api_key_auth)],
)
async def get_logout_url(request: Request):
    """
    Return the proxy's configured base URL and custom logout URL.

    Reads ``PROXY_BASE_URL`` and ``PROXY_LOGOUT_URL`` from the environment;
    each value is ``None`` when the corresponding variable is unset. The
    dashboard UI fetches this to build the API-reference base URL and the
    navbar logout link.

    Returns:
        dict: ``{"PROXY_BASE_URL": str | None, "PROXY_LOGOUT_URL": str | None}``
    """
    # os.getenv already defaults to None; the explicit second argument was redundant.
    _proxy_base_url = os.getenv("PROXY_BASE_URL")
    _logout_url = os.getenv("PROXY_LOGOUT_URL")

    return {"PROXY_BASE_URL": _proxy_base_url, "PROXY_LOGOUT_URL": _logout_url}


@app.get("/onboarding/get_token", include_in_schema=False)
async def onboarding(invite_link: str):
"""
Expand Down
18 changes: 17 additions & 1 deletion ui/litellm-dashboard/src/app/page.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -46,13 +46,23 @@ function formatUserRole(userRole: string) {
}
}

/**
 * Proxy URL settings for the dashboard, as returned by the proxy's
 * `/sso/get/logout_url` endpoint (fetched via getProxyBaseUrlAndLogoutUrl).
 * Both values default to "" until the fetch completes.
 */
interface ProxySettings {
PROXY_BASE_URL: string; // base URL shown on the API Ref page
PROXY_LOGOUT_URL: string; // custom logout target for the navbar Logout link
}

const CreateKeyPage = () => {
const { Title, Paragraph } = Typography;
const [userRole, setUserRole] = useState("");
const [premiumUser, setPremiumUser] = useState(false);
const [userEmail, setUserEmail] = useState<null | string>(null);
const [teams, setTeams] = useState<null | any[]>(null);
const [keys, setKeys] = useState<null | any[]>(null);
const [proxySettings, setProxySettings] = useState<ProxySettings>({
PROXY_BASE_URL: "",
PROXY_LOGOUT_URL: "",
});

const [showSSOBanner, setShowSSOBanner] = useState<boolean>(true);
const searchParams = useSearchParams();
const [modelData, setModelData] = useState<any>({ data: [] });
Expand Down Expand Up @@ -115,6 +125,8 @@ const CreateKeyPage = () => {
userEmail={userEmail}
showSSOBanner={showSSOBanner}
premiumUser={premiumUser}
setProxySettings={setProxySettings}
proxySettings={proxySettings}
/>
<div className="flex flex-1 overflow-auto">
<div className="mt-8">
Expand All @@ -136,6 +148,8 @@ const CreateKeyPage = () => {
setUserEmail={setUserEmail}
setTeams={setTeams}
setKeys={setKeys}
setProxySettings={setProxySettings}
proxySettings={proxySettings}
/>
) : page == "models" ? (
<ModelDashboard
Expand Down Expand Up @@ -182,7 +196,9 @@ const CreateKeyPage = () => {
showSSOBanner={showSSOBanner}
/>
) : page == "api_ref" ? (
<APIRef />
<APIRef
proxySettings={proxySettings}
/>
) : page == "settings" ? (
<Settings
userID={userID}
Expand Down
24 changes: 19 additions & 5 deletions ui/litellm-dashboard/src/components/api_ref.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,22 @@ import { Statistic } from "antd"
import { modelAvailableCall } from "./networking";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";

/**
 * Props for the APIRef page.
 * proxySettings is presumably the object fetched from `/sso/get/logout_url`
 * ({ PROXY_BASE_URL, PROXY_LOGOUT_URL }) — confirm against the caller in page.tsx.
 */
interface ApiRefProps {
proxySettings: any;
}


const APIRef: React.FC<ApiRefProps> = ({
proxySettings,
}) => {

let base_url = "http://localhost:4000";

const APIRef = ({}) => {
if (proxySettings) {
if (proxySettings.PROXY_BASE_URL && proxySettings.PROXY_BASE_URL !== undefined) {
base_url = proxySettings.PROXY_BASE_URL;
}
}
return (
<>
<Grid className="gap-2 p-8 h-[80vh] w-full mt-2">
Expand All @@ -51,7 +65,7 @@ const APIRef = ({}) => {
import openai
client = openai.OpenAI(
api_key="your_api_key",
base_url="http://0.0.0.0:4000" # LiteLLM Proxy is OpenAI compatible, Read More: https://docs.litellm.ai/docs/proxy/user_keys
base_url="${base_url}" # LiteLLM Proxy is OpenAI compatible, Read More: https://docs.litellm.ai/docs/proxy/user_keys
)
response = client.chat.completions.create(
Expand Down Expand Up @@ -80,14 +94,14 @@ from llama_index import VectorStoreIndex, SimpleDirectoryReader, ServiceContext
llm = AzureOpenAI(
engine="azure-gpt-3.5", # model_name on litellm proxy
temperature=0.0,
azure_endpoint="http://0.0.0.0:4000", # litellm proxy endpoint
azure_endpoint="${base_url}", # litellm proxy endpoint
api_key="sk-1234", # litellm proxy API Key
api_version="2023-07-01-preview",
)
embed_model = AzureOpenAIEmbedding(
deployment_name="azure-embedding-model",
azure_endpoint="http://0.0.0.0:4000",
azure_endpoint="${base_url}",
api_key="sk-1234",
api_version="2023-07-01-preview",
)
Expand Down Expand Up @@ -116,7 +130,7 @@ from langchain.prompts.chat import (
from langchain.schema import HumanMessage, SystemMessage
chat = ChatOpenAI(
openai_api_base="http://0.0.0.0:4000",
openai_api_base="${base_url}",
model = "gpt-3.5-turbo",
temperature=0.1
)
Expand Down
24 changes: 20 additions & 4 deletions ui/litellm-dashboard/src/components/navbar.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

import Link from "next/link";
import Image from "next/image";
import React, { useState } from "react";
import React, { useEffect, useState } from "react";
import type { MenuProps } from "antd";
import { Dropdown, Space } from "antd";
import { useSearchParams } from "next/navigation";
Expand All @@ -24,13 +24,17 @@ interface NavbarProps {
userEmail: string | null;
showSSOBanner: boolean;
premiumUser: boolean;
setProxySettings: React.Dispatch<React.SetStateAction<any>>;
proxySettings: any;
}
const Navbar: React.FC<NavbarProps> = ({
userID,
userRole,
userEmail,
showSSOBanner,
premiumUser,
setProxySettings,
proxySettings,
}) => {
console.log("User ID:", userID);
console.log("userEmail:", userEmail);
Expand All @@ -41,7 +45,19 @@ const Navbar: React.FC<NavbarProps> = ({
const isLocal = process.env.NODE_ENV === "development";
const proxyBaseUrl = isLocal ? "http://localhost:4000" : null;
const imageUrl = isLocal ? "http://localhost:4000/get_image" : "/get_image";
const logoutUrl = proxyBaseUrl ? `${proxyBaseUrl}` : `/`;
let logoutUrl = "";

console.log("PROXY_settings=", proxySettings);

if (proxySettings) {
if (proxySettings.PROXY_LOGOUT_URL && proxySettings.PROXY_LOGOUT_URL !== undefined) {
logoutUrl = proxySettings.PROXY_LOGOUT_URL;
}
}

console.log("logoutUrl=", logoutUrl);



const items: MenuProps["items"] = [
{
Expand All @@ -57,9 +73,9 @@ const Navbar: React.FC<NavbarProps> = ({
{
key: "2",
label: (
<Link href={logoutUrl}>
<a href={logoutUrl}>
<p>Logout</p>
</Link>
</a>
),
}
];
Expand Down
37 changes: 37 additions & 0 deletions ui/litellm-dashboard/src/components/networking.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -2382,3 +2382,40 @@ export const healthCheckCall = async (accessToken: String) => {
throw error;
}
};

export const getProxyBaseUrlAndLogoutUrl = async (
  accessToken: String,
) => {
  /**
   * Fetch the proxy's URL settings from the `/sso/get/logout_url` endpoint.
   *
   * (Previous doc comment — "Get all the models user has access to" — was a
   * copy-paste from modelAvailableCall and described the wrong endpoint.)
   *
   * @param accessToken bearer token sent as the Authorization header
   * @returns parsed JSON body: { PROXY_BASE_URL, PROXY_LOGOUT_URL }
   * @throws after surfacing a UI error toast when the response is not ok,
   *         or rethrows any network/parse failure
   */
  try {
    const url = proxyBaseUrl
      ? `${proxyBaseUrl}/sso/get/logout_url`
      : `/sso/get/logout_url`;

    const response = await fetch(url, {
      method: "GET",
      headers: {
        Authorization: `Bearer ${accessToken}`,
        "Content-Type": "application/json",
      },
    });

    if (!response.ok) {
      // Show the server's error text to the user before failing the call.
      const errorData = await response.text();
      message.error(errorData, 10);
      throw new Error("Network response was not ok");
    }

    return await response.json();
  } catch (error) {
    // Was "Failed to get callbacks:" — another copy-paste; log the real context.
    console.error("Failed to get proxy settings:", error);
    throw error;
  }
};

8 changes: 8 additions & 0 deletions ui/litellm-dashboard/src/components/user_dashboard.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import {
userInfoCall,
modelAvailableCall,
getTotalSpendCall,
getProxyBaseUrlAndLogoutUrl,
} from "./networking";
import { Grid, Col, Card, Text, Title } from "@tremor/react";
import CreateKey from "./create_key_button";
Expand Down Expand Up @@ -33,6 +34,8 @@ interface UserDashboardProps {
setUserEmail: React.Dispatch<React.SetStateAction<string | null>>;
setTeams: React.Dispatch<React.SetStateAction<Object[] | null>>;
setKeys: React.Dispatch<React.SetStateAction<Object[] | null>>;
setProxySettings: React.Dispatch<React.SetStateAction<any>>;
proxySettings: any;
}

type TeamInterface = {
Expand All @@ -51,6 +54,8 @@ const UserDashboard: React.FC<UserDashboardProps> = ({
setUserEmail,
setTeams,
setKeys,
setProxySettings,
proxySettings,
}) => {
const [userSpendData, setUserSpendData] = useState<UserSpendData | null>(
null
Expand Down Expand Up @@ -144,6 +149,9 @@ const UserDashboard: React.FC<UserDashboardProps> = ({
} else {
const fetchData = async () => {
try {
const proxy_settings = await getProxyBaseUrlAndLogoutUrl(accessToken);
setProxySettings(proxy_settings);

const response = await userInfoCall(
accessToken,
userID,
Expand Down

0 comments on commit 4753813

Please sign in to comment.