diff --git a/docs/src/containers/DownloadButton/index.js b/docs/src/containers/DownloadButton/index.js
index a8b13ae2d..83d7914f2 100644
--- a/docs/src/containers/DownloadButton/index.js
+++ b/docs/src/containers/DownloadButton/index.js
@@ -7,17 +7,17 @@ import axios from "axios";
const systemsTemplate = [
{
name: "Download for Mac",
- logo: require("@site/static/img/apple-logo-white.png").default,
+ logo: require("@site/static/img/logos/apple-logo-white.png").default,
fileFormat: "{appname}-mac-arm64.zip",
},
{
name: "Download for Windows",
- logo: require("@site/static/img/windows-logo-white.png").default,
+ logo: require("@site/static/img/logos/windows-logo-white.png").default,
fileFormat: "{appname}-win-amd64.zip",
},
{
name: "Download for Linux",
- logo: require("@site/static/img/linux-logo-white.png").default,
+ logo: require("@site/static/img/logos/linux-logo-white.png").default,
fileFormat: "{appname}-linux-amd64.zip",
},
];
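For reference, a hypothetical sketch of how systemsTemplate might be consumed; the appname value, base URL, and mapping below are illustrative assumptions, not code from this patch:

// Hypothetical: resolve each fileFormat into a concrete download URL.
// The "nitro" app name and the base URL are placeholder assumptions.
const appname = "nitro";
const baseUrl = "https://example.com/releases/latest";

const systems = systemsTemplate.map((system) => ({
  ...system,
  href: `${baseUrl}/${system.fileFormat.replace("{appname}", appname)}`,
}));
// e.g. systems[0].href -> "https://example.com/releases/latest/nitro-mac-arm64.zip"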
diff --git a/docs/src/containers/GithubButton/index.js b/docs/src/containers/GithubButton/index.js
new file mode 100644
index 000000000..642b0b148
--- /dev/null
+++ b/docs/src/containers/GithubButton/index.js
@@ -0,0 +1,16 @@
+import React from "react";
+import { FaGithub } from "react-icons/fa";
+
+export default function GithubButton() {
+ return (
+
+
+
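For context, a minimal sketch of what a link-button component like this typically renders; the repository URL, class name, and label below are placeholder assumptions rather than code from this patch:

// Hypothetical sketch only: the href, className, and label are placeholder
// assumptions, not taken from this patch.
import React from "react";
import { FaGithub } from "react-icons/fa";

export default function GithubButton() {
  return (
    <a
      href="https://github.com/your-org/your-repo"
      target="_blank"
      rel="noreferrer"
      className="github-button"
    >
      <FaGithub />
      <span>GitHub</span>
    </a>
  );
}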
Llama.cpp
TensorRT-LLM
Coming Soon
@@ -62,6 +72,11 @@ export default function Architecture() {
Whisper.cpp
Coming Soon
@@ -71,6 +86,11 @@ export default function Architecture() {
StableDiffusion
Coming Soon
diff --git a/docs/src/containers/Homepage/OpenAI/index.js b/docs/src/containers/Homepage/OpenAI/index.js
index 898b7cd96..d59185438 100644
--- a/docs/src/containers/Homepage/OpenAI/index.js
+++ b/docs/src/containers/Homepage/OpenAI/index.js
@@ -11,10 +11,10 @@ import ThemedImage from "@theme/ThemedImage";

 export default function OpenAI() {
   const clipboard = useClipboard({ timeout: 200 });
-  const codeStringOpenAI = `curl http://localhost:3928/inferences/llamacpp/chat_completion
+  const codeStringNitro = `curl http://localhost:3928/inferences/llamacpp/chat_completion
   -H "Content-Type: application/json"
   -d '{
-    "model": "gpt-3.5-turbo",
+    "model": "/path/to/your/model.gguf",
     "messages": [
       {
         "role": "system",
@@ -27,7 +27,7 @@ export default function OpenAI() {
     ]
   }'`;

-  const codeStringNitro = `curl https://api.openai.com/v1/chat/completions
+  const codeStringOpenAI = `curl https://api.openai.com/v1/chat/completions
   -H "Content-Type: application/json"
   -H "Authorization: Bearer $OPENAI_API_KEY"
   -d '{
@@ -53,15 +53,14 @@ export default function OpenAI() {
-              https://api.openai.com/v1/chat/completions
+              https://localhost:1337/llama.cpp/v1/chat/completions
-              https://localhost:1337/llama.cpp/v1/chat/completions
+              https://api.openai.com/v1/chat/completions
{lightweightTabActive
@@ -48,12 +51,6 @@ export default function Statistic() {
3mb