diff --git a/docs/docs/architecture/cortexrc.mdx b/docs/docs/architecture/cortexrc.mdx index 24dc63a2d..061e2cffc 100644 --- a/docs/docs/architecture/cortexrc.mdx +++ b/docs/docs/architecture/cortexrc.mdx @@ -14,6 +14,7 @@ import TabItem from "@theme/TabItem"; Cortex.cpp supports reading its configuration from a file called `.cortexrc`. Using this file, you can also change the data folder, Cortex.cpp API server port, and host. ## File Location + The configuration file is stored in the following locations: - **Windows**: `C:\Users\\.cortexrc` @@ -21,8 +22,9 @@ The configuration file is stored in the following locations: - **macOS**: `/Users//.cortexrc` ## Configuration Parameters + You can configure the following parameters in the `.cortexrc` file: -| Parameter | Description | Default Value | +| Parameter | Description | Default Value | |------------------|--------------------------------------------------|--------------------------------| | `dataFolderPath` | Path to the folder where `.cortexrc` located. | User's home folder. | | `apiServerHost` | Host address for the Cortex.cpp API server. | `127.0.0.1` | @@ -37,6 +39,7 @@ You can configure the following parameters in the `.cortexrc` file: | `huggingFaceToken` | HuggingFace token. | Empty string | Example of the `.cortexrc` file: + ``` logFolderPath: /Users//cortexcpp logLlamaCppPath: ./logs/cortex.log @@ -49,4 +52,4 @@ apiServerPort: 39281 checkedForUpdateAt: 1730501224 latestRelease: v1.0.1 huggingFaceToken: "" -``` \ No newline at end of file +``` diff --git a/docs/docs/architecture/data-folder.mdx b/docs/docs/architecture/data-folder.mdx index cda2a4402..5a4fe1964 100644 --- a/docs/docs/architecture/data-folder.mdx +++ b/docs/docs/architecture/data-folder.mdx @@ -1,6 +1,6 @@ --- -title: Data Folder -description: Cortex.cpp's data folder. +title: Data Folder and App Folder +description: Cortex.cpp's data folder and app folder. 
slug: "data-folder" --- @@ -17,37 +17,25 @@ When you install Cortex.cpp, three types of files will be generated on your devi - **Configuration Files** - **Data Folder** -## Binary Files +## Binary Files - under the App Folder These are the executable files of the Cortex.cpp application. The file format varies depending on the operating system: -- **Windows**: `.exe` - - Stable: `C:\Users\\AppData\Local\cortexcpp\cortex.exe` - - Beta: `C:\Users\\AppData\Local\cortexcpp-beta\cortex-beta.exe` - - Nighty: `C:\Users\\AppData\Local\cortexcpp-nightly\cortex-nightly.exe` -- **Linux**: `.deb` or `.fedora` - - Stable: `/usr/bin/cortexcpp` - - Beta: `/usr/bin/cortexcpp-beta` - - Nighty: `/usr/bin/cortexcpp-nightly` -- **macOS**: `.pkg` - - Stable: `/usr/local/bin/cortexcpp` - - Beta: `/home//.cortexrc-beta` - - Nighty: `/home//.cortexrc-nightly` +- **Windows**: + - cli: `C:\Users\\AppData\Local\cortexcpp\cortex.exe` + - server: `C:\Users\\AppData\Local\cortexcpp\cortex-server.exe` +- **Linux**: + - cli: `/usr/bin/cortex` + - server: `/usr/bin/cortex-server` +- **macOS**: + - cli: `/usr/local/bin/cortex` + - server: `/usr/local/bin/cortex-server` ## Cortex.cpp Data Folder The data folder stores the engines, models, and logs required by Cortex.cpp. 
This folder is located at: -- **Windows**: - - Stable: `C:\Users\\.cortexcpp` - - Beta: `C:\Users\\.cortexcpp-beta` - - Nighty: `C:\Users\\.cortexcpp-nightly` -- **Linux**: - - Stable: `/home//.cortexcpp` - - Beta: `/home//.cortexcpp-beta` - - Nighty: `/home//.cortexcpp-nightly` -- **macOS**: - - Stable: `/Users/\.cortexcpp` - - Beta: `/Users//.cortexcpp-beta` - - Nighty: `/Users//.cortexcpp-nightly` +- **Windows**: `C:\Users\\cortexcpp` +- **Linux**: `/home//cortexcpp` +- **macOS**: `/Users/\cortexcpp` ### Folder Structure The Cortex.cpp data folder typically follows this structure: @@ -77,57 +65,9 @@ The Cortex.cpp data folder typically follows this structure: └── llamacpp ``` - - ```yaml - ~/.cortex-beta - ├── models/ - │ └── model.list - │ └── huggingface.co/ - │ └── / - └── / - └── model.yaml - └── model.gguf - │ └── cortex.so/ - │ └── / - │ └── / - └── ...engine_files - └── model.yaml - │ └── imported/ - └── imported_model.yaml - ├── logs/ - │ └── cortex.txt - └── cortex-cli.txt - └── engines/ - └── llamacpp - ``` - - - ```yaml - ~/.cortex-nightly - ├── models/ - │ └── model.list - │ └── huggingface.co/ - │ └── / - └── / - └── model.yaml - └── model.gguf - │ └── cortex.so/ - │ └── / - │ └── / - └── ...engine_files - └── model.yaml - │ └── imported/ - └── imported_model.yaml - ├── logs/ - │ └── cortex.txt - └── cortex-cli.txt - └── engines/ - └── llamacpp - ``` - -#### `.cortexcpp` +#### `cortexcpp` The main directory that stores all Cortex-related files, located in the user's home directory. #### `models/` Contains the AI models used by Cortex for processing and generating responses. 
diff --git a/docs/docs/installation.mdx b/docs/docs/installation.mdx new file mode 100644 index 000000000..80409e009 --- /dev/null +++ b/docs/docs/installation.mdx @@ -0,0 +1,109 @@ +--- +title: Installation +description: Cortex Installation +slug: "installation" +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; +import Admonition from '@theme/Admonition'; + +:::warning +🚧 Cortex.cpp is currently under development. Our documentation outlines the intended behavior of Cortex, which may not yet be fully implemented in the codebase. +::: + +## Cortex.cpp Installation +### Cortex.cpp offers four installer types +- Network Installers download a minimal system and require an internet connection to fetch packages during installation. +- Local Installers include all necessary packages, enabling offline installation without internet access. +- Dockerfile Installers are used to build a Docker image for Cortex. +- Binary files without package management. + +### Cortex.cpp supports three channels + +- Stable: The latest stable release on github. +- Beta: The release candidate for the next stable release, available on github release with the tag `vx.y.z-rc1` +- Nightly: The nightly build of the latest code on dev branch, available on [discord](https://discord.com/channels/1107178041848909847/1283654073488379904). + +For more information, please check out [different channels](#different-channels). 
+ +### Download URLs + +| Type | Version | Operating System | Download Link | +|----------|---------------|------------------|---------------------------------------------------------------------------------------------------| +| **Local** | **Stable** | **Windows** | [Download](https://app.cortexcpp.com/download/latest/windows-amd64-local) | +| **Local** | **Stable** | **Linux** | [Download](https://app.cortexcpp.com/download/latest/linux-amd64-local) | +| **Local** | **Stable** | **MacOS** | [Download](https://app.cortexcpp.com/download/latest/mac-universal-local) | + +:::info +For other versions, please look at [cortex.cpp repo](https://github.com/janhq/cortex.cpp) or each installer page. +::: + + +## Minimum Requirements + +### OS +- MacOS 12 or later +- Windows 10 or later +- Linux: Ubuntu 20.04 or later, Debian 11 or later (For other distributions, please use the Dockerfile installer or binary files, we have not tested on other distributions yet.) + +### Hardware +#### CPU +:::info +- Cortex.cpp supports a processor that can handle AVX2. For the full list, please see [here](https://en.wikipedia.org/wiki/Advanced_Vector_Extensions#CPUs_with_AVX2). +- We support older processors with AVX, AVX-512 and non-AVX, though this is not recommended. +::: + +##### Intel CPU +- Haswell processors (Q2 2013) and newer. +- Tiger Lake (Q3 2020) and newer for Celeron and Pentium processors. +##### AMD CPU +- Excavator processors (Q2 2015) and newer. + +#### RAM +:::info +We support DDR2 RAM as the minimum requirement but recommend using newer generations of RAM for improved performance. +::: +- 8GB for running up to 3B models (int4). +- 16GB for running up to 7B models (int4). +- 32GB for running up to 13B models (int4). + +#### GPU +:::info +Having at least 6GB VRAM when using NVIDIA, AMD, or Intel Arc GPUs is recommended. +::: +- 6GB can load the 3B model (int4) with `ngl` at 120 ~ full speed on CPU/ GPU. 
+- 8GB can load the 7B model (int4) with `ngl` at 120 ~ full speed on CPU/ GPU. +- 12GB can load the 13B model (int4) with `ngl` at 120 ~ full speed on CPU/ GPU. +:::info +- [NVIDIA driver](https://www.nvidia.com/Download/index.aspx) version 470.63.01 or higher. + *Note: Users must install the driver by themselves.* +- [CUDA Toolkit](https://developer.nvidia.com/cuda-toolkit) version 12.0 or higher. *Note: Cortex.cpp will automatically detect and install the required version of cudart to the user's machine.* +::: +#### Disk +- At least 10GB for app storage and model download. + +## Different channels + +Different channels have different features, stability levels, binary file name, app folder and data folder. + +### Stable +- App name: `cortexcpp` +- Binary file name: `cortex`, `cortex-server` for linux and mac; `cortex.exe`, `cortex-server.exe` for windows.; +- App folder (Windows Only): `C:\Users\\AppData\Local\cortexcpp` +- Data folder: `~/cortexcpp` for linux and mac; `C:\Users\\cortexcpp` for windows. +- Uninstaller script (Mac Only): `cortex-uninstall.sh` + +### Beta +- App name: `cortexcpp-beta` +- Binary file name: `cortex-beta`, `cortex-server-beta` for linux and mac; `cortex-beta.exe`, `cortex-server-beta.exe` for windows.; +- App folder (Windows Only): `C:\Users\\AppData\Local\cortexcpp-beta` +- Data folder: `~/cortexcpp-beta` for linux and mac; `C:\Users\\cortexcpp-beta` for windows. +- Uninstaller script (Mac Only): `cortex-beta-uninstall.sh` + +### Nightly +- App name: `cortexcpp-nightly` +- Binary file name: `cortex-nightly`, `cortex-server-nightly` for linux and mac; `cortex-nightly.exe`, `cortex-server-nightly.exe` for windows.; +- App folder (Windows Only): `C:\Users\\AppData\Local\cortexcpp-nightly` +- Data folder: `~/cortexcpp-nightly` for linux and mac; `C:\Users\\cortexcpp-nightly` for windows. 
+- Uninstaller script (Mac Only): `cortex-nightly-uninstall.sh` diff --git a/docs/docs/installation/docker.mdx b/docs/docs/installation/docker.mdx index 3aa0dcbac..c736c6577 100644 --- a/docs/docs/installation/docker.mdx +++ b/docs/docs/installation/docker.mdx @@ -1,8 +1,166 @@ --- title: Docker -description: Install Cortex through Docker. +description: Install Cortex using Docker. --- +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; +import Admonition from '@theme/Admonition'; + :::warning -🚧 Cortex.cpp is currently under development. Our documentation outlines the intended behavior of Cortex, which may not yet be fully implemented in the codebase. -::: \ No newline at end of file +🚧 **Cortex.cpp is currently in development.** The documentation describes the intended functionality, which may not yet be fully implemented. +::: + +## Setting Up Cortex with Docker + +This guide walks you through the setup and running of Cortex using Docker. + +### Prerequisites + +- Docker or Docker Desktop +- `nvidia-container-toolkit` (for GPU support) + +### Setup Instructions + +1. **Clone the Cortex Repository** + ```bash + git clone https://github.com/janhq/cortex.cpp.git + cd cortex.cpp + git submodule update --init + ``` + +2. **Build the Docker Image** + + + + ```sh + docker build -t cortex --build-arg CORTEX_CPP_VERSION=$(git rev-parse HEAD) -f docker/Dockerfile . + ``` + + + ```sh + docker build --build-arg CORTEX_LLAMACPP_VERSION=0.1.34 --build-arg CORTEX_CPP_VERSION=$(git rev-parse HEAD) -t cortex -f docker/Dockerfile . + ``` + + + +3. **Run the Docker Container** + - Create a Docker volume to store models and data: + ```bash + docker volume create cortex_data + ``` + + + + ```sh + # requires nvidia-container-toolkit + docker run --gpus all -it -d --name cortex -v cortex_data:/root/cortexcpp -p 39281:39281 cortex + ``` + + + ```sh + docker run -it -d --name cortex -v cortex_data:/root/cortexcpp -p 39281:39281 cortex + ``` + + + +4. 
**Check Logs (Optional)** + ```bash + docker logs cortex + ``` + +5. **Access the Cortex Documentation API** + - Open [http://localhost:39281](http://localhost:39281) in your browser. + +6. **Access the Container and Try Cortex CLI** + ```bash + docker exec -it cortex bash + cortex --help + ``` + +### Usage + +With Docker running, you can use the following commands to interact with Cortex. Ensure the container is running and `curl` is installed on your machine. + +#### 1. List Available Engines + +```bash +curl --request GET --url http://localhost:39281/v1/engines --header "Content-Type: application/json" +``` + +- **Example Response** + ```json + { + "data": [ + { + "description": "This extension enables chat completion API calls using the Onnx engine", + "format": "ONNX", + "name": "onnxruntime", + "status": "Incompatible" + }, + { + "description": "This extension enables chat completion API calls using the LlamaCPP engine", + "format": "GGUF", + "name": "llama-cpp", + "status": "Ready", + "variant": "linux-amd64-avx2", + "version": "0.1.37" + } + ], + "object": "list", + "result": "OK" + } + ``` + +#### 2. Pull Models from Hugging Face + +- Open a terminal and run `websocat ws://localhost:39281/events` to capture download events, follow [this instruction](https://github.com/vi/websocat?tab=readme-ov-file#installation) to install `websocat`. +- In another terminal, pull models using the commands below. + + + + ```sh + # requires nvidia-container-toolkit + curl --request POST --url http://localhost:39281/v1/models/pull --header 'Content-Type: application/json' --data '{"model": "tinyllama:gguf"}' + ``` + + + ```sh + curl --request POST --url http://localhost:39281/v1/models/pull --header 'Content-Type: application/json' --data '{"model": "https://huggingface.co/afrideva/zephyr-smol_llama-100m-sft-full-GGUF/blob/main/zephyr-smol_llama-100m-sft-full.q2_k.gguf"}' + ``` + + + +- After pull models successfully, run command below to list models. 
+ ```bash + curl --request GET --url http://localhost:39281/v1/models + ``` + +#### 3. Start a Model and Send an Inference Request + +- **Start the model:** + ```bash + curl --request POST --url http://localhost:39281/v1/models/start --header 'Content-Type: application/json' --data '{"model": "tinyllama:gguf"}' + ``` + +- **Send an inference request:** + ```bash + curl --request POST --url http://localhost:39281/v1/chat/completions --header 'Content-Type: application/json' --data '{ + "frequency_penalty": 0.2, + "max_tokens": 4096, + "messages": [{"content": "Tell me a joke", "role": "user"}], + "model": "tinyllama:gguf", + "presence_penalty": 0.6, + "stop": ["End"], + "stream": true, + "temperature": 0.8, + "top_p": 0.95 + }' + ``` + +#### 4. Stop a Model + +- To stop a running model, use: + ```bash + curl --request POST --url http://localhost:39281/v1/models/stop --header 'Content-Type: application/json' --data '{"model": "tinyllama:gguf"}' + ``` diff --git a/docs/docs/installation/linux.mdx b/docs/docs/installation/linux.mdx index ce074f753..23e538a52 100644 --- a/docs/docs/installation/linux.mdx +++ b/docs/docs/installation/linux.mdx @@ -13,119 +13,111 @@ import Admonition from '@theme/Admonition'; ::: ## Cortex.cpp Installation -Cortex.cpp offers two installer types: -- Network Installers download a minimal system and require an internet connection to fetch packages during installation. -- Local Installers include all necessary packages, enabling offline installation without internet access. :::info -Before installation, make sure that you have met the required [dependencies](#dependencies) and [hardware](#hardware) to run Cortex. +Before installation, make sure that you have met the [minimum requirements](/docs/installation#minimum-requirements) to run Cortex. +This instruction is for stable releases. For beta and nightly releases, please replace `cortex` with `cortex-beta` and `cortex-nightly`, respectively. ::: -1. 
Download the Linux installer: - - `.deb`: - - Stable: https://github.com/janhq/cortex.cpp/releases - - Beta: https://github.com/janhq/cortex.cpp/releases - - Nightly: https://github.com/janhq/cortex.cpp/releases - - `.appImage`: - - Stable: https://github.com/janhq/cortex.cpp/releases - - Beta: https://github.com/janhq/cortex.cpp/releases - - Nightly: https://github.com/janhq/cortex.cpp/releases - -2. Ensure that Cortex.cpp is sucessfulyy installed: -```bash -# Stable -cortex -# Beta -cortex-beta +### Prerequisites - OpenMPI -# Nightly -cortex-nightly -``` +### Install Cortex.cpp + +1. Download the Linux installer: + - From release: https://github.com/janhq/cortex.cpp/releases + - From quick download links: + - Local installer `.deb`: + - Stable: https://app.cortexcpp.com/download/latest/linux-amd64-local + - Beta: https://app.cortexcpp.com/download/beta/linux-amd64-local + - Nightly: https://app.cortexcpp.com/download/nightly/linux-amd64-local + - Network installer `.deb`: + - Stable: https://app.cortexcpp.com/download/latest/linux-amd64-network + - Beta: https://app.cortexcpp.com/download/beta/linux-amd64-network + - Nightly: https://app.cortexcpp.com/download/nightly/linux-amd64-network + - Binary: + - Stable: https://app.cortexcpp.com/download/latest/linux-amd64-binary + - Beta: https://app.cortexcpp.com/download/beta/linux-amd64-binary + - Nightly: https://app.cortexcpp.com/download/nightly/linux-amd64-binary + +2. Install Cortex.cpp using the following command: + ```bash + # Installer + sudo apt install ./cortex--linux-amd64-network-installer.deb + + + # Binary + tar -xvf cortex--linux-amd64.tar.gz + cd cortex + sudo mv cortex /usr/bin/cortex + sudo chmod +x /usr/bin/cortex + sudo mv cortex-server /usr/bin/cortex-server + + ## For binary, you need to install engine manually after extracting the binary + cortex engines install llama-cpp + ``` + +3.
Ensure that Cortex.cpp is successfully installed: + ```bash + # Stable + cortex -v + ``` ### Data Folder By default, Cortex.cpp is installed in the following directory: ``` # Binary Location -/usr/bin/cortexcpp +/usr/bin/cortex +/usr/bin/cortex-server # Application Data (Engines, Models and Logs folders) -/home//.cortexcpp +/home//cortexcpp + +# Configuration File +/home//.cortexrc ``` + ## Uninstall Cortex.cpp ```bash # Stable version sudo apt remove cortexcpp - -# Beta version -sudo apt remove cortexcpp-beta - -# Nightly version -sudo apt remove cortexcpp-nightly ``` ## Build from Source -1. Clone the Cortex.cpp repository [here](https://github.com/janhq/cortex.cpp). -2. Navigate to the `engine > vcpkg` folder. -3. Configure the vpkg: - -```bash -cd vcpkg -./bootstrap-vcpkg.sh -vcpkg install -``` -4. Build the Cortex.cpp inside the `build` folder: -```bash -mkdir build -cd build -cmake .. -DCMAKE_TOOLCHAIN_FILE=path_to_vcpkg_folder/vcpkg/scripts/buildsystems/vcpkg.cmake -make -j4 -``` -5. Use Visual Studio with the C++ development kit to build the project using the files generated in the `build` folder. -6. Verify that Cortex.cpp is installed correctly by getting help information. - -```sh -# Get the help information -cortex -h -``` - -## Prerequisites -### Dependencies -- Node.js version 18 and higher -- NPM -- Homebrew +### Prerequisites - OpenMPI - -### Hardware -#### Operating System -- Debian-based (Supports `.deb` and `AppImage` ) -- Ubuntu version 22.04 and higher -#### CPU +- CMake >= 3.10 +- gcc/g++ >= 9 +- ninja-build +- make-gnu + +### Build Cortex.cpp + +1. **Clone the Cortex Repository** + ```bash + git clone https://github.com/janhq/cortex.cpp.git + cd cortex.cpp + git submodule update --init + ``` +2. Build the Cortex.cpp: + + ```bash + cd engine + make configure-vcpkg + make build CMAKE_EXTRA_FLAGS="-DCORTEX_CPP_VERSION=$(git rev-parse HEAD) -DCMAKE_BUILD_TEST=OFF -DCMAKE_TOOLCHAIN_FILE=vcpkg/scripts/buildsystems/vcpkg.cmake" + ``` + +3.
Verify that Cortex.cpp is built correctly by getting help information. + + ```sh + # Get the help information + ./build/cortex -h + ``` + +## Update cortex to latest version :::info -- Cortex.cpp supports a processor that can handle AVX2. For the full list, please see [here](https://en.wikipedia.org/wiki/Advanced_Vector_Extensions#CPUs_with_AVX2). -- We support older processors with AVX and AVX-512, though this is not recommended. +The script requires sudo permission. Supported for Debian-based systems only (Ubuntu, Debian, etc.). ::: -##### Intel CPU -- Haswell processors (Q2 2013) and newer. -- Tiger Lake (Q3 2020) and newer for Celeron and Pentium processors. -##### AMD CPU -- Excavator processors (Q2 2015) and newer. -#### RAM -:::info -We support DDR2 RAM as the minimum requirement but recommend using newer generations of RAM for improved performance. -::: -- 8GB for running up to 3B models (int4). -- 16GB for running up to 7B models (int4). -- 32GB for running up to 13B models (int4). -#### GPU -:::info -Having at least 6GB VRAM when using NVIDIA, AMD, or Intel Arc GPUs is recommended. -::: -- 6GB can load the 3B model (int4) with `ngl` at 120 ~ full speed on CPU/ GPU. -- 8GB can load the 7B model (int4) with `ngl` at 120 ~ full speed on CPU/ GPU. -- 12GB can load the 13B model (int4) with `ngl` at 120 ~ full speed on CPU/ GPU. -:::info -- [NVIDIA driver](https://www.nvidia.com/Download/index.aspx) version 470.63.01 or higher. -- [CUDA Toolkit](https://developer.nvidia.com/cuda-toolkit) version 12.3 or higher. -::: -#### Disk -- At least 10GB for app storage and model download.
\ No newline at end of file +```bash +sudo cortex update +``` \ No newline at end of file diff --git a/docs/docs/installation/mac.mdx b/docs/docs/installation/mac.mdx index b7de8205e..51c4760a4 100644 --- a/docs/docs/installation/mac.mdx +++ b/docs/docs/installation/mac.mdx @@ -12,92 +12,105 @@ import TabItem from '@theme/TabItem'; ::: ## Cortex.cpp Installation -Cortex.cpp offers two installer types: -- Network Installers download a minimal system and require an internet connection to fetch packages during installation. -- Local Installers include all necessary packages, enabling offline installation without internet access. :::info -Before installation, make sure that you have met the required [dependencies](#dependencies) and [hardware](#hardware) to run Cortex. +Before installation, make sure that you have met the [minimum requirements](/docs/installation#minimum-requirements) to run Cortex. +This instruction is for stable releases. For beta and nightly releases, please replace `cortex` with `cortex-beta` and `cortex-nightly`, respectively. ::: -1. Download the MacOs installer: - - Stable: https://github.com/janhq/cortex.cpp/releases - - Beta: https://github.com/janhq/cortex.cpp/releases - - Nightly: https://github.com/janhq/cortex.cpp/releases -2. Ensure that Cortex.cpp is sucessfully installed: -```bash -# Stable -cortex -# Beta -cortex-beta +1. 
Download the MacOS installer: + - From release: https://github.com/janhq/cortex.cpp/releases + - From quick download links: + - Local installer `.pkg`: + - Stable: https://app.cortexcpp.com/download/latest/mac-universal-local + - Beta: https://app.cortexcpp.com/download/beta/mac-universal-local + - Nightly: https://app.cortexcpp.com/download/nightly/mac-universal-local + - Network installer `.pkg`: + - Stable: https://app.cortexcpp.com/download/latest/mac-universal-network + - Beta: https://app.cortexcpp.com/download/beta/mac-universal-network + - Nightly: https://app.cortexcpp.com/download/nightly/mac-universal-network + - Binary: + - Stable: https://app.cortexcpp.com/download/latest/mac-universal-binary + - Beta: https://app.cortexcpp.com/download/beta/mac-universal-binary + - Nightly: https://app.cortexcpp.com/download/nightly/mac-universal-binary -# Nightly -cortex-nightly -``` +2. Install Cortex.cpp by double-clicking the downloaded `.pkg` file. + +3. Ensure that Cortex.cpp is successfully installed: + ```bash + # Stable + cortex -v + ``` ### Data Folder By default, Cortex.cpp is installed in the following directory: ``` # Binary Location -/usr/local/bin/cortexcpp +/usr/local/bin/cortex +/usr/local/bin/cortex-server +/usr/local/bin/cortex-uninstall.sh # Application Data (Engines, Models and Logs folders) -/Users//.cortexcpp +/Users//cortexcpp + +# Configuration File +/Users//.cortexrc ``` ## Uninstall Cortex.cpp Run the uninstaller script: ```bash -# Stable version sudo sh cortex-uninstall.sh - -# Beta version -sudo sh cortex-beta-uninstall.sh - -# Stable version -sudo sh cortex-nightly-uninstall.sh ``` + :::info The script requires sudo permission. ::: + ## Build from Source -1. Clone the Cortex.cpp repository [here](https://github.com/janhq/cortex.cpp). -2. Navigate to the `engine > vcpkg` folder. -3. Configure the vpkg: -```bash -cd vcpkg -./bootstrap-vcpkg.sh -vcpkg install -``` -4.
Build the Cortex.cpp inside the `build` folder: +### Prerequisites +- CMake >= 3.10 +- gcc/g++ >= 9 +- ninja-build +- make-gnu -```bash -mkdir build -cd build -cmake .. -DCMAKE_TOOLCHAIN_FILE=path_to_vcpkg_folder/vcpkg/scripts/buildsystems/vcpkg.cmake -make -j4 -``` -5. Use Visual Studio with the C++ development kit to build the project using the files generated in the `build` folder. -6. Verify that Cortex.cpp is installed correctly by getting help information. +### Build Cortex.cpp -```sh -# Get the help information -cortex -h -``` +1. **Clone the Cortex Repository** + ```bash + git clone https://github.com/janhq/cortex.cpp.git + cd cortex.cpp + git submodule update --init + ``` +2. Build the Cortex.cpp : + + + + ```sh + cd engine + make configure-vcpkg + make build CMAKE_EXTRA_FLAGS="-DCORTEX_CPP_VERSION=latest -DCMAKE_BUILD_TEST=OFF -DMAC_ARM64=ON -DCMAKE_TOOLCHAIN_FILE=vcpkg/scripts/buildsystems/vcpkg.cmake" + ``` + + + ```sh + cd engine + make configure-vcpkg + make build CMAKE_EXTRA_FLAGS="-DCORTEX_CPP_VERSION=latest -DCMAKE_BUILD_TEST=OFF -DCMAKE_TOOLCHAIN_FILE=vcpkg/scripts/buildsystems/vcpkg.cmake" + ``` + + + +3. Verify that Cortex.cpp is builded correctly by getting help information. -## Prerequisites - -### Dependencies -- Homebrew - -### Hardware -#### Operating System -- MacOSX 13.6 or higher. -#### CPU -- Mac Intel CPU -- Mac Apple Silicon -#### RAM -- 8GB for running up to 3B models. -- 16GB for running up to 7B models. -- 32GB for running up to 13B models. -#### Disk -- At least 10GB for app storage and model download. \ No newline at end of file + ```sh + # Get the help information + ./build/cortex -h + ``` + +## Update cortex to latest version +:::info +The script requires sudo permission. 
+::: +```bash +sudo cortex update +``` \ No newline at end of file diff --git a/docs/docs/installation/windows.mdx b/docs/docs/installation/windows.mdx index 11dce33a2..39855d44e 100644 --- a/docs/docs/installation/windows.mdx +++ b/docs/docs/installation/windows.mdx @@ -19,151 +19,87 @@ For Windows, Cortex.cpp can be installed in two ways: ## Windows ### Install Cortex.cpp -Cortex.cpp offers two installer types: -- Network Installers download a minimal system and require an internet connection to fetch packages during installation. -- Local Installers include all necessary packages, enabling offline installation without internet access. :::info -Before installation, make sure that you have met the required [dependencies](#dependencies) and [hardware](#hardware) to run Cortex. +Before installation, make sure that you have met the [minimum requirements](/docs/installation#minimum-requirements) to run Cortex. +This instruction is for stable releases. For beta and nightly releases, please replace `cortex` with `cortex-beta` and `cortex-nightly`, respectively. 
::: -- Stable: https://github.com/janhq/cortex.cpp/releases -- Beta: https://github.com/janhq/cortex.cpp/releases -- Nightly: https://github.com/janhq/cortex.cpp/releases + +Download the Windows installer: + - From release: https://github.com/janhq/cortex.cpp/releases + - From quick download links: + - Local installer `.exe`: + - Stable: https://app.cortexcpp.com/download/latest/windows-amd64-local + - Beta: https://app.cortexcpp.com/download/beta/windows-amd64-local + - Nightly: https://app.cortexcpp.com/download/nightly/windows-amd64-local + - Network installer `.exe`: + - Stable: https://app.cortexcpp.com/download/latest/windows-amd64-network + - Beta: https://app.cortexcpp.com/download/beta/windows-amd64-network + - Nightly: https://app.cortexcpp.com/download/nightly/windows-amd64-network + - Binary: + - Stable: https://app.cortexcpp.com/download/latest/windows-amd64-binary + - Beta: https://app.cortexcpp.com/download/beta/windows-amd64-binary + - Nightly: https://app.cortexcpp.com/download/nightly/windows-amd64-binary + + #### Data Folder By default, Cortex.cpp is installed in the following directory: ``` # Binary Location -C:\Users\\AppData\Local\cortexcpp +C:\Users\\AppData\Local\cortexcpp\ # Application Data (Engines, Models and Logs folders) -C:\Users\\.cortexcpp +C:\Users\\cortexcpp + +# Configuration File +C:\Users\\.cortexrc ``` + ### Uninstall Cortex.cpp To uninstall Cortex.cpp: +1. Open the **Control Panel**. 1. Navigate to **Add or Remove program**. -2. Search for Cortex.cpp and click **Uninstall**. +2. Search for cortexcpp and click **Uninstall**. + ## Windows Subsystem Linux :::info Windows Subsystem Linux allows running Linux tools and workflows seamlessly alongside Windows applications. For more information, please see this [article](https://learn.microsoft.com/en-us/windows/wsl/faq).
::: -### Install Cortex.cpp -Cortex.cpp offers two installer types: -- Network Installers download a minimal system and require an internet connection to fetch packages during installation. -- Local Installers include all necessary packages, enabling offline installation without internet access. -:::info -Before installation, make sure that you have met the required [dependencies](#dependencies) and [hardware](#hardware) to run Cortex. -::: -1. Download the Windows installer: - - Stable: https://github.com/janhq/cortex.cpp/releases - - Beta: https://github.com/janhq/cortex.cpp/releases - - Nightly: https://github.com/janhq/cortex.cpp/releases -2. Ensure that Cortex.cpp is sucessfulyy installed: -```bash -# Stable -cortex.exe -# Beta -cortex-beta.exe +Follow [linux installation steps](linux) to install Cortex.cpp on Windows Subsystem Linux. -# Nightly -cortex-nightly.exe -``` - -#### Data Folder -By default, Cortex.cpp is installed in the following directory: -``` -# Binary Location -C:\Users\\AppData\Local\cortexcpp\cortex.exe -# Application Data (Engines, Models and Logs folders) -C:\Users\\.cortexcpp -``` -### Uninstall Cortex.cpp -Run the uninstaller script: -```bash -# Stable version -sudo apt remove cortexcpp +## Build from Source -# Beta version -sudo apt remove cortexcpp-beta +### Prerequisites +- CMake >= 3.10 +- msvc (Visual Studio 2019/2022) +- ninja-build +- make-gnu -# Nightly version -sudo apt remove cortexcpp-nightly -``` -## Build from Source +### Build Cortex.cpp -1. Clone the Cortex.cpp repository [here](https://github.com/janhq/cortex.cpp). -2. Navigate to the `engine > vcpkg` folder. -3. Configure the vpkg: +1. **Clone the Cortex Repository** + ```cmd + git clone https://github.com/janhq/cortex.cpp.git + cd cortex.cpp + git submodule update --init + ``` +2. Build the Cortex.cpp : -```bash -cd vcpkg -## Windows -./bootstrap-vcpkg.bat -## WSL -./bootstrap-vcpkg.sh -vcpkg install -``` -4. 
Build the Cortex.cpp inside the `build` folder: + ```cmd + cd engine + make configure-vcpkg + make build CMAKE_EXTRA_FLAGS="-DCORTEX_CPP_VERSION=latest -DCMAKE_BUILD_TEST=OFF -DBUILD_SHARED_LIBS=OFF -DCMAKE_TOOLCHAIN_FILE=vcpkg\\scripts\\buildsystems\\vcpkg.cmake -DVCPKG_TARGET_TRIPLET=x64-windows-static -DCMAKE_BUILD_TYPE=RELEASE -GNinja" + ``` -```bash -mkdir build -cd build -## Windows -cmake .. -DBUILD_SHARED_LIBS=OFF -DCMAKE_TOOLCHAIN_FILE=path_to_vcpkg_folder/vcpkg/scripts/buildsystems/vcpkg.cmake -DVCPKG_TARGET_TRIPLET=x64-windows-static -## WSL -cmake .. -DCMAKE_TOOLCHAIN_FILE=path_to_vcpkg_folder/vcpkg/scripts/buildsystems/vcpkg.cmake -make -j4 -``` -5. Use Visual Studio with the C++ development kit to build the project using the files generated in the `build` folder. -6. Verify that Cortex.cpp is installed correctly by getting help information. +3. Verify that Cortex.cpp is builded correctly by getting help information. -```sh -# Get the help information -cortex -h -``` + ```cmd + # Get the help information + .\build\cortex.exe -h + ``` -## Prerequisites -### Dependencies -#### Windows -- Node.js version 18 and higher -- NPM -#### Windows Subsystem for Linux -- Node.js version 18 and higher -- NPM -- Homebrew -- Windows Subsystem for Linux (Ubuntu) -- OpenMPI - -### Hardware -#### Operating System -- Windows 10 or higher. -#### CPU -:::info -- Cortex.cpp supports a processor that can handle AVX2. For the full list, please see [here](https://en.wikipedia.org/wiki/Advanced_Vector_Extensions#CPUs_with_AVX2). -- We support older processors with AVX and AVX-512, though this is not recommended. -::: -##### Intel CPU -- Haswell processors (Q2 2013) and newer. -- Tiger Lake (Q3 2020) and newer for Celeron and Pentium processors. -##### AMD CPU -- Excavator processors (Q2 2015) and newer. -#### RAM -:::info -We support DDR2 RAM as the minimum requirement but recommend using newer generations of RAM for improved performance. 
-::: -- 8GB for running up to 3B models (int4). -- 16GB for running up to 7B models (int4). -- 32GB for running up to 13B models (int4). -#### GPU -:::info -Having at least 6GB VRAM when using NVIDIA, AMD, or Intel Arc GPUs is recommended. -::: -- 6GB can load the 3B model (int4) with `ngl` at 120 ~ full speed on CPU/ GPU. -- 8GB can load the 7B model (int4) with `ngl` at 120 ~ full speed on CPU/ GPU. -- 12GB can load the 13B model (int4) with `ngl` at 120 ~ full speed on CPU/ GPU. -:::info -- [NVIDIA driver](https://www.nvidia.com/Download/index.aspx) version 470.63.01 or higher. -- [CUDA Toolkit](https://developer.nvidia.com/cuda-toolkit) version 12.3 or higher. -::: -#### Disk -- At least 10GB for app storage and model download. \ No newline at end of file +## Update cortex to latest version +```bash +cortex.exe update +``` \ No newline at end of file diff --git a/docs/sidebars.ts b/docs/sidebars.ts index 09ad3e504..bf520499c 100644 --- a/docs/sidebars.ts +++ b/docs/sidebars.ts @@ -35,7 +35,8 @@ const sidebars: SidebarsConfig = { type: "category", label: "Installation", link: { - type: "generated-index", + type: "doc", + id: "installation" }, collapsed: true, items: [ @@ -68,7 +69,8 @@ const sidebars: SidebarsConfig = { type: "category", label: "Architecture", link: { - type: "generated-index", + type: "doc", + id: "architecture" }, collapsed: true, items: [