From e98c7edbafb2527f0e25bb638904d297e2b5b901 Mon Sep 17 00:00:00 2001 From: Filippo Pedrazzini Date: Sat, 23 Sep 2023 16:38:41 +0200 Subject: [PATCH] initial draft --- blog/2023-06-26-welcome/index.md | 2 +- .../index.md | 2 +- .../2023-07-05-chainlit-langchain-qa/index.md | 2 +- blog/2023-09-23-petals-private-swarm/index.md | 72 +++++++++++++++++++ blog/authors.yml | 2 +- 5 files changed, 76 insertions(+), 4 deletions(-) create mode 100644 blog/2023-09-23-petals-private-swarm/index.md diff --git a/blog/2023-06-26-welcome/index.md b/blog/2023-06-26-welcome/index.md index 31fd4e73..696a16bc 100644 --- a/blog/2023-06-26-welcome/index.md +++ b/blog/2023-06-26-welcome/index.md @@ -1,7 +1,7 @@ --- slug: hello-prem title: Hello Prem! -authors: [filippopedrazzinfp] +authors: [filopedraz] tags: [llm, self-hosted, prem, open-source, welcome] description: "Hello, I am Filippo and I am currently contributing to Prem." image: "./banner.png" diff --git a/blog/2023-07-03-serve-falcon-with-fastapi-and-docker/index.md b/blog/2023-07-03-serve-falcon-with-fastapi-and-docker/index.md index b655ef58..349eb7c4 100644 --- a/blog/2023-07-03-serve-falcon-with-fastapi-and-docker/index.md +++ b/blog/2023-07-03-serve-falcon-with-fastapi-and-docker/index.md @@ -1,7 +1,7 @@ --- slug: serving-falcon-7b-fastapi-docker title: Serving Falcon 7B Instruct with FastAPI and Docker -authors: [filippopedrazzinfp] +authors: [filopedraz] tags: [llm, self-hosted, prem, open-source, fastapi, docker, falcon-7b] description: "In this tutorial, we will walk you through the process of serving the Falcon 7B Instruction model using FastAPI and Docker. The complete code for this tutorial is available on GitHub." 
image: "./banner.jpg" diff --git a/blog/2023-07-05-chainlit-langchain-qa/index.md b/blog/2023-07-05-chainlit-langchain-qa/index.md index e7296c69..e7bec7c2 100644 --- a/blog/2023-07-05-chainlit-langchain-qa/index.md +++ b/blog/2023-07-05-chainlit-langchain-qa/index.md @@ -1,7 +1,7 @@ --- slug: chainlit-langchain-prem title: Talk to your Data with ChainLit and Langchain -authors: [ tiero, filippopedrazzinfp ] +authors: [ tiero, filopedraz ] tags: [ llm, self-hosted, prem, open-source, langchain, chainlit, vicuna-7b, chroma, vector-store ] description: 'Build a chatbot that talks to your data with Prem using LangChain, Chainlit, Chroma Vector Store and Vicuna 7B model, self-hosted on your MacOS laptop.' image: './chainlit-langchain.gif' diff --git a/blog/2023-09-23-petals-private-swarm/index.md b/blog/2023-09-23-petals-private-swarm/index.md new file mode 100644 index 00000000..a3e7f19c --- /dev/null +++ b/blog/2023-09-23-petals-private-swarm/index.md @@ -0,0 +1,72 @@ +--- +slug: petals-private-swarm +title: How to Deploy your Petals Private Swarm +authors: [filopedraz] +tags: [llms, petals, swarm, private, prem] +description: "Get your own Petals Private Swarm up and running in 5 minutes" +--- + + + +## Introduction to Petals + +### What is Petals? + +### Some Definitions and Concepts + +- **DHT**: Distributed Hash Table +- **Torrent**: A torrent is a file sent via the BitTorrent protocol. It can be just about any type of file, such as a movie, song, game, or application. +- **Swarm**: A swarm is a group of peers that share a torrent and are both uploading and downloading the torrent's content. +- **Peer**: A peer is one instance of a BitTorrent client running on a computer on the Internet to which other clients connect and transfer data. +- **Tracker**: A tracker is a server that keeps track of which seeds and peers are in the swarm. +- **Seed**: A seed is a client that has a complete copy of the data of a certain torrent. 
Once your BitTorrent client finishes downloading, it will remain open until you click the Finish button (or otherwise close it). This is known as being a seed or seeding. + +### How does Petals work? + +## Deploy your Petals Private Swarm + +### 1. Run a Backbone Server + +Create a DO Machine with at least 2GiB of RAM and 1 CPU and install all the necessary dependencies (Docker). + +```bash +docker run -d --net host --ipc host --volume petals-cache-backbone:/cache --name backbone --rm learningathome/petals:main python -m petals.cli.run_dht --host_maddrs /ip4/0.0.0.0/tcp/31337 --identity_path bootstrap1.id +``` + +Check the logs of the `backbone` container in order to get the IPs. + +### 2. Contribute to the Swarm + +#### Cloud GPU Instance (NVIDIA) + +Create a Machine on `Paperspace` or `DataCrunch` and run the following command in order to join the Swarm. + +```bash +docker run -d --net host --ipc host --gpus all --volume petals-cache-node1:/cache --name node1 --rm learningathome/petals:main python -m petals.cli.run_server --port 31330 --num_blocks 20 petals-team/StableBeluga2 --initial_peers /ip4/209.38.217.30/tcp/31337/p2p/QmecL18cmRaDdAcRmA7Ctj1gyAeUYG433WppA1UWTHTew6 /ip4/127.0.0.1/tcp/31337/p2p/QmecL18cmRaDdAcRmA7Ctj1gyAeUYG433WppA1UWTHTew6 +``` + +Where the `--initial_peers` variable should be filled with the logs you get from the `backbone` peer. + +#### Mac + +```bash +brew install python +python3 -m pip install git+https://github.com/bigscience-workshop/petals +python3 -m petals.cli.run_server --public_name prem-app petals-team/StableBeluga2 --initial_peers /ip4/209.38.217.30/tcp/31337/p2p/QmecL18cmRaDdAcRmA7Ctj1gyAeUYG433WppA1UWTHTew6 /ip4/127.0.0.1/tcp/31337/p2p/QmecL18cmRaDdAcRmA7Ctj1gyAeUYG433WppA1UWTHTew6 +``` + +#### Prem + +- Install Prem Desktop App +- Go to `Settings` +- Enable `Swarm Mode` + +### 3. Monitor your Private Swarm + +- Run Prem Explorer + +### 4. 
Consume your Private Swarm + +- Run Prem App and connect to your Private Swarm + +## Conclusion \ No newline at end of file diff --git a/blog/authors.yml b/blog/authors.yml index d3005b08..4657cb9a 100644 --- a/blog/authors.yml +++ b/blog/authors.yml @@ -3,7 +3,7 @@ tiero: title: Bitcoin wizard url: https://github.com/tiero image_url: https://github.com/tiero.png -filippopedrazzinfp: +filopedraz: name: Filippo Pedrazzini title: Core contributor @ PremAI url: https://github.com/filopedraz