GreenNode is a one-stop Python library for seamless AI model training, deployment, and infrastructure management.
To install the GreenNode Python library, run:

```bash
pip install greennode
```
Documentation for the image_gen, speech_to_text, text_to_speech, and rerank endpoints (among others) is still to be written; the examples below cover chat completions, text completions, embeddings, and model listing.
The examples read your API key and endpoint from the GREENNODE_API_KEY and GREENNODE_BASE_URL environment variables, so set both before running them.

Create a chat completion:

```python
import os
from greennode import GreenNode

client = GreenNode(
    api_key=os.environ.get("GREENNODE_API_KEY"),
    base_url=os.environ.get("GREENNODE_BASE_URL"),
)

# Request a chat completion and print the assistant's reply.
response = client.chat.completions.create(
    model="meta/llama-3.1-8b-instruct",
    messages=[{"role": "user", "content": "tell me about new york"}],
)
print(response.choices[0].message.content)
```
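The `messages` list appears to follow the familiar OpenAI-style chat schema, so multi-turn conversations should work the same way. The sketch below is an assumption-heavy illustration: the `system`/`assistant` roles and the `temperature` and `max_tokens` parameters come from that OpenAI-compatible convention, not from GreenNode documentation.

```python
import os
from greennode import GreenNode

client = GreenNode(
    api_key=os.environ.get("GREENNODE_API_KEY"),
    base_url=os.environ.get("GREENNODE_BASE_URL"),
)

# Multi-turn request: a system prompt plus the running conversation.
# The "system"/"assistant" roles and the temperature/max_tokens parameters
# are assumed from the OpenAI-compatible convention, not GreenNode docs.
response = client.chat.completions.create(
    model="meta/llama-3.1-8b-instruct",
    messages=[
        {"role": "system", "content": "You are a concise travel guide."},
        {"role": "user", "content": "tell me about new york"},
        {"role": "assistant", "content": "New York City is made up of five boroughs..."},
        {"role": "user", "content": "Which borough should I visit first?"},
    ],
    temperature=0.7,
    max_tokens=256,
)
print(response.choices[0].message.content)
```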
Stream a chat completion and print tokens as they arrive:

```python
import os
from greennode import GreenNode

client = GreenNode(
    api_key=os.environ.get("GREENNODE_API_KEY"),
    base_url=os.environ.get("GREENNODE_BASE_URL"),
)

# With stream=True, the API yields chunks as they are generated.
stream = client.chat.completions.create(
    model="meta/llama-3.1-8b-instruct",
    messages=[{"role": "user", "content": "tell me about new york"}],
    stream=True,
)
for chunk in stream:
    print(chunk.choices[0].delta.content or "", end="", flush=True)
```
Generate a text completion from a plain prompt:

```python
import os
from greennode import GreenNode

client = GreenNode(
    api_key=os.environ.get("GREENNODE_API_KEY"),
    base_url=os.environ.get("GREENNODE_BASE_URL"),
)

# A plain text completion; stream=False returns the full text at once.
response = client.completions.create(
    model="Qwen/Qwen2.5-1.5B",
    prompt="New York City is",
    stream=False,
)
print(response.choices[0].text)
```
Stream a text completion:

```python
import os
from greennode import GreenNode

client = GreenNode(
    api_key=os.environ.get("GREENNODE_API_KEY"),
    base_url=os.environ.get("GREENNODE_BASE_URL"),
)

# Stream the completion and print text chunks as they arrive.
stream = client.completions.create(
    model="Qwen/Qwen2.5-1.5B",
    prompt="New York City is",
    stream=True,
)
for chunk in stream:
    print(chunk.choices[0].text or "", end="", flush=True)
```
Create embeddings for a batch of inputs:

```python
import os
from greennode import GreenNode

client = GreenNode(
    api_key=os.environ.get("GREENNODE_API_KEY"),
    base_url=os.environ.get("GREENNODE_BASE_URL"),
)

# Embed a batch of inputs; encoding_format="float" returns raw float vectors.
response = client.embeddings.create(
    model="BAAI/bge-m3",
    input=["Hello world", "Thank you"],
    encoding_format="float",
)
print(response)
```
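The example above prints the entire response object. If the response follows the OpenAI-style embeddings schema (a `data` list whose items expose an `embedding` vector), the raw vectors can be pulled out as sketched below; those attribute names are assumptions rather than documented GreenNode fields.

```python
import os
from greennode import GreenNode

client = GreenNode(
    api_key=os.environ.get("GREENNODE_API_KEY"),
    base_url=os.environ.get("GREENNODE_BASE_URL"),
)

texts = ["Hello world", "Thank you"]
response = client.embeddings.create(
    model="BAAI/bge-m3",
    input=texts,
    encoding_format="float",
)

# Assumed OpenAI-style schema: response.data[i].embedding is a list of floats
# in the same order as the input texts.
for text, item in zip(texts, response.data):
    vector = item.embedding
    print(f"{text!r}: {len(vector)} dimensions, first values {vector[:3]}")
```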
List the models available on your endpoint:

```python
import os
from greennode import GreenNode

client = GreenNode(
    api_key=os.environ.get("GREENNODE_API_KEY"),
    base_url=os.environ.get("GREENNODE_BASE_URL"),
)

# List the models available on the endpoint.
response = client.models.list()
print(response)
```
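Here `print(response)` dumps the raw listing. Assuming the listing follows the OpenAI-style models schema (a `data` list of objects with an `id` field), the identifiers can be extracted directly and reused as the `model=` argument in the calls above; again, the attribute names are assumptions.

```python
import os
from greennode import GreenNode

client = GreenNode(
    api_key=os.environ.get("GREENNODE_API_KEY"),
    base_url=os.environ.get("GREENNODE_BASE_URL"),
)

# Assumed OpenAI-style listing: response.data is a list of model objects
# whose .id can be passed as the model= argument in the calls above.
response = client.models.list()
for model in response.data:
    print(model.id)
```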