A unified interface for Hugging Face Inference Providers and Google AI with OpenAI API compatibility.
Install with pip:

```bash
pip install instantly
```

Set your API keys in environment variables:

```bash
export HF_TOKEN=your_huggingface_token
export GEMINI_API_KEY=your_google_ai_token
```

Or use a `.env` file:

```
HF_TOKEN=your_huggingface_token
GEMINI_API_KEY=your_google_ai_token
OPENAI_BASE_URL=https://router.huggingface.co/v1
```
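If you keep the keys in a `.env` file, one way to expose them to your script is to load the file yourself with `python-dotenv`; this is a minimal sketch, and the package may also pick these variables up on its own:

```python
import os

from dotenv import load_dotenv  # pip install python-dotenv

# Loads HF_TOKEN, GEMINI_API_KEY, and OPENAI_BASE_URL from .env into os.environ.
load_dotenv()

hf_token = os.environ["HF_TOKEN"]
```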
Chat completions through the OpenAI-compatible client:

```python
from instantly import OpenAIClient

client = OpenAIClient(api_key="hf_token")
response = client.chat_completion(
    model="moonshotai/Kimi-K2-Instruct",
    messages=[{"role": "user", "content": "Hello"}]
)
```
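Given the OpenAI API compatibility, the reply text should be reachable through the usual chat completion schema; treat this as an assumption about the response object rather than documented behavior:

```python
# Assumes an OpenAI-style response: choices -> message -> content.
print(response.choices[0].message.content)
```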
Text-to-image generation with the Hugging Face inference client:

```python
from instantly import InferenceClient

client = InferenceClient(api_key="hf_token")
image = client.text_to_image(
    prompt="A landscape",
    model="black-forest-labs/FLUX.1-dev"
)
```
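If, as with the `huggingface_hub` inference client, the result is a PIL image, it can be saved directly; this is an assumption worth verifying for this wrapper:

```python
# Assumes text_to_image returns a PIL.Image.Image.
image.save("landscape.png")
```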
Content generation with the Google AI client:

```python
from instantly import GoogleAIClient

client = GoogleAIClient(api_key="your_gemini_key")
response = client.generate_content(
    model="gemini-2.5-flash-image-preview",
    prompt="Hello, how are you?"
)
```
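The shape of the returned object is not shown here; if it mirrors the Google AI SDK, the generated text is exposed as `response.text` (an assumption, check the client's docs):

```python
# Assumes a Google AI SDK-style response object.
print(response.text)
```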
Built-in web search and webpage tools:

```python
from instantly import DuckDuckGoSearchTool, WebSearchTool, VisitWebpageTool

# DuckDuckGo search
search = DuckDuckGoSearchTool(max_results=5)
results = search("Hugging Face")

# Web search with configurable engine
web_search = WebSearchTool(max_results=10, engine="duckduckgo")
results = web_search("Machine Learning")

# Visit and process webpage
webpage = VisitWebpageTool()
content = webpage("https://example.com")
```
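The tools compose into a small search-then-read pipeline. Their return formats are not documented here, so this sketch only prints the raw search results and fetches a fixed page:

```python
from instantly import DuckDuckGoSearchTool, VisitWebpageTool

# Search first and inspect the raw result format before parsing it further.
search = DuckDuckGoSearchTool(max_results=3)
hits = search("Hugging Face Inference Providers")
print(hits)

# Then pull the full text of one page; assumes the tool returns the page as text.
webpage = VisitWebpageTool()
article = webpage("https://huggingface.co")
print(article[:500])  # first 500 characters of the page content
```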
To set up a development environment:

- Clone the repository:

  ```bash
  git clone https://github.com/yourusername/instantly.git
  cd instantly
  ```

- Install development dependencies:

  ```bash
  pip install -e ".[dev]"
  ```

- Run tests:

  ```bash
  python -m pytest tests/
  ```

Released under the MIT License.