import requests
import os
import gspread
from oauth2client.service_account import ServiceAccountCredentials
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry

# Fetch the API key from environment variables
API_KEY = os.getenv("OPENAI_API_KEY")
if API_KEY is None:
    raise ValueError("No API key provided. Please set the OPENAI_API_KEY environment variable.")

# Endpoint for the Llama API
ENDPOINT_URL = "https://api.llama-api.com/chat/completions"

# Google Sheets setup
scope = [
    "https://spreadsheets.google.com/feeds",
    "https://www.googleapis.com/auth/spreadsheets",
    "https://www.googleapis.com/auth/drive.file",
    "https://www.googleapis.com/auth/drive",
]
creds_path = 'D:\\YouTube\\Scripted\\Operationnel\\Developping\\1.Agent_Symbiotic_Beats\\rugged-link-416209-ffb6073b6fed.json'
creds = ServiceAccountCredentials.from_json_keyfile_name(creds_path, scope)
client = gspread.authorize(creds)
ss = client.open("Suno.ai")
output_sheet = ss.worksheet('Feuille 1')


def requests_retry_session(
    retries=3,
    backoff_factor=0.3,
    status_forcelist=(500, 502, 503, 504),
    session=None,
):
    """Return a requests session that retries transient server errors with backoff."""
    session = session or requests.Session()
    retry = Retry(
        total=retries,
        read=retries,
        connect=retries,
        backoff_factor=backoff_factor,
        status_forcelist=status_forcelist,
    )
    adapter = HTTPAdapter(max_retries=retry)
    session.mount('http://', adapter)
    session.mount('https://', adapter)
    return session


def chat_llama(prompt, max_tokens=150, temperature=0.7):
    """Send a prompt to the Llama API and return the parsed JSON response, or None on error."""
    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json"
    }
    data = {
        "model": "llama-70B",
        "prompt": prompt,
        "max_tokens": max_tokens,
        "temperature": temperature
    }
    try:
        response = requests_retry_session().post(ENDPOINT_URL, json=data, headers=headers)
        response.raise_for_status()  # Raises HTTPError for 4xx/5xx status codes
        return response.json()
    except requests.exceptions.HTTPError as e:
        print("HTTP Error:", e)
        return None
    except requests.exceptions.RequestException as e:
        print("Error contacting the Llama API:", e)
        return None


def process_response(response):
    # Assuming the response structure contains these fields; adjust as needed
    return response.get('choices', [{}])[0].get('text', "").strip()


if __name__ == "__main__":
    prompt = "Describe the music concept focusing on specific styles and themes."
    response = chat_llama(prompt)
    if response:
        processed_text = process_response(response)
        print("Response from Llama API:", processed_text)
    else:
        print("Failed to get a response from the Llama API.")
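
# Note (hypothetical sketch, not part of the original script): the "Suno.ai"
# spreadsheet and its 'Feuille 1' worksheet are opened above but never written
# to. If you want to log each prompt/response pair, gspread's append_row can
# be called from the __main__ block after a successful response, for example:
#
#     output_sheet.append_row([prompt, processed_text])
#
# Adjust the columns to match your sheet's layout.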