diff --git a/README.md b/README.md index a5245bba..b22af8f6 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ Ensure your machine satisfies the requirements: ## Resource Requirements - You need xDAI on Gnosis Chain in one of your wallets. -- You need an RPC for your agent instance. We recommend [Nodies RPC](https://www.nodies.app/). +- You need an RPC for your agent instance. We recommend [Quicknode RPC](https://www.quicknode.com/). - (From release v0.16.0 onwards) You will need a Subgraph API key that can be obtained at [The Graph](https://thegraph.com/studio/apikeys/). ## Run the Service @@ -105,7 +105,7 @@ Services can become staked by invoking the `stake()` contract method, where serv Once the command has completed, i.e. the service is running, you can see the live logs with: ```bash -docker logs trader_abci_0 --follow +docker logs $(docker ps --filter "name=trader" --format "{{.Names}}" | grep "_abci" | head -n 1) --follow ``` To stop your agent, use: @@ -153,16 +153,16 @@ Note: In this case, if the service is staked, then it will not update the on-cha cd trader; poetry run python ../report.py; cd .. ``` -3. Use this command to investigate your agent's logs: +3. Use the `analyse_logs.py` script to investigate your agent's logs: ```bash - cd trader; poetry run autonomy analyse logs --from-dir trader_service/abci_build/persistent_data/logs/ --agent aea_0 --reset-db; cd .. + cd trader; poetry run python ../analyse_logs.py --agent aea_0 --reset-db; cd .. ``` - For example, inspect the state transitions using this command: + For example, inspect the state transitions using the following command: ```bash - cd trader; poetry run autonomy analyse logs --from-dir trader_service/abci_build/persistent_data/logs/ --agent aea_0 --fsm --reset-db; cd .. + cd trader; poetry run python ../analyse_logs.py --agent aea_0 --fsm --reset-db; cd .. 
``` This will output the different state transitions of your agent per period, for example: @@ -411,7 +411,7 @@ Error: Service terminatation failed with following error; ChainInteractionError( ## Build deployments without executing the service -The script builds both a Docker Compose deployment (on `./trader/trader_service/abci_build`) and a Kubernetes deployment (on `./trader/trader_service/abci_build_k8s`). Then, by default, the script will launch the local Docker Compose deployment. If you just want to build the deployment without executing the service (for example, if you are deploying to a custom Kubernetes cluster), then execute the script as +The script builds both a Docker Compose deployment (on `./trader/trader_service/abci_build_????`) +and a Kubernetes deployment (on `./trader/trader_service/abci_build_k8s`). +Then, by default, the script will launch the local Docker Compose deployment. +If you just want to build the deployment without executing the service +(for example, if you are deploying to a custom Kubernetes cluster), then execute the script as: ```bash ./run_service.sh --build-only diff --git a/analyse_logs.py b/analyse_logs.py new file mode 100644 index 00000000..6d2afb50 --- /dev/null +++ b/analyse_logs.py @@ -0,0 +1,153 @@ +import os +import subprocess +import sys +import argparse + + +def _parse_args(): +    """Parse the script arguments.""" +    parser = argparse.ArgumentParser(description="Analyse agent logs.") + +    parser.add_argument( +        "--service-dir", +        default="trader_service", +        help="The service directory containing build directories (default: 'trader_service')." +    ) +    parser.add_argument( +        "--from-dir", +        help="Path to the logs directory. If not provided, it is auto-detected." +    ) +    parser.add_argument( +        "--agent", +        default="aea_0", +        help="The agent name to analyze (default: 'aea_0')." +    ) +    parser.add_argument( +        "--reset-db", +        action="store_true", +        help="Use this flag to reset the log database."
+ ) + parser.add_argument( + "--start-time", + help="Start time in `YYYY-MM-DD H:M:S,MS` format." + ) + parser.add_argument( + "--end-time", + help="End time in `YYYY-MM-DD H:M:S,MS` format." + ) + parser.add_argument( + "--log-level", + choices=["INFO", "DEBUG", "WARNING", "ERROR", "CRITICAL"], + help="Logging level." + ) + parser.add_argument( + "--period", + type=int, + help="Period ID." + ) + parser.add_argument( + "--round", + help="Round name." + ) + parser.add_argument( + "--behaviour", + help="Behaviour name filter." + ) + parser.add_argument( + "--fsm", + action="store_true", + help="Print only the FSM execution path." + ) + parser.add_argument( + "--include-regex", + help="Regex pattern to include in the result." + ) + parser.add_argument( + "--exclude-regex", + help="Regex pattern to exclude from the result." + ) + + return parser.parse_args() + + +def find_build_directory(service_dir): + """Find the appropriate build directory within the service directory.""" + try: + # create a list of all build directories + build_dirs = [ + d for d in os.listdir(service_dir) + if d.startswith("abci_build_") and os.path.isdir(os.path.join(service_dir, d)) + ] + # iterate through the build directories to find the one that contains logs + for build_dir in build_dirs: + build_dir = os.path.join(service_dir, build_dir) + logs_dir = os.path.join(build_dir, "persistent_data", "logs") + # Check if the logs folder exists and contains files + if os.path.exists(logs_dir) and os.listdir(logs_dir): + return build_dir + return os.path.join(service_dir, "abci_build") + except FileNotFoundError: + print(f"Service directory '{service_dir}' not found") + sys.exit(1) + + +def run_analysis(logs_dir, **kwargs): + """Run the log analysis command.""" + command = [ + "poetry", "run", "autonomy", "analyse", "logs", + "--from-dir", logs_dir, + ] + if kwargs.get("agent"): + command.extend(["--agent", kwargs.get("agent")]) + if kwargs.get("reset_db"): + command.extend(["--reset-db"]) + if 
kwargs.get("start_time"): +        command.extend(["--start-time", kwargs.get("start_time")]) +    if kwargs.get("end_time"): +        command.extend(["--end-time", kwargs.get("end_time")]) +    if kwargs.get("log_level"): +        command.extend(["--log-level", kwargs.get("log_level")]) +    if kwargs.get("period") is not None: +        command.extend(["--period", str(kwargs.get("period"))]) +    if kwargs.get("round"): +        command.extend(["--round", kwargs.get("round")]) +    if kwargs.get("behaviour"): +        command.extend(["--behaviour", kwargs.get("behaviour")]) +    if kwargs.get("fsm"): +        command.extend(["--fsm"]) +    if kwargs.get("include_regex"): +        command.extend(["--include-regex", kwargs.get("include_regex")]) +    if kwargs.get("exclude_regex"): +        command.extend(["--exclude-regex", kwargs.get("exclude_regex")]) + +    try: +        subprocess.run(command, check=True) +        print("Analysis completed successfully.") +    except subprocess.CalledProcessError as e: +        print(f"Command failed with exit code {e.returncode}") +        sys.exit(e.returncode) +    except FileNotFoundError: +        print("Poetry or autonomy not found. 
Ensure they are installed and accessible.") + sys.exit(1) + + +if __name__ == "__main__": + # Parse user arguments + args = _parse_args() + + # Determine the logs directory + if args.from_dir: + logs_dir = args.from_dir + if not os.path.exists(logs_dir): + print(f"Specified logs directory '{logs_dir}' not found.") + sys.exit(1) + else: + # Auto-detect the logs directory + build_dir = find_build_directory(args.service_dir) + logs_dir = os.path.join(build_dir, "persistent_data", "logs") + if not os.path.exists(logs_dir): + print(f"Logs directory '{logs_dir}' not found.") + sys.exit(1) + + # Run the analysis + run_analysis(logs_dir, **vars(args)) diff --git a/report.py b/report.py index a18fd3d1..7fe37242 100644 --- a/report.py +++ b/report.py @@ -111,6 +111,9 @@ SECONDS_PER_DAY = 60 * 60 * 24 OUTPUT_WIDTH = 80 +TRADER_CONTAINER_PREFIX = "trader" +AGENT_CONTAINER_IDENTIFIER = "abci" +NODE_CONTAINER_IDENTIFIER = "tm" class ColorCode: @@ -238,19 +241,19 @@ def _warning_message(current_value: int, threshold: int = 0, message: str = "") def _get_agent_status() -> str: client = docker.from_env() - trader_abci_container = ( - client.containers.get("trader_abci_0") - if "trader_abci_0" in [c.name for c in client.containers.list()] - else None - ) - trader_tm_container = ( - client.containers.get("trader_tm_0") - if "trader_tm_0" in [c.name for c in client.containers.list()] - else None - ) - - is_running = trader_abci_container and trader_tm_container - return _color_bool(is_running, "Running", "Stopped") + agent_running = node_running = service_running = False + for container in client.containers.list(): + container_name = container.name + if TRADER_CONTAINER_PREFIX in container_name: + if AGENT_CONTAINER_IDENTIFIER in container_name: + agent_running = True + if NODE_CONTAINER_IDENTIFIER in container_name: + node_running = True + if agent_running and node_running: + service_running = True + break + + return _color_bool(service_running, "Running", "Stopped") def 
_parse_args() -> Any: diff --git a/run_service.sh b/run_service.sh index fe62b688..0e7a8484 100755 --- a/run_service.sh +++ b/run_service.sh @@ -737,7 +737,7 @@ directory="trader" service_repo=https://github.com/$org_name/$directory.git # This is a tested version that works well. # Feel free to replace this with a different version of the repo, but be careful as there might be breaking changes -service_version="v0.21.4" +service_version="v0.22.0" # Define constants for on-chain interaction gnosis_chain_id=100 @@ -832,10 +832,15 @@ command -v docker >/dev/null 2>&1 || exit 1 } -docker rm -f abci0 node0 trader_abci_0 trader_tm_0 &> /dev/null || +containers=$(docker ps --filter name=trader_* -aq) &> /dev/null || { echo >&2 "Docker is not running!"; exit 1 } +if [[ -n "$containers" ]]; then + docker rm -f $containers +fi + +docker network prune --force try_read_storage @@ -1275,8 +1280,7 @@ export DISABLE_TRADING=false export STOP_TRADING_IF_STAKING_KPI_MET=true export RESET_PAUSE_DURATION=45 export MECH_WRAPPED_NATIVE_TOKEN_ADDRESS=$WXDAI_ADDRESS -export MECH_CHAIN_ID=ethereum -export TOOLS_ACCURACY_HASH=QmebjcPizAdVFSUAfMBgAGFJhLPVBMvV68LxhSq4LPvv9d +export TOOLS_ACCURACY_HASH=QmZSkE49cnp3KeR9r6bp3hP4M2LPAmG4beHq4isz55ghv5 if [ -n "$SUBGRAPH_API_KEY" ]; then export CONDITIONAL_TOKENS_SUBGRAPH_URL="https://gateway-arbitrum.network.thegraph.com/api/$SUBGRAPH_API_KEY/subgraphs/id/7s9rGBffUTL8kDZuxvvpuc46v44iuDarbrADBFw5uVp2" @@ -1287,9 +1291,9 @@ if [ -n "$SUBGRAPH_API_KEY" ]; then fi service_dir="trader_service" -build_dir="abci_build" +directory=$(ls -d "$service_dir"/abci_build_???? 
2>/dev/null || echo "$service_dir/abci_build") +build_dir=$(basename "$directory") build_dir_k8s="abci_build_k8s" -directory="$service_dir/$build_dir" if [ -d $directory ] then @@ -1329,10 +1333,12 @@ if [[ -d "$build_dir_k8s" ]]; then echo "Directory removed: $build_dir" fi export OPEN_AUTONOMY_PRIVATE_KEY_PASSWORD="$password" && poetry run autonomy deploy build --kubernetes "../../$keys_json_path" --n $n_agents -ltm +build_dir=$(ls -d abci_build_???? 2>/dev/null || echo "abci_build") mv $build_dir $build_dir_k8s echo "Kubernetes deployment built on ./trader/$service_dir/$build_dir_k8s" export OPEN_AUTONOMY_PRIVATE_KEY_PASSWORD="$password" && poetry run autonomy deploy build "../../$keys_json_path" --n $n_agents -ltm +build_dir=$(ls -d abci_build_???? 2>/dev/null || echo "abci_build") echo "Docker Compose deployment built on ./trader/$service_dir/$build_dir" cd .. @@ -1340,8 +1346,15 @@ cd .. # warm start is disabled as no global weights are provided to calibrate the tools' weights # warm_start -add_volume_to_service_docker_compose "$PWD/trader_service/abci_build/docker-compose.yaml" "trader_abci_0" "/data" "$path_to_store" -add_volume_to_service_k8s "$PWD/trader_service/abci_build_k8s/build.yaml" +directory="$service_dir/$build_dir" +if [ "$build_dir" = "abci_build" ]; then + suffix="abci_build" +else + suffix=${build_dir##*_} +fi +abci_0="trader${suffix}_abci_0" +add_volume_to_service_docker_compose "$PWD/$directory/docker-compose.yaml" "$abci_0" "/data" "$path_to_store" +add_volume_to_service_k8s "$PWD/$service_dir/$build_dir_k8s/build.yaml" sudo chown -R $(whoami) "$path_to_store" if [[ "$build_only" == true ]]; then diff --git a/scripts/mech_events.py b/scripts/mech_events.py index 76cd607b..7f0b0214 100644 --- a/scripts/mech_events.py +++ b/scripts/mech_events.py @@ -30,6 +30,7 @@ from typing import Any, ClassVar, Dict import requests +from dotenv import dotenv_values from gql import Client, gql from gql.transport.requests import RequestsHTTPTransport from 
tqdm import tqdm @@ -38,7 +39,9 @@ SCRIPT_PATH = Path(__file__).resolve().parent STORE_PATH = Path(SCRIPT_PATH, "..", ".trader_runner") -MECH_EVENTS_JSON_PATH = Path(STORE_PATH, "mech_events.json") +ENV_FILENAME = ".env" +DOTENV_PATH = STORE_PATH / ENV_FILENAME +MECH_EVENTS_JSON_PATH = STORE_PATH / "mech_events.json" HTTP = "http://" HTTPS = HTTP[:4] + "s" + HTTP[4:] CID_PREFIX = "f01701220" @@ -47,7 +50,9 @@ DEFAULT_MECH_FEE = 10000000000000000 DEFAULT_FROM_TIMESTAMP = 0 DEFAULT_TO_TIMESTAMP = 2147483647 -MECH_SUBGRAPH_URL = "https://api.studio.thegraph.com/query/57238/mech/0.0.2" +MECH_SUBGRAPH_URL_TEMPLATE = Template( + "https://gateway.thegraph.com/api/${SUBGRAPH_API_KEY}/subgraphs/id/4YGoX3iXUni1NBhWJS5xyKcntrAzssfytJK7PQxxQk5g" +) SUBGRAPH_HEADERS = { "Accept": "application/json, multipart/mixed", "Content-Type": "application/json", @@ -74,6 +79,7 @@ """ ) + @dataclass class MechBaseEvent: # pylint: disable=too-many-instance-attributes """Base class for mech's on-chain event representation.""" @@ -159,7 +165,7 @@ def _read_mech_events_data_from_file() -> Dict[str, Any]: if mech_events_data.get("db_version", 0) < MECH_EVENTS_DB_VERSION: current_time = time.strftime("%Y-%m-%d_%H-%M-%S") old_db_filename = f"mech_events.{current_time}.old.json" - os.rename(MECH_EVENTS_JSON_PATH, Path(STORE_PATH, old_db_filename)) + os.rename(MECH_EVENTS_JSON_PATH, STORE_PATH / old_db_filename) mech_events_data = {} mech_events_data["db_version"] = MECH_EVENTS_DB_VERSION except FileNotFoundError: @@ -190,17 +196,26 @@ def _write_mech_events_data_to_file( last_write_time = now +def get_mech_subgraph_url() -> str: + """Get the mech subgraph's URL.""" + env_file_vars = dotenv_values(DOTENV_PATH) + return MECH_SUBGRAPH_URL_TEMPLATE.substitute(env_file_vars) + + def _query_mech_events_subgraph( sender: str, event_cls: type[MechBaseEvent] ) -> dict[str, Any]: """Query the subgraph.""" - transport = RequestsHTTPTransport(url=MECH_SUBGRAPH_URL) + mech_subgraph_url = 
get_mech_subgraph_url() + transport = RequestsHTTPTransport(mech_subgraph_url) client = Client(transport=transport, fetch_schema_from_transport=True) subgraph_event_set_name = f"{event_cls.subgraph_event_name}s" all_results: dict[str, Any] = {"data": {subgraph_event_set_name: []}} - query = MECH_EVENTS_SUBGRAPH_QUERY_TEMPLATE.safe_substitute(subgraph_event_set_name=subgraph_event_set_name) + query = MECH_EVENTS_SUBGRAPH_QUERY_TEMPLATE.safe_substitute( + subgraph_event_set_name=subgraph_event_set_name + ) id_gt = "" while True: variables = { diff --git a/stop_service.sh b/stop_service.sh index 3b4b9178..92414d7f 100755 --- a/stop_service.sh +++ b/stop_service.sh @@ -22,4 +22,8 @@ export PYTHONUTF8=1 -cd trader; poetry run autonomy deploy stop --build-dir trader_service/abci_build; cd .. +cd trader +service_dir="trader_service" +build_dir=$(ls -d "$service_dir"/abci_build_???? 2>/dev/null || echo "$service_dir/abci_build") +poetry run autonomy deploy stop --build-dir "$build_dir" +cd ..