68 changes: 10 additions & 58 deletions base/settings.py
@@ -97,6 +97,14 @@
REST_FRAMEWORK = {
"DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.LimitOffsetPagination",
"PAGE_SIZE": DEFAULT_PAGE_SIZE,
"DEFAULT_THROTTLE_CLASSES": [
# "rest_framework.throttling.UserRateThrottle",
"rest_framework.throttling.AnonRateThrottle",
],
"DEFAULT_THROTTLE_RATES": {
# "user": "100/day",
"anon": "100/minute",
},
}
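
For context, a hedged sketch of how these defaults play out at the view level (the view names and the tightened per-view rate below are hypothetical, not part of this PR). With `AnonRateThrottle` active globally, unauthenticated clients share the "anon" rate of 100 requests per minute and receive HTTP 429 (with a Retry-After header) once it is exceeded:

```python
from rest_framework.response import Response
from rest_framework.throttling import AnonRateThrottle
from rest_framework.views import APIView


class StatsView(APIView):
    # No throttle_classes set: inherits the global 100/minute anonymous limit.
    def get(self, request):
        return Response({"ok": True})


class TightAnonThrottle(AnonRateThrottle):
    rate = "10/minute"  # hypothetical stricter rate for an expensive endpoint


class ExpensiveView(APIView):
    throttle_classes = [TightAnonThrottle]  # overrides the global default for this view only

    def get(self, request):
        return Response({"ok": True})
```

Since `UserRateThrottle` stays commented out, authenticated requests remain unthrottled by the global defaults.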


@@ -273,64 +281,8 @@
LOGGING["loggers"]["indexer"]["handlers"].append("watchtower")
LOGGING["loggers"]["jobs"]["handlers"].append("watchtower")

# log_level = getattr(logging, LOG_LEVEL, logging.INFO)
# print("LOG_LEVEL: ", LOG_LEVEL)
# # print("log_level: ", log_level)

# if ENVIRONMENT != "local":
# AWS_REGION_NAME = "us-east-1"
# boto3_logs_client = boto3.client("logs", region_name=AWS_REGION_NAME)


# LOGGING = {
# "version": 1,
# "disable_existing_loggers": False,
# "root": {
# "level": log_level,
# # Adding the watchtower handler here causes all loggers in the project that
# # have propagate=True (the default) to send messages to watchtower. If you
# # wish to send only from specific loggers instead, remove "watchtower" here
# # and configure individual loggers below.
# # "handlers": ["watchtower", "console"],
# "handlers": ["console"],
# },
# "handlers": {
# "console": {
# "class": "logging.StreamHandler",
# },
# # "watchtower": {
# # "class": "watchtower.CloudWatchLogHandler",
# # "boto3_client": boto3_logs_client,
# # "log_group_name": "django-indexer",
# # # Decrease the verbosity level here to send only those logs to watchtower,
# # # but still see more verbose logs in the console. See the watchtower
# # # documentation for other parameters that can be set here.
# # "level": log_level,
# # },
# },
# "loggers": {
# # In the debug server (`manage.py runserver`), several Django system loggers cause
# # deadlocks when using threading in the logging handler, and are not supported by
# # watchtower. This limitation does not apply when running on production WSGI servers
# # (gunicorn, uwsgi, etc.), so we recommend that you set `propagate=True` below in your
# # production-specific Django settings file to receive Django system logs in CloudWatch.
# "django": {"level": log_level, "handlers": ["console"], "propagate": False}
# # Add any other logger-specific configuration here.
# },
# }

# if ENVIRONMENT != "local":
# LOGGING["handlers"]["watchtower"] = {
# "class": "watchtower.CloudWatchLogHandler",
# "boto3_client": boto3_logs_client,
# "log_group_name": "django-indexer",
# # Decrease the verbosity level here to send only those logs to watchtower,
# # but still see more verbose logs in the console. See the watchtower
# # documentation for other parameters that can be set here.
# "level": log_level,
# }

# LOGGING["root"]["handlers"].append("watchtower")

## SENTRY CONFIG

sentry_sdk.init(
environment=ENVIRONMENT,
11 changes: 3 additions & 8 deletions indexer_app/handler.py
@@ -7,10 +7,7 @@
from near_lake_framework import near_primitives

from base.utils import convert_ns_to_utc
from pots.utils import (
match_pot_factory_version_pattern,
match_pot_subaccount_version_pattern,
)
from pots.utils import match_pot_factory_pattern, match_pot_subaccount_pattern

from .logging import logger
from .utils import (
@@ -129,14 +126,12 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess

match method_name:
case "new":
if match_pot_factory_version_pattern(receipt.receiver_id):
if match_pot_factory_pattern(receipt.receiver_id):
logger.info(f"matched for factory pattern: {args_dict}")
await handle_new_pot_factory(
args_dict, receiver_id, created_at
)
elif match_pot_subaccount_version_pattern(
receipt.receiver_id
):
elif match_pot_subaccount_pattern(receipt.receiver_id):
logger.info(
f"new pot deployment: {args_dict}, {action}"
)
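
A condensed, standalone sketch of the dispatch this hunk performs for `new` calls. The two matchers are simplified stand-ins for the renamed helpers in pots/utils.py shown below (mainnet patterns only); the receiver IDs and the "ignored" label are illustrative:

```python
import re


def match_pot_factory_pattern(receiver: str) -> bool:
    return bool(re.match(r"^v\d+\.potfactory\.potlock\.near$", receiver))


def match_pot_subaccount_pattern(receiver: str) -> bool:
    return bool(re.match(r"^[a-zA-Z0-9_]+\.v\d+\.potfactory\.potlock\.near$", receiver))


def route_new_call(receiver_id: str) -> str:
    if match_pot_factory_pattern(receiver_id):
        return "handle_new_pot_factory"  # factory initialization
    if match_pot_subaccount_pattern(receiver_id):
        return "pot deployment branch"   # concrete handler sits outside the shown hunk
    return "ignored"


print(route_new_call("v1.potfactory.potlock.near"))        # handle_new_pot_factory
print(route_new_call("mypot.v1.potfactory.potlock.near"))  # pot deployment branch
print(route_new_call("unrelated.near"))                    # ignored
```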
12 changes: 9 additions & 3 deletions pots/utils.py
@@ -1,15 +1,21 @@
import re

BASE_PATTERN = r"v\d+\.potfactory\.potlock\.near$"
from django.conf import settings

BASE_PATTERN = (
r"^potlock\.testnet$"
if settings.ENVIRONMENT == "testnet"
else r"v\d+\.potfactory\.potlock\.near$"
)

def match_pot_factory_version_pattern(receiver):

def match_pot_factory_pattern(receiver):
"""Matches the base pot factory version pattern without a subaccount. NB: does not currently handle testnet factory."""
pattern = f"^{BASE_PATTERN}"
return bool(re.match(pattern, receiver))


def match_pot_subaccount_version_pattern(receiver):
def match_pot_subaccount_pattern(receiver):
"""Matches the pot factory version pattern with a subaccount. NB: does not currently handle testnet factory."""
pattern = f"^[a-zA-Z0-9_]+\.{BASE_PATTERN}"
return bool(re.match(pattern, receiver))
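
A standalone illustration of the environment switch introduced above (the account IDs are examples; the real module reads settings.ENVIRONMENT at import time, as shown in the diff):

```python
import re

# Simplified stand-ins for the new BASE_PATTERN selection; mirrors the diff above.
TESTNET_BASE = r"^potlock\.testnet$"
MAINNET_BASE = r"v\d+\.potfactory\.potlock\.near$"


def factory_matches(receiver: str, base_pattern: str) -> bool:
    # Same shape as match_pot_factory_pattern: anchor the base pattern at the start.
    return bool(re.match(f"^{base_pattern}", receiver))


print(factory_matches("potlock.testnet", TESTNET_BASE))             # True when ENVIRONMENT == "testnet"
print(factory_matches("v2.potfactory.potlock.near", MAINNET_BASE))  # True on mainnet
print(factory_matches("v2.potfactory.potlock.near", TESTNET_BASE))  # False: wrong environment
```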
73 changes: 0 additions & 73 deletions scripts/after_install_dev.sh
@@ -65,76 +65,3 @@ else
fi

echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install_dev.sh completed" >> "$LOG_FILE"




# #!/bin/bash
# # Log output to a specific file
# LOG_FILE="/home/ec2-user/django-indexer-dev/logs/deploy.log"

# # print placeholder
# echo -e "\n THIS IS A PLACEHOLDER \n" >> "$LOG_FILE"

# echo -e "\n\n" >> "$LOG_FILE"
# echo "=========================================" >> "$LOG_FILE"
# echo "Running after_install_dev.sh at $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE"
# echo "=========================================" >> "$LOG_FILE"

# # Load env vars
# source /home/ec2-user/.bashrc

# # Set correct ownership recursively for project directory
# sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer/
# echo "$(date '+%Y-%m-%d %H:%M:%S') - Corrected ownership to ec2-user:nginx" >> "$LOG_FILE"

# # Set the necessary permissions
# sudo chmod -R 775 /home/ec2-user/django-indexer/
# echo "$(date '+%Y-%m-%d %H:%M:%S') - Set permissions to 775" >> "$LOG_FILE"

# # Restart nginx to apply any configuration changes
# sudo systemctl restart nginx
# echo "$(date '+%Y-%m-%d %H:%M:%S') - Restarted nginx" >> "$LOG_FILE"

# # Define the project directory
# PROJECT_DIR="/home/ec2-user/django-indexer"

# # Navigate to the project directory
# cd "$PROJECT_DIR"

# # Source the specific poetry virtual environment
# source "/home/ec2-user/.cache/pypoetry/virtualenvs/django-indexer-Y-SQFfhb-py3.11/bin/activate" # TODO: UPDATE THIS

# # Install dependencies using Poetry
# echo "$(date '+%Y-%m-%d %H:%M:%S') - Installing dependencies with Poetry" >> "$LOG_FILE"
# poetry install >> "$LOG_FILE"
# echo "$(date '+%Y-%m-%d %H:%M:%S') - Dependencies installed" >> "$LOG_FILE"

# # Check if there are pending migrations and log the output
# echo "Checking for pending migrations..." >> "$LOG_FILE"
# PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep '\[ \]' 2>&1) # Redirect stderr to stdout
# echo "Migration check output: $PENDING_MIGRATIONS" >> "$LOG_FILE"

# # Log the full output of showmigrations
# echo "Checking for pending migrations..." >> "$LOG_FILE"
# poetry run python manage.py showmigrations >> "$LOG_FILE" 2>&1 # Logging full output to diagnose

# # Check for unapplied migrations
# PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations

# if [ "$PENDING_MIGRATIONS" -gt 0 ]; then
# echo "Migrations found; stopping services..." >> "$LOG_FILE"
# sudo systemctl stop gunicorn celery-indexer-worker celery-beat-worker celery-beat

# echo 'Applying migrations...' >> "$LOG_FILE"
# poetry run python manage.py migrate >> "$LOG_FILE" 2>&1

# echo 'Starting services...' >> "$LOG_FILE"
# sudo systemctl start gunicorn celery-indexer-worker celery-beat-worker celery-beat
# else
# echo 'No migrations found. Running collectstatic and restarting services...' >> "$LOG_FILE"
# poetry run python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1
# sudo systemctl restart gunicorn celery-indexer-worker celery-beat-worker celery-beat
# fi

# echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install_dev.sh completed" >> "$LOG_FILE"