Skip to content

Commit

Permalink
Merge e68da4b into e24d429
Browse files Browse the repository at this point in the history
  • Loading branch information
ninoseki committed Nov 3, 2022
2 parents e24d429 + e68da4b commit 78a0021
Show file tree
Hide file tree
Showing 34 changed files with 911 additions and 1,239 deletions.
15 changes: 14 additions & 1 deletion .github/workflows/test.yaml
Expand Up @@ -6,10 +6,21 @@ jobs:
test:
runs-on: ubuntu-latest

services:
redis:
image: "redis/redis-stack:6.2.4-v2"
ports:
- 6379:6379
options: >-
--health-cmd "redis-cli ping"
--health-interval 10s
--health-timeout 5s
--health-retries 5
strategy:
matrix:
python-version: [3.9]
poetry-version: [1.1.8]
poetry-version: [1.1.15]

steps:
- uses: actions/checkout@v2
Expand All @@ -27,6 +38,8 @@ jobs:
poetry install
- name: Run tests
env:
REDIS_URL: redis://localhost:6379
run: |
poetry run pytest -v --cov=app --cov-report=term-missing
Expand Down
11 changes: 6 additions & 5 deletions .pre-commit-config.yaml
Expand Up @@ -11,26 +11,27 @@ repos:
]

- repo: https://github.com/asottile/pyupgrade
rev: v2.25.0
rev: v3.2.0
hooks:
- id: pyupgrade
args: [--py37-plus]
args: [--py39-plus]

- repo: https://gitlab.com/pycqa/flake8
rev: 3.9.2
rev: 5.0.4
hooks:
- id: flake8
additional_dependencies: [flake8-print]
args: ["--ignore=E501,W503,E203"]

- repo: https://github.com/timothycrosley/isort
rev: 5.9.3
rev: 5.10.1
hooks:
- id: isort
args: ["--profile", "black", "--filter-files"]
additional_dependencies: [toml]
exclude: ^.*/?setup\.py$

- repo: https://github.com/psf/black
rev: 21.8b0
rev: 22.10.0
hooks:
- id: black
2 changes: 1 addition & 1 deletion app.Dockerfile
Expand Up @@ -6,7 +6,7 @@ RUN apk --no-cache add build-base gcc musl-dev python3-dev libffi-dev openssl-de

COPY pyproject.toml poetry.lock ./

RUN pip install poetry && \
RUN pip install poetry==1.1.15 && \
poetry config virtualenvs.create false && \
poetry install --no-dev

Expand Down
9 changes: 4 additions & 5 deletions app/api/v1/endpoints/domains.py
@@ -1,10 +1,9 @@
import json
from typing import List

import aioredis
from fastapi import APIRouter, Depends
from fastapi_cache.coder import PickleCoder
from fastapi_cache.decorator import cache
from redis import asyncio as aioredis

from app import schemas
from app.core.dependencies import get_redis
Expand All @@ -14,21 +13,21 @@


@cache(coder=PickleCoder, expire=60 * 5)
async def _get_domains(redis: aioredis.Redis) -> list[schemas.Domain]:
    """Load every suspicious domain stored under ``KEY_PREFIX`` from Redis.

    Results are memoized for 5 minutes via fastapi-cache; ``PickleCoder`` is
    used because the cached value is a list of pydantic models, not plain JSON.

    Args:
        redis: an async Redis client.

    Returns:
        Parsed ``schemas.Domain`` objects, or an empty list when no keys exist.
    """
    keys = await redis.keys(f"{KEY_PREFIX}*")
    # guard the mget call: redis.mget(*[]) would be called with no arguments
    if not keys:
        return []

    values = await redis.mget(*keys)

    dicts: list[dict] = [json.loads(value) for value in values]
    return [schemas.Domain.parse_obj(d) for d in dicts]


@router.get(
    "/",
    summary="Get the latest suspicious domains",
    response_model=list[schemas.Domain],
)
async def get_domains(redis: aioredis.Redis = Depends(get_redis)):
    """API endpoint: return the latest suspicious domains via the cached helper."""
    return await _get_domains(redis)
4 changes: 2 additions & 2 deletions app/arq/worker.py
@@ -1,4 +1,4 @@
from typing import List, Optional
from typing import Optional

from arq import cron
from arq.connections import RedisSettings
Expand All @@ -20,7 +20,7 @@ async def shutdown(ctx: dict) -> None:

async def save_newly_suspicious_domains_from_security_trails(
_: dict,
) -> List[dataclasses.DomainWithVerdiction]:
) -> list[dataclasses.DomainWithVerdiction]:
suspicious_domains = await SuspiciousDomainsFactory.from_security_trails()

async with get_redis_with_context() as redis:
Expand Down
6 changes: 3 additions & 3 deletions app/cache/backend.py
@@ -1,7 +1,7 @@
import time
from asyncio import Lock
from dataclasses import dataclass
from typing import Dict, Optional, Tuple
from typing import Optional

from fastapi_cache.backends import Backend

Expand All @@ -13,7 +13,7 @@ class Value:


class InMemoryBackend(Backend):
_store: Dict[str, Value] = {}
_store: dict[str, Value] = {}
_lock = Lock()

@property
Expand All @@ -30,7 +30,7 @@ def _get(self, key: str) -> Optional[Value]:

return None

async def get_with_ttl(self, key: str) -> Tuple[int, Optional[str]]:
async def get_with_ttl(self, key: str) -> tuple[int, Optional[str]]:
async with self._lock:
v = self._get(key)
if v:
Expand Down
10 changes: 5 additions & 5 deletions app/core/dependencies.py
@@ -1,8 +1,9 @@
from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager
from typing import AsyncGenerator, Optional
from typing import Optional

import aioredis
import arq
from redis import asyncio as aioredis

from app.core import settings

Expand All @@ -20,12 +21,11 @@ async def get_redis_with_context() -> AsyncGenerator[aioredis.Redis, None]:
redis: Optional[aioredis.Redis] = None

try:
redis = await aioredis.create_redis_pool(str(settings.REDIS_URL))
redis = await aioredis.from_url(str(settings.REDIS_URL))
yield redis
finally:
if redis is not None:
redis.close()
await redis.wait_closed()
await redis.close()


async def get_redis():
Expand Down
8 changes: 5 additions & 3 deletions app/core/events.py
@@ -1,10 +1,11 @@
from typing import Any, Callable, Coroutine, Union
from collections.abc import Coroutine
from typing import Any, Callable, Union

import aioredis
from fastapi import FastAPI
from fastapi_cache import FastAPICache
from fastapi_cache.backends.redis import RedisBackend
from loguru import logger
from redis import asyncio as aioredis

from app.cache.backend import InMemoryBackend
from app.core import settings
Expand All @@ -16,9 +17,10 @@ def create_start_app_handler(
async def start_app() -> None:
# initialize FastAPI cache
backend: Union[InMemoryBackend, RedisBackend] = InMemoryBackend()

if settings.REDIS_URL != "" and settings.TESTING is False:
try:
redis = await aioredis.create_redis_pool(str(settings.REDIS_URL))
redis = aioredis.from_url(str(settings.REDIS_URL))
backend = RedisBackend(redis)
except (ConnectionRefusedError, OSError) as e:
logger.error("Failed to connect to Redis")
Expand Down
6 changes: 3 additions & 3 deletions app/dataclasses/certstream.py
@@ -1,12 +1,12 @@
from dataclasses import dataclass
from typing import Any, Dict, List, Optional
from typing import Any, Optional

from dataclasses_json import DataClassJsonMixin


@dataclass
class LeafCERT:
    """Leaf certificate data extracted from a certstream message."""

    # all domain names reported for this leaf certificate
    all_domains: list[str]


@dataclass
Expand All @@ -32,7 +32,7 @@ class CertStreamUpdateMessage(DataClassJsonMixin):

@classmethod
def from_message(
cls, message: Dict[str, Any]
cls, message: dict[str, Any]
) -> Optional["CertStreamUpdateMessage"]:
message_type = message.get("message_type")
if message_type == "certificate_update":
Expand Down
4 changes: 2 additions & 2 deletions app/dataclasses/domain.py
@@ -1,7 +1,7 @@
import functools
import re
from dataclasses import dataclass
from typing import List, cast
from typing import cast

import tld

Expand Down Expand Up @@ -36,5 +36,5 @@ def without_tld(self) -> str:
return f"{self._parsed.subdomain}.{self._parsed.domain}"

@functools.cached_property
def inner_words(self) -> list[str]:
    """Word tokens of the domain minus its TLD, split on non-word characters.

    Cached per instance since ``without_tld`` does not change after parsing.
    """
    return re.split(r"\W+", self.without_tld)
4 changes: 2 additions & 2 deletions app/dataclasses/rule.py
@@ -1,9 +1,9 @@
from dataclasses import dataclass
from typing import List, Optional
from typing import Optional


@dataclass
class Rule:
    """A detection rule result: its name, score contribution and optional notes."""

    name: str
    score: int
    # free-form notes attached by the rule; None when there is no extra detail
    notes: Optional[list[str]] = None
3 changes: 1 addition & 2 deletions app/dataclasses/verdiction.py
@@ -1,6 +1,5 @@
import functools
from dataclasses import asdict, dataclass
from typing import List

from app.rules import match_rules

Expand All @@ -14,7 +13,7 @@ class DomainWithVerdiction(Domain):
updated_on: str

@functools.cached_property
def matched_rules(self) -> list[Rule]:
    """Detection rules matching this domain, computed once per instance."""
    return match_rules(self)

@functools.cached_property
Expand Down
10 changes: 5 additions & 5 deletions app/factories/suspicious_domains.py
@@ -1,4 +1,4 @@
from typing import List, Optional
from typing import Optional

from app import dataclasses
from app.services.securitytrails import SecurityTrails
Expand All @@ -8,12 +8,12 @@
class SuspiciousDomainsFactory:
@classmethod
def from_list(
cls, domains: List[str], source: str, *, updated_on: Optional[str] = None
) -> List[dataclasses.DomainWithVerdiction]:
cls, domains: list[str], source: str, *, updated_on: Optional[str] = None
) -> list[dataclasses.DomainWithVerdiction]:
if updated_on is None:
updated_on = get_today_in_isoformat()

suspicious_domains: List[dataclasses.DomainWithVerdiction] = []
suspicious_domains: list[dataclasses.DomainWithVerdiction] = []
for new_domain in domains:
domain = dataclasses.DomainWithVerdiction(
fqdn=new_domain, source=source, updated_on=updated_on
Expand All @@ -24,7 +24,7 @@ def from_list(
return suspicious_domains

@classmethod
async def from_security_trails(cls) -> List[dataclasses.DomainWithVerdiction]:
async def from_security_trails(cls) -> list[dataclasses.DomainWithVerdiction]:
st = SecurityTrails()
date = get_today_in_isoformat()
new_domains = await st.download_new_domain_feed(date=date)
Expand Down
9 changes: 5 additions & 4 deletions app/main.py
Expand Up @@ -9,7 +9,7 @@
from app.views import view_router


def create_app() -> FastAPI:
def create_app(add_event_handlers: bool = True) -> FastAPI:
logger.add(
settings.LOG_FILE, level=settings.LOG_LEVEL, backtrace=settings.LOG_BACKTRACE
)
Expand All @@ -21,9 +21,10 @@ def create_app() -> FastAPI:
# add middleware
app.add_middleware(GZipMiddleware, minimum_size=1000)

# add event handlers
app.add_event_handler("startup", create_start_app_handler(app))
app.add_event_handler("shutdown", create_stop_app_handler(app))
if add_event_handlers:
# add event handlers
app.add_event_handler("startup", create_start_app_handler(app))
app.add_event_handler("shutdown", create_stop_app_handler(app))

# add routes
app.include_router(api_router, prefix="/api/v1")
Expand Down
5 changes: 2 additions & 3 deletions app/redis/__init__.py
@@ -1,7 +1,6 @@
import json
from typing import List

import aioredis
from redis import asyncio as aioredis

from app import dataclasses

Expand All @@ -12,7 +11,7 @@ class Redis:
@classmethod
async def save_suspicious_domains(
cls,
suspicious_domains: List[dataclasses.DomainWithVerdiction],
suspicious_domains: list[dataclasses.DomainWithVerdiction],
*,
redis: aioredis.Redis
):
Expand Down
3 changes: 1 addition & 2 deletions app/redis/utils.py
@@ -1,5 +1,4 @@
import json
from typing import List

import aioredis

Expand All @@ -9,7 +8,7 @@


async def save_suspicious_domains(
suspicious_domains: List[dataclasses.DomainWithVerdiction], *, redis: aioredis.Redis
suspicious_domains: list[dataclasses.DomainWithVerdiction], *, redis: aioredis.Redis
):
tr = redis.multi_exec()

Expand Down
8 changes: 4 additions & 4 deletions app/rules/__init__.py
@@ -1,4 +1,4 @@
from typing import List, Optional, Type
from typing import Optional

from app import dataclasses

Expand All @@ -10,7 +10,7 @@
from .levenshtein import LevenshteinDistance
from .tld import TLD

RULES: List[Type[AbstractRule]] = [
RULES: list[type[AbstractRule]] = [
Dash(),
Dot(),
Keyword(),
Expand All @@ -27,12 +27,12 @@ def has_high_reputation(domain: dataclasses.Domain) -> bool:
return False


def match_rules(domain: dataclasses.Domain) -> List[dataclasses.Rule]:
def match_rules(domain: dataclasses.Domain) -> list[dataclasses.Rule]:
# skip matching if it has a high reputation
if has_high_reputation(domain):
return []

matched_rules: List[Optional[dataclasses.Rule]] = []
matched_rules: list[Optional[dataclasses.Rule]] = []
for rule in RULES:
matched_rules.append(rule.match(domain))

Expand Down

0 comments on commit 78a0021

Please sign in to comment.