Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 10 additions & 3 deletions bases/ecoindex/cli/arguments_handler.py
Comment thread
vvatelot marked this conversation as resolved.
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
from tempfile import NamedTemporaryFile
from typing import Set
from urllib.parse import urlparse
from urllib.parse import urlparse, urlunparse

from ecoindex.cli.crawl import EcoindexSpider
from ecoindex.cli.helper import replace_localhost_with_hostdocker
from ecoindex.models import WindowSize

from pydantic import AnyHttpUrl, validate_call
from pydantic.types import FilePath
from scrapy.crawler import CrawlerProcess
Expand Down Expand Up @@ -36,8 +38,10 @@ def get_urls_from_file(urls_file: FilePath) -> Set[str]:

def get_urls_recursive(main_url: str) -> Set[str]:
parsed_url = urlparse(main_url)
domain = parsed_url.netloc
main_url = f"{parsed_url.scheme}://{domain}"
host_infos = replace_localhost_with_hostdocker(parsed_url.netloc)
netloc = host_infos.netloc
domain = host_infos.domain
main_url = f"{parsed_url.scheme}://{netloc}"
process = CrawlerProcess()

with NamedTemporaryFile(mode="w+t") as temp_file:
Expand All @@ -58,6 +62,9 @@ def get_urls_recursive(main_url: str) -> Set[str]:
def get_url_from_args(urls_arg: list[AnyHttpUrl]) -> set[AnyHttpUrl]:
    """Normalize CLI-provided URLs into a deduplicated set.

    Each URL's network location is passed through
    ``replace_localhost_with_hostdocker`` so that, when running inside the
    Docker image, ``localhost`` targets are rewritten to the
    ``host.docker.internal`` alias. All other URL components are kept as-is.

    Args:
        urls_arg: URLs collected from the command-line arguments.

    Returns:
        The set of (possibly rewritten) URLs to analyze.
    """
    rewritten: set[AnyHttpUrl] = set()
    for original in urls_arg:
        parts = urlparse(str(original))
        host_infos = replace_localhost_with_hostdocker(parts.netloc)
        # Reassemble the URL with the (possibly substituted) netloc.
        rewritten.add(
            AnyHttpUrl(
                urlunparse(
                    (
                        parts.scheme,
                        host_infos.netloc,
                        parts.path,
                        parts.params,
                        parts.query,
                        parts.fragment,
                    )
                )
            )
        )

    return rewritten
Expand Down
15 changes: 14 additions & 1 deletion bases/ecoindex/cli/helper.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from asyncio import run
from ecoindex.config import Settings

from ecoindex.models import Result, WindowSize
from ecoindex.models import Result, WindowSize, CliHost
from ecoindex.scraper import EcoindexScraper


Expand Down Expand Up @@ -36,3 +37,15 @@ def run_page_analysis(
),
False,
)


def replace_localhost_with_hostdocker(netloc: str) -> CliHost:
    """Map a URL netloc to the host/domain pair the CLI should use.

    When running inside the Docker image (``Settings().DOCKER_CONTAINER`` is
    true), ``localhost`` cannot reach the user's machine from within the
    container, so it is replaced with the ``host.docker.internal`` alias.
    Outside Docker, the netloc is returned unchanged.

    Args:
        netloc: Network-location part of a URL (``host`` or ``host:port``).

    Returns:
        A ``CliHost`` whose ``netloc`` is the (possibly rewritten) network
        location and whose ``domain`` is the bare hostname to crawl against.
    """
    # NOTE(review): the substring test also matches hosts that merely
    # *contain* "localhost" (e.g. "mylocalhost.dev"); kept as-is to
    # preserve existing behavior — confirm whether an exact-host match
    # is intended.
    if Settings().DOCKER_CONTAINER and "localhost" in netloc:
        domain = "host.docker.internal"
        netloc = netloc.replace("localhost", domain)
    elif "localhost" in netloc:
        domain = "localhost"
    else:
        domain = netloc

    return CliHost(domain=domain, netloc=netloc)
2 changes: 1 addition & 1 deletion components/ecoindex/config/settings.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
CORS_ALLOWED_CREDENTIALS: bool = True
CORS_ALLOWED_HEADERS: list = ["*"]
CORS_ALLOWED_METHODS: list = ["*"]
CORS_ALLOWED_ORIGINS: list = ["*"]
DAILY_LIMIT_PER_HOST: int = 0
DATABASE_URL: str = "sqlite+aiosqlite:///db.sqlite3"
DOCKER_CONTAINER: bool = False
DEBUG: bool = False
ENABLE_SCREENSHOT: bool = False
EXCLUDED_HOSTS: list[str] = ["localhost", "127.0.0.1"]
Expand Down
4 changes: 4 additions & 0 deletions components/ecoindex/models/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
from ecoindex.models.cli import (
CliHost,
)
from ecoindex.models.compute import (
Ecoindex,
PageMetrics,
Expand All @@ -19,6 +22,7 @@
from ecoindex.models.sort import Sort

__all__ = [
"CliHost",
"Ecoindex",
"example_daily_limit_response",
"example_ecoindex_not_found",
Expand Down
6 changes: 6 additions & 0 deletions components/ecoindex/models/cli.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
from pydantic import BaseModel

class CliHost(BaseModel):
    """Host information derived from a URL's network location for CLI use."""

    # Bare hostname used for crawl scoping (e.g. "localhost",
    # "host.docker.internal", or the original host).
    domain: str
    # Full network location, possibly including a port (e.g. "host:8000").
    netloc: str

11 changes: 11 additions & 0 deletions projects/ecoindex_cli/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -25,16 +25,27 @@ The output is a CSV or JSON file with the results of the analysis.

The simplest way to start with ecoindex-cli is to install docker and then create an alias in your .bashrc or .zshrc file:

For macOS and Windows:

```bash
alias ecoindex-cli="docker run -it --rm -v /tmp/ecoindex-cli:/tmp/ecoindex-cli vvatelot/ecoindex-cli:latest ecoindex-cli"
```

For Linux:

```bash
alias ecoindex-cli="docker run -it --rm --add-host=host.docker.internal:host-gateway -v /tmp/ecoindex-cli:/tmp/ecoindex-cli vvatelot/ecoindex-cli:latest ecoindex-cli"
```

Then you can use the cli as if it was installed on your computer:

```bash
ecoindex-cli --help
```

To enable local testing, the CLI uses the Docker alias `host.docker.internal` to reach the localhost of your machine from inside the container.
Currently, `host.docker.internal` is mapped to your IPv4 address, so make sure the services you want to analyze on localhost are reachable via your IPv4 loopback address (127.0.0.1).

## Use case

The docker image [vvatelot/ecoindex-cli](https://hub.docker.com/r/vvatelot/ecoindex-cli) is available for `linux/amd64` and `linux/arm64` platforms and provides you an easy way to use this CLI on your environment.
Expand Down
5 changes: 3 additions & 2 deletions projects/ecoindex_cli/dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,9 @@ RUN poetry export --output=requirements.txt --without-hashes


FROM python:3.12-slim

ARG wheel=ecoindex_cli-2.23.0-py3-none-any.whl

ARG wheel=ecoindex_cli-2.26.0a0-py3-none-any.whl
ENV DOCKER_CONTAINER=True

WORKDIR /code

Expand Down
Loading