chore(build): migrate to python3.9
Migrate the project to use python3.9 and above.
kennedykori committed Oct 14, 2022
1 parent 819f2e3 commit 2a46156
Showing 46 changed files with 212 additions and 176 deletions.
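
Beyond the CI and packaging updates, the bulk of this diff is a mechanical typing cleanup enabled by Python 3.9: container generics from `typing` (`Dict`, `List`, `Type`) become the builtin `dict`, `list`, `type`, and the `Mapping`/`Sequence`/`Sized` ABCs are imported from `collections.abc` instead. A minimal before/after sketch of the pattern (illustrative only, not copied from any single file in the diff):

```python
# Before (Python 3.7/3.8 style), the pattern removed throughout this commit:
#   from typing import Dict, List, Mapping, Sequence, Type
#   def group_items(items: Sequence[str]) -> Mapping[str, List[str]]:
#       grouped: Dict[str, List[str]] = {}

# After (Python 3.9+ style), the pattern added throughout this commit:
from collections.abc import Mapping, Sequence


def group_items(items: Sequence[str]) -> Mapping[str, list[str]]:
    """Group items by their first character using builtin generics."""
    grouped: dict[str, list[str]] = {}
    for item in items:
        grouped.setdefault(item[:1], []).append(item)
    return grouped
```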
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -20,7 +20,7 @@ jobs:

strategy:
matrix:
python-version: ["3.8", "3.9", "3.10"]
python-version: ["3.9", "3.10"]
steps:
- uses: actions/checkout@v3
- name: Set up project using python ${{ matrix.python-version }}
16 changes: 12 additions & 4 deletions .github/workflows/release.yml
@@ -21,14 +21,22 @@ jobs:

- name: Install requirements
run: |
sudo add-apt-repository ppa:deadsnakes/ppa
sudo apt-get update
sudo apt-get install python3.10 nodejs
pip install -r requirements/dev.txt
sudo apt-get install python3.10 python3.10-dev python3.10-distutils nodejs
sudo update-alternatives --install /usr/bin/python python /usr/bin/python3.8 1
sudo update-alternatives --install /usr/bin/python python /usr/bin/python3.10 2
sudo update-alternatives --set python /usr/bin/python3.10
sudo update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.8 1
sudo update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.10 2
sudo update-alternatives --set python3 /usr/bin/python3.10
curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
python3.10 -m pip install -r requirements/dev.txt
npm install @semantic-release/changelog @semantic-release/git conventional-changelog-conventionalcommits
- name: Package the app
run: |
pyinstaller app/__main__.py --collect-all app --name idr_client_temp -F
pyinstaller idr_client.spec
staticx dist/idr_client_temp dist/idr_client
./dist/idr_client --version
@@ -40,7 +48,7 @@ jobs:
git_commit_gpgsign: true
git_committer_email: ${{ secrets.GIT_COMMITTER_EMAIL }}
git_committer_name: ${{ secrets.GIT_COMMITTER_NAME }}
git_tag_gpgsign: false
git_tag_gpgsign: true
git_user_signingkey: true
gpg_private_key: ${{ secrets.GPG_KEY }}
passphrase: ${{ secrets.GPG_KEY_PASSPHRASE }}
1 change: 1 addition & 0 deletions .gitignore
@@ -309,5 +309,6 @@ config.yaml
local.sh
logs/*
!logs/.gitkeep
!idr_client.spec
secrets/*
cloud_sql_proxy
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -24,7 +24,7 @@ repos:
rev: v2.37.3
hooks:
- id: pyupgrade
args: [--py37-plus]
args: [--py38-plus]

- repo: https://gitlab.com/pycqa/flake8
rev: 3.9.2
6 changes: 3 additions & 3 deletions README.md
@@ -4,7 +4,7 @@ IDR(Integrated Data Repository) Client is a tool that extracts data from a
source(most likely a database), performs any transformations that may be required
on the data and then transmits it to a remote
[server](https://github.com/savannahghi/idr-server) for further processing and
consumption. The tool is authored in Python(3.8+) but working executable binaries
consumption. The tool is authored in Python(3.9+) but working executable binaries
for Linux can be found on the [release section](https://github.com/savannahghi/idr-client/releases).

[![Coverage Status](https://coveralls.io/repos/github/savannahghi/idr-client/badge.svg?branch=develop)](https://coveralls.io/github/savannahghi/idr-client?branch=develop)
@@ -46,7 +46,7 @@ achieved using the following steps:
You are now good to go :thumbsup:.

#### 2. Cloning the Repo.
For this method, you will need to have [Python 3.8.0](https://www.python.org/downloads/release/python-380/)
For this method, you will need to have [Python 3.9.0](https://www.python.org/downloads/release/python-390/)
(3.10 is recommended) or above installed. You could optionally create a
[virtualenv](https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/#creating-a-virtual-environment)
for the project separate from the system Python. Next, perform the following
@@ -81,7 +81,7 @@ pip install -r requirements/dev.txt

And then create the binary using the following command:-
```bash
pyinstaller app/__main__.py --hidden-import apps/imp --collect-all app --name idr_client_temp -F
pyinstaller app/__main__.py idr_client.spec
```
This will create an executable but the executable will still depend on the
target system/computer having the correct system libraries. More details on this
29 changes: 10 additions & 19 deletions app/__init__.py
@@ -1,15 +1,6 @@
from collections.abc import Mapping, Sequence
from logging.config import dictConfig
from typing import (
Any,
Dict,
Final,
List,
Mapping,
Optional,
Sequence,
Type,
cast,
)
from typing import Any, Final, Optional, cast

import yaml
from yaml import Loader
@@ -39,7 +30,7 @@
str
] = "SUPPORTED_DATA_SOURCE_TYPES" # noqa

_DEFAULT_CONFIG: Final[Dict[str, Any]] = {
_DEFAULT_CONFIG: Final[dict[str, Any]] = {
_LOGGING_CONFIG_KEY: {
"version": 1,
"disable_existing_loggers": False,
@@ -101,10 +92,10 @@ def _load_config_file(
def _load_settings_initializers(
initializers_dotted_paths: Sequence[str],
) -> Sequence[SettingInitializer]:
initializers: List[SettingInitializer] = list()
initializers: list[SettingInitializer] = list()
for _initializer_dotted_path in initializers_dotted_paths:
try:
initializer_klass: Type[SettingInitializer]
initializer_klass: type[SettingInitializer]
initializer_klass = import_string_as_klass(
_initializer_dotted_path, SettingInitializer
)
@@ -172,7 +163,7 @@ def setting(self) -> str:
return _LOGGING_CONFIG_KEY

def execute(self, an_input: Optional[Mapping[str, Any]]) -> Any:
logging_config: Dict[str, Any] = dict(
logging_config: dict[str, Any] = dict(
an_input or _DEFAULT_CONFIG[self.setting]
)
dictConfig(logging_config)
@@ -207,9 +198,9 @@ def execute(self, an_input: Optional[Sequence[str]]) -> Any:
@staticmethod
def _dotted_path_to_data_source_type_klass(
dotted_path: str,
) -> Type[DataSourceType]:
) -> type[DataSourceType]:
try:
data_source_type_klass: Type[DataSourceType]
data_source_type_klass: type[DataSourceType]
data_source_type_klass = import_string_as_klass(
dotted_path, DataSourceType
)
@@ -252,13 +243,13 @@ def setup(
registry = AppRegistry() # type: ignore

# Load the application settings
_settings_dict: Dict[str, Any] = dict(initial_settings or _DEFAULT_CONFIG)
_settings_dict: dict[str, Any] = dict(initial_settings or _DEFAULT_CONFIG)
# Load config from a file when provided
if config_file_path: # pragma: no branch
_settings_dict.update(_load_config_file(config_file_path))

# Load initializers
_initializers: List[Any] = list(settings_initializers or [])
_initializers: list[Any] = list(settings_initializers or [])
_initializers.extend(
_load_settings_initializers(
_settings_dict.get(_SETTINGS_INITIALIZERS_CONFIG_KEY, tuple())
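
The `setup()` hunk above merges the default settings, an optional config file, and any dotted-path setting initializers named in the settings. A hedged usage sketch of that flow, using only the keyword names visible in the hunk (the full signature is not shown in this diff, and the `config.yaml` path is hypothetical):

```python
# Minimal usage sketch, assuming setup() accepts these keywords as suggested
# by the hunk above; the config file path is hypothetical.
import app

app.setup(
    initial_settings=None,           # falls back to _DEFAULT_CONFIG
    config_file_path="config.yaml",  # merged over the defaults when provided
    settings_initializers=None,      # extended with any initializers named in the settings
)
```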
3 changes: 2 additions & 1 deletion app/__main__.py
@@ -1,5 +1,6 @@
from argparse import ArgumentParser
from typing import Optional, Sequence
from collections.abc import Sequence
from typing import Optional

import app
from app.__version__ import __title__, __version__
15 changes: 8 additions & 7 deletions app/core/domain.py
@@ -1,6 +1,7 @@
from abc import ABCMeta, abstractmethod
from collections.abc import Mapping, Sequence
from functools import lru_cache
from typing import Any, Generic, Mapping, Optional, Sequence, Type, TypeVar
from typing import Any, Generic, Optional, TypeVar

from typing_inspect import is_optional_type

@@ -23,7 +24,7 @@


@lru_cache(maxsize=None)
def _get_available_annotations(do_klass: Type[_ADO]) -> Mapping[str, Any]:
def _get_available_annotations(do_klass: type[_ADO]) -> Mapping[str, Any]:
"""Extract all annotations available on a domain object class.
This includes all annotations defined on the class's ancestors.
@@ -45,7 +46,7 @@ def _get_available_annotations(do_klass: Type[_ADO]) -> Mapping[str, Any]:


@lru_cache(maxsize=None)
def _get_required_fields_names(do_klass: Type[_ADO]) -> Sequence[str]:
def _get_required_fields_names(do_klass: type[_ADO]) -> Sequence[str]:
"""Determine and return the required fields of a domain object class.
A required field in the context of this method is defined as one whose
@@ -444,7 +445,7 @@ def __str__(self) -> str:

@classmethod
@abstractmethod
def imp_data_source_klass(cls) -> Type[DataSource]:
def imp_data_source_klass(cls) -> type[DataSource]:
"""
Return the :class:`DataSource` concrete implementation class for this
data source type.
@@ -455,7 +456,7 @@ def imp_data_source_klass(cls) -> Type[DataSource]:

@classmethod
@abstractmethod
def imp_extract_metadata_klass(cls) -> Type[ExtractMetadata]:
def imp_extract_metadata_klass(cls) -> type[ExtractMetadata]:
"""
Return the :class:`ExtractMetadata` concrete implementation class for
this data source type.
@@ -467,7 +468,7 @@ def imp_extract_metadata_klass(cls) -> Type[ExtractMetadata]:

@classmethod
@abstractmethod
def imp_upload_chunk_klass(cls) -> Type[UploadChunk]:
def imp_upload_chunk_klass(cls) -> type[UploadChunk]:
"""
Return the :class:`UploadChunk` concrete implementation class for this
data source type.
@@ -478,7 +479,7 @@ def imp_upload_chunk_klass(cls) -> Type[UploadChunk]:

@classmethod
@abstractmethod
def imp_upload_metadata_klass(cls) -> Type[UploadMetadata]:
def imp_upload_metadata_klass(cls) -> type[UploadMetadata]:
"""
Return the :class:`UploadMetadata` concrete implementation class for
this data source type.
13 changes: 3 additions & 10 deletions app/core/mixins.py
@@ -1,14 +1,7 @@
from abc import ABCMeta, abstractmethod
from collections.abc import Mapping
from types import TracebackType
from typing import (
Any,
ContextManager,
Generic,
Mapping,
Optional,
Type,
TypeVar,
)
from typing import Any, ContextManager, Generic, Optional, TypeVar

from .task import Task

@@ -30,7 +23,7 @@ class Disposable(ContextManager, metaclass=ABCMeta):

def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_type: Optional[type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
3 changes: 2 additions & 1 deletion app/core/transport.py
@@ -1,5 +1,6 @@
from abc import ABCMeta, abstractmethod
from typing import Any, Mapping, Optional, Sequence
from collections.abc import Mapping, Sequence
from typing import Any, Optional

from .domain import (
DataSource,
15 changes: 8 additions & 7 deletions app/imp/sql_data/domain.py
@@ -1,7 +1,8 @@
import io
from collections.abc import Mapping, Sequence
from enum import Enum
from logging import getLogger
from typing import Any, Dict, Final, Mapping, Optional, Sequence, Type
from typing import Any, Final, Optional

import pandas as pd
import pyarrow as pa
@@ -89,7 +90,7 @@ def __init__(self, **kwargs):
data_source_type: SQLDataSourceType = kwargs.pop("data_source_type")
super().__init__(**kwargs)
self._data_source_type: SQLDataSourceType = data_source_type
self._extract_metadata: Dict[str, "SQLExtractMetadata"] = dict()
self._extract_metadata: dict[str, "SQLExtractMetadata"] = dict()
self._engine: Optional[Engine] = None

def __enter__(self) -> "SQLDataSource":
@@ -228,7 +229,7 @@ def __init__(self, **kwargs):
"description", "Represents SQL databases as a source type."
)
super().__init__(**kwargs)
self._data_sources: Dict[str, SQLDataSource] = dict()
self._data_sources: dict[str, SQLDataSource] = dict()

@property
def code(self) -> str:
@@ -243,19 +244,19 @@ def data_sources(self, data_sources: Mapping[str, SQLDataSource]) -> None:
self._data_sources = dict(**data_sources)

@classmethod
def imp_data_source_klass(cls) -> Type[DataSource]:
def imp_data_source_klass(cls) -> type[DataSource]:
return SQLDataSource

@classmethod
def imp_extract_metadata_klass(cls) -> Type[ExtractMetadata]:
def imp_extract_metadata_klass(cls) -> type[ExtractMetadata]:
return SQLExtractMetadata

@classmethod
def imp_upload_chunk_klass(cls) -> Type[UploadChunk]:
def imp_upload_chunk_klass(cls) -> type[UploadChunk]:
return SQLUploadChunk

@classmethod
def imp_upload_metadata_klass(cls) -> Type[UploadMetadata]:
def imp_upload_metadata_klass(cls) -> type[UploadMetadata]:
return SQLUploadMetadata


5 changes: 3 additions & 2 deletions app/lib/app_registry.py
@@ -1,4 +1,5 @@
from typing import Callable, Dict, Mapping, Optional
from collections.abc import Mapping
from typing import Callable, Optional

from app.core import DataSourceType, Transport

@@ -24,7 +25,7 @@ class AppRegistry:
"""

def __init__(self):
self._data_source_types: Dict[str, DataSourceType] = dict()
self._data_source_types: dict[str, DataSourceType] = dict()
self._default_transport_factory: Optional[
DefaultTransportFactory
] = None
3 changes: 2 additions & 1 deletion app/lib/checkers.py
@@ -1,5 +1,6 @@
from abc import abstractmethod
from typing import Optional, Protocol, Sized, SupportsFloat, TypeVar
from collections.abc import Sized
from typing import Optional, Protocol, SupportsFloat, TypeVar

# =============================================================================
# TYPES
7 changes: 4 additions & 3 deletions app/lib/config/config.py
@@ -1,5 +1,6 @@
import logging
from typing import Any, Dict, List, Mapping, Optional, Sequence
from collections.abc import Mapping, Sequence
from typing import Any, Optional

from .exceptions import MissingSettingError
from .setting_initializer import SettingInitializer
@@ -55,7 +56,7 @@ def __init__(
:param settings_initializers: Optional initializers to perform post
initialization tasks.
"""
self._settings: Dict[str, Any] = dict(settings or {})
self._settings: dict[str, Any] = dict(settings or {})
self._initializers: Mapping[
str, Sequence[SettingInitializer]
] = self._group_related_initializers(settings_initializers or tuple())
@@ -111,7 +112,7 @@ def _run_initializers(self) -> None:
def _group_related_initializers(
initializers: Sequence[SettingInitializer],
) -> Mapping[str, Sequence[SettingInitializer]]:
grouped_initializers: Dict[str, List[SettingInitializer]] = dict()
grouped_initializers: dict[str, list[SettingInitializer]] = dict()
for _initializer in initializers:
grouped_initializers.setdefault(_initializer.setting, []).append(
_initializer
8 changes: 4 additions & 4 deletions app/lib/module_loading.py
@@ -3,7 +3,7 @@
import sys
from importlib import import_module
from types import ModuleType
from typing import Type, TypeVar, cast
from typing import TypeVar, cast

# =============================================================================
# TYPES
@@ -62,8 +62,8 @@ def import_string(dotted_path: str) -> ModuleType:


def import_string_as_klass(
dotted_path: str, target_klass: Type[_T]
) -> Type[_T]:
dotted_path: str, target_klass: type[_T]
) -> type[_T]:
"""
Import a dotted module as the given class type. Raise ``ImportError`` if
the import failed and a ``TypeError`` if the imported module is not of the
@@ -88,4 +88,4 @@ def import_string_as_klass(
% (dotted_path, target_klass.__qualname__)
)

return cast(Type[target_klass], _module)
return cast(type[target_klass], _module)
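
Since `import_string_as_klass` now returns `type[_T]`, callers such as the settings-initializer and data-source-type loaders in `app/__init__.py` annotate the result with the builtin `type` as well. A small usage sketch under that assumption (only the call shape comes from the hunks above; the specific dotted path is illustrative):

```python
# Usage sketch mirroring how app/__init__.py resolves dotted paths above;
# the dotted path passed here is illustrative, not taken from the config.
from app.core import DataSourceType
from app.lib.module_loading import import_string_as_klass

data_source_type_klass: type[DataSourceType] = import_string_as_klass(
    "app.imp.sql_data.domain.SQLDataSourceType",
    DataSourceType,
)
```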
