Resolved flake8
ManiMozaffar committed May 2, 2023
1 parent a395d9b commit b3a1df9
Showing 23 changed files with 72 additions and 211 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/flake8.yaml
@@ -30,4 +30,4 @@ jobs:
pip install flake8
- name: Run flake8
run: flake8 .
run: flake8 . --exclude=prompt.py,constant.py
5 changes: 4 additions & 1 deletion isolated/main.py
@@ -5,7 +5,10 @@

from aiohttp import web

logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
logging.basicConfig(
level=logging.DEBUG,
format='%(asctime)s - %(levelname)s - %(message)s'
)


def is_safe_expression(node) -> bool:
2 changes: 1 addition & 1 deletion src/core/logging.py
@@ -6,7 +6,7 @@


class InterceptHandler(logging.Handler):
def emit(self, record: logging.LogRecord) -> None:
def emit(self, record: logging.LogRecord) -> None:
try:
level = logger.level(record.levelname).name
except ValueError:
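The hunk above shows only the top of InterceptHandler.emit. For context, this handler follows loguru's standard interception recipe for routing stdlib logging records into loguru; the following is a minimal self-contained sketch based on that recipe, not necessarily identical to the full src/core/logging.py:

import logging

from loguru import logger


class InterceptHandler(logging.Handler):
    # InterceptHandler(level=...) as used in src/core/settings/app.py works
    # because logging.Handler already accepts a `level` constructor argument.
    def emit(self, record: logging.LogRecord) -> None:
        # Map the stdlib level name to a loguru level if one is registered.
        try:
            level = logger.level(record.levelname).name
        except ValueError:
            level = record.levelno
        # Walk up the stack so loguru attributes the message to the original
        # caller rather than to the logging module itself.
        frame, depth = logging.currentframe(), 2
        while frame and frame.f_code.co_filename == logging.__file__:
            frame = frame.f_back
            depth += 1
        logger.opt(depth=depth, exception=record.exc_info).log(
            level, record.getMessage()
        )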
8 changes: 6 additions & 2 deletions src/core/settings/app.py
@@ -63,6 +63,10 @@ def configure_logging(self) -> None:
logging.getLogger().handlers = [InterceptHandler()]
for logger_name in self.loggers:
logging_logger = logging.getLogger(logger_name)
logging_logger.handlers = [InterceptHandler(level=self.logging_level)]
logging_logger.handlers = [
InterceptHandler(level=self.logging_level)
]

logger.configure(handlers=[{"sink": sys.stderr, "level": self.logging_level}])
logger.configure(handlers=[
{"sink": sys.stderr, "level": self.logging_level}
])
2 changes: 1 addition & 1 deletion src/core/settings/test.py
@@ -16,4 +16,4 @@ class TestAppSettings(AppSettings):
max_connection_count: int = 5
min_connection_count: int = 5

logging_level: int = logging.DEBUG
logging_level: int = logging.DEBUG
2 changes: 0 additions & 2 deletions src/orm/queries.py
@@ -61,8 +61,6 @@ async def count(self, db_session: AsyncSession) -> int:
result = await db_session.execute(count_stmt)
return result.scalar()



async def create(
self,
db_session: AsyncSession,
3 changes: 2 additions & 1 deletion src/services/ads/factory.py
@@ -45,11 +45,12 @@ async def create(
sql_creation_data.pop("keywords", None)
result = await super().create(db_session, sql_creation_data)
if result.id:
ads_id = data.get('ads_id')
message: dict = send_message_to_telegram(
chat_id=get_app_settings().telegram_chat_id,
message_text=text,
button_text="Apply",
button_url=f"https://www.linkedin.com/jobs/view/{data.get('ads_id')}/"
button_url=f"https://www.linkedin.com/jobs/view/{ads_id}/"
)
keywords = data.get("keywords")
if keywords and len(keywords) > 0 and message:
3 changes: 1 addition & 2 deletions src/services/ads/models.py
@@ -1,5 +1,4 @@
from sqlalchemy import Column, Integer, String, DateTime
from datetime import datetime
from sqlalchemy import Column, Integer, String
from db import Base
from services.common import AbstractModeDateMixin

1 change: 0 additions & 1 deletion src/services/ads/repositories.py
@@ -37,7 +37,6 @@ async def create_ads(
return result



@router.get("", response_model=None)
async def get_all_ads(
request: Request,
3 changes: 2 additions & 1 deletion src/services/ads/tasks.py
@@ -41,7 +41,8 @@ def forward_message(self, message_id, from_chat_id, ads_keywords):
logging.error(
f"Telegram did not forward because: {response.text}"
)
if "bot was blocked by the user" in response.json().get("description", ""):
if "bot was blocked by the user" in response.json(
).get("description", ""):
self.delete_user(user)


3 changes: 2 additions & 1 deletion src/services/ads/utils.py
@@ -25,7 +25,8 @@ def send_message_to_telegram(
)
if resp.status_code != 200 or not resp.json()["ok"]:
logging.error(
f"Telegram Message Ddin't sent\nresp={resp.text}\ntext={message_text}"
f"""Telegram Message Ddin't sent\nresp={resp.text}
text={message_text}"""
)
return None
else:
30 changes: 21 additions & 9 deletions src/services/common.py
@@ -80,7 +80,10 @@ def init_order_by(self, order_by):
fields = [field.strip("-") for field in order_by.split(",")]
invalid_fields = set(fields) - set(self.model.__table__.columns.keys())
if invalid_fields:
raise ValueError("order_by contains invalid fields: {}".format(", ".join(invalid_fields)))
raise ValueError(
"order_by contains invalid fields: {}".format(
", ".join(invalid_fields)
))
return True

def is_order_by_valid(self, order_by):
@@ -103,7 +106,9 @@ class CRUD(BaseCRUD, ConstructorMixin):

async def create(self, db_session: AsyncSession, data: dict):
await self.pre_save_check(db_session, data)
instance = await self.model.objects.create(db_session=db_session, **data)
instance = await self.model.objects.create(
db_session=db_session, **data
)
return instance

async def delete(self, db_session: AsyncSession, joins=set(), **kwargs):
@@ -124,9 +129,12 @@ async def read_all(
):
if order_by and order_by != "?":
if not self.is_order_by_valid(order_by):
text = f"{self.verbose_name} with the specified field for"
text += "order_by is not found, please use one of"
text += f"these fields : {self._order_by_fields}"
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"{self.verbose_name} with the specified field for order_by is not found, please use one of these fields: {self._order_by_fields}"
detail=text
)
else:
order_by = tuple(order_by.split(","))
@@ -148,7 +156,7 @@ async def read_all(
async def paginated_read_all(
self,
db_session: AsyncSession = Depends(get_db()),
joins=set(), order_by: Optional[str] = None,
joins=set(), order_by: Optional[str] = None,
base_url=None, query_params: dict() = dict(),
**kwargs
):
@@ -172,7 +180,9 @@ async def paginated_read_all(
)
next_page = f"{base_url}?{next_query_params}"

if (page_num - 1) > 0 and 0 < ((page_num-1) * per_page) < count+per_page:
if (page_num - 1) > 0 and 0 < (
(page_num-1) * per_page
) < count+per_page:
prev_query_params = urlencode(
{**query_params, "page": page_num - 1}
)
@@ -193,9 +203,10 @@ async def read_single(
db_session=db_session, joins=joins, **kwargs
)
if not result:
text = f"{self.verbose_name} with specified filter is not found"
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"{self.verbose_name} with the specified filters is not found"
detail=text
)
return result

@@ -207,9 +218,10 @@ async def update(
db_session=db_session, data=data, joins=set(), **kwargs
)
if updated_instance is None:
text = f"{self.verbose_name} with specified filter is not found"
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"{self.verbose_name} with the specified filters is not found"
detail=text
)
return updated_instance

@@ -290,7 +302,7 @@ def add(
) -> Set:
return self._update_set_with_function(
keyword,
lambda current_value: current_value.update(new_data),
lambda current_val: current_val.update(new_data),
max_retries,
retry_interval,
**kwargs
@@ -305,7 +317,7 @@ def delete(
) -> Set:
return self._update_set_with_function(
keyword,
lambda current_value: current_value.difference_update(deleted_data),
lambda current_val: current_val.difference_update(deleted_data),
max_retries,
retry_interval
)
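The wrapped condition in paginated_read_all above decides whether a previous-page link is emitted. A small standalone check with concrete numbers may help; the helper below is illustrative only and not part of the repository:

def has_prev_page(page_num: int, per_page: int, count: int) -> bool:
    # Mirrors the condition above: a previous page exists when we are past
    # page 1 and that previous page still falls inside the result set.
    return (page_num - 1) > 0 and 0 < (page_num - 1) * per_page < count + per_page


assert has_prev_page(page_num=2, per_page=10, count=25)      # page 1 exists
assert not has_prev_page(page_num=1, per_page=10, count=25)  # already on the first page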
4 changes: 3 additions & 1 deletion src/services/proxy/repositories.py
@@ -34,7 +34,9 @@ async def get_Proxys(
query_params: dict = data.dict(exclude_unset=True, exclude_defaults=True)
data: dict = query_params.copy()
query_params.update(paginated_data)
base_url = str(request.url_for(request.scope["endpoint"].__name__)).rstrip("/")
base_url = str(request.url_for(
request.scope["endpoint"].__name__)
).rstrip("/")
return await ProxyCrud(
Proxy, ProxyCreate, ProxyUpdate, ProxyCrud.verbose_name
).paginated_read_all(
1 change: 0 additions & 1 deletion src/services/tel_users/repositories.py
@@ -65,7 +65,6 @@ async def get_all_users(
return dict(
users=TelegramRetriever(db).get_all_active_users()
)



@router.get("/filters")
39 changes: 0 additions & 39 deletions src/services/test.py

This file was deleted.

4 changes: 2 additions & 2 deletions tel/client.py
@@ -1,6 +1,5 @@
from pyrogram import Client
from pyrogram import __version__
from pyrogram.raw.all import layer
import loguru

from env_reader import EnvReader
@@ -22,7 +21,8 @@ async def start(self):
await super().start()

me = await self.get_me()
loguru.logger.info(f"linkedIn-scraper on Pyrogram v{__version__} (Layer {layer}) started on @{me.username}.")
loguru.logger.info(
f"Pyrogram v{__version__} started on @{me.username}.")

async def stop(self, *args):
await super().stop()
2 changes: 1 addition & 1 deletion tel/constant.py
@@ -18,4 +18,4 @@
Also, don't forget to star my project in github or contribute if you found it cool/useful :)
\n https://github.com/ManiMozaffar/linkedIn-scraper
"""
"""
12 changes: 9 additions & 3 deletions tel/plugins/commands.py
@@ -17,7 +17,9 @@ async def show_keywords(_, message: types.Message):
text = ' \n'.join(text) + (
"\n\n\nFor reference please check this url:"
) + (
"\nhttps://github.com/ManiMozaffar/linkedIn-scraper/blob/main/src/services/tech/loaddata.py"
"\nhttps://github.com/ManiMozaffar/linkedIn-scraper"
) + (
"/blob/main/src/services/tech/loaddata.py"
)
await message.reply_text(
text,
@@ -32,7 +34,9 @@ async def get_info(client, message: types.Message):
)
if await is_user_a_member_of_channel(client, message):
text = requests.get(
f"http://main_app:8000/api/telegram/user/{int(message.from_user.id)}"
"http://main_app:8000/api/telegram/user/{}".format(
int(message.from_user.id)
)
).json().get("result", [])
text = text[0] if len(text) == 1 else "No Filter Found"
loguru.logger.info(f"Sent message {message.from_user.id}: {text}")
@@ -64,7 +68,9 @@ async def update_expression(client, message: types.Message):
if await is_user_a_member_of_channel(client, message):
payload = {"expression": str(message.text)}
resp: dict = requests.put(
f"http://main_app:8000/api/telegram/user/{int(message.from_user.id)}",
"http://main_app:8000/api/telegram/user/{}".format(
int(message.from_user.id)
),
json=payload
).json()
if resp.get("success"):
2 changes: 1 addition & 1 deletion worker/connection.py
@@ -132,7 +132,7 @@ async def create_ads(
except (json.JSONDecodeError, KeyError, ValueError) as e:
body = first_resp["text"]
loguru.logger.error(
f"[WORKER {worker_id}] Could not retrieve tags from second_resp because {e.__name__} raised"
f"[WORKER {worker_id}] {e.__name__} raised on second resp"
)
loguru.logger.error(
f"\n\n [WORKER {worker_id}] second_resp={second_resp}\n"
4 changes: 3 additions & 1 deletion worker/constants.py
@@ -11,7 +11,9 @@
SPOOF_FINGERPRINT = '''
(() => {
delete navigator.__proto__.webdriver;
Object.defineProperty(navigator, 'deviceMemory', { value: %d, configurable: true });
Object.defineProperty(navigator, 'deviceMemory', {
value: %d, configurable: true
});
const originalHardwareConcurrency = navigator.hardwareConcurrency;
const originalPropertyDescriptor = Object.getOwnPropertyDescriptor(
10 changes: 6 additions & 4 deletions worker/decorators.py
@@ -23,7 +23,9 @@ async def wrapper(*args, **kwargs):
try:
return await func(*args, **kwargs)
except Exception as e:
print(f"An error occurred in function {func.__name__} with {args} & {kwargs}: {e}")
loguru.logger.error(
f"Error raised at {func.__name__} with {args} & {kwargs}: {e}"
)
traceback.print_exc()
return ""
return wrapper
@@ -40,8 +42,8 @@ def get_unique_object(func: Callable):
func (callable): A function that returns a list of objects.
Returns:
Callable: A wrapped function that returns a tuple containing a unique object from
the list and the list of used objects.
Callable: A wrapped function that returns a tuple containing a unique
object from the list and the list of used objects.
"""
used_objects = set()

@@ -78,7 +80,7 @@ async def wrapper(*args: Any, **kwargs: Any) -> Any:
return await asyncio.wait_for(func(*args, **kwargs), timeout)
except asyncio.TimeoutError:
raise TimeoutError(
f"Function '{func.__name__}' exceeded {timeout} seconds timeout."
f"Function '{func.__name__}' exceeded {timeout} seconds."
)
return wrapper
return decorator
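The wrapper shown above is the inner part of a timeout decorator factory built on asyncio.wait_for. Below is a usage sketch under assumptions: the factory name timeout_after and the example coroutine are hypothetical, and only the wrapper body mirrors the diff.

import asyncio
from functools import wraps
from typing import Any, Callable


def timeout_after(timeout: float) -> Callable:
    # Hypothetical name for the decorator factory whose body the hunk shows.
    def decorator(func: Callable) -> Callable:
        @wraps(func)
        async def wrapper(*args: Any, **kwargs: Any) -> Any:
            try:
                return await asyncio.wait_for(func(*args, **kwargs), timeout)
            except asyncio.TimeoutError:
                raise TimeoutError(
                    f"Function '{func.__name__}' exceeded {timeout} seconds."
                )
        return wrapper
    return decorator


@timeout_after(2)
async def scrape_page() -> str:
    await asyncio.sleep(5)  # simulated slow scrape; trips the 2-second timeout
    return "done"

Calling asyncio.run(scrape_page()) would then raise the TimeoutError after roughly two seconds.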
8 changes: 6 additions & 2 deletions worker/helpers.py
@@ -130,7 +130,9 @@ async def fill_form(page: Page, xpath: str, text: str, timeout=None):


@decorators.exception_handler
async def safe_get_element_text(page: Page, xpath: str, replace=True, timeout=None):
async def safe_get_element_text(
page: Page, xpath: str, replace=True, timeout=None
):
"""
Safely get the text content of an element using its XPath.
@@ -183,7 +185,9 @@ def does_ads_exists(ads_id) -> bool:
:param ads_id: Advertisement ID to check for existence.
:return: True if advertisement exists, False otherwise.
"""
return requests.get(f"{constants.HOST}/api/ads/{int(ads_id)}").status_code == 200
return requests.get(
f"{constants.HOST}/api/ads/{int(ads_id)}"
).status_code == 200


def create_proxy_url(proxy_dict: dict) -> ProxySettings: