Commit

Merge 94b0a1c into 1d5da17
agarrido19 committed Jan 21, 2020
2 parents 1d5da17 + 94b0a1c commit 17c40dd
Showing 9 changed files with 2,005 additions and 402 deletions.
4 changes: 0 additions & 4 deletions dhlmex/resources/base.py
@@ -4,8 +4,6 @@
from bs4 import BeautifulSoup
from requests import Response

from dhlmex.exceptions import DhlmexException


class Resource:
_client: ClassVar["dhlmex.Client"] # type: ignore
@@ -37,8 +35,6 @@ class Resource:

@staticmethod
def get_data(resp: Response, action: Dict) -> Dict:
if 'Login / Admin' in resp.text:
raise DhlmexException('Client not logged in')
soup = BeautifulSoup(resp.text, features='html.parser')
view_state = soup.find('input', id='javax.faces.ViewState').attrs[
'value'
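
A minimal, self-contained sketch of the JSF ViewState lookup that Resource.get_data performs; the HTML fragment below is illustrative, not a real DHL response:

from bs4 import BeautifulSoup

# Illustrative JSF page fragment; a real DHL page is much larger.
html = """
<form>
  <input type="hidden" id="javax.faces.ViewState" value="j_id1:example" />
</form>
"""

soup = BeautifulSoup(html, features='html.parser')
# Same lookup as in Resource.get_data: the hidden ViewState input holds the
# token that the client echoes back in later posts (see the payloads in guides.py).
view_state = soup.find('input', id='javax.faces.ViewState').attrs['value']
print(view_state)  # j_id1:example
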
2 changes: 1 addition & 1 deletion dhlmex/resources/destination.py
@@ -5,7 +5,7 @@
class Destination:
company: str
contact: str
mail: str
email: str
phone: str
address1: str
postal_code: str
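
For illustration, a minimal construction sketch with the renamed field; the import path is assumed from this repository's layout, and the values mirror the test fixture in tests/conftest.py further down:

from dhlmex.resources.destination import Destination  # path assumed from this diff

destination = Destination(
    company='JOSE DE JESUS ALVARADO YERENA',
    contact='JOSE DE JESUS ALVARADO YERENA',
    email='yerena13_24@hotmail.com',  # formerly `mail`
    phone='3223444645',
    address1='CALLE PEONIAS 40A',
    postal_code='63737',
    neighborhood='JARDINES DEL SOL',  # remaining fields taken from tests/conftest.py
    city='FRACCIONAMIENTO SANTA FE',
    state='NAY',
)
print(destination.email)  # the guides form filler now reads this attribute
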
41 changes: 36 additions & 5 deletions dhlmex/resources/guides.py
@@ -111,7 +111,7 @@ def _fill_guide_table(
)
fill_data['datos:j_id15'] = origin.company
fill_data['datos:j_id19'] = origin.contact
fill_data['datos:emailOrigen'] = origin.mail
fill_data['datos:emailOrigen'] = origin.email
fill_data['datos:j_id24'] = origin.phone
fill_data['datos:j_id28'] = origin.address1
fill_data['datos:j_id36'] = origin.postal_code
@@ -120,7 +120,7 @@
fill_data['datos:j_id49'] = origin.state
fill_data['datos:j_id54'] = destination.company
fill_data['datos:j_id58'] = destination.contact
fill_data['datos:emailDestino'] = destination.mail
fill_data['datos:emailDestino'] = destination.email
fill_data['datos:j_id63'] = destination.phone
fill_data['datos:j_id67'] = destination.address1
fill_data['datos:j_id75'] = destination.postal_code
@@ -145,7 +145,7 @@ def _confirm_capture(self, view_state: str) -> Response:
}
return self._client.post(self._urls['capture'], confirm_data)

def _force_percent(self, view_state: str, retries: int = 10) -> str:
def _force_percent(self, view_state: str, retries: int = 20) -> str:
force_data = {
'AJAXREQUEST': '_viewRoot',
'j_id115': 'j_id115',
@@ -154,22 +154,53 @@
'forcePercent': 'complete',
'ajaxSingle': 'j_id115:pb_sub',
}
guide_number = ''
while retries:
resp = self._client.post(self._urls['capture'], force_data)
if 'Procesada correctamente' in resp.text:
soup = BeautifulSoup(resp.text, features='html.parser')
return soup.find(
guide_number = soup.find(
'td', id='j_id115:tblElementos:0:j_id123'
).text
break
else:
sleep(1)
retries -= 1
raise DhlmexException('Error while processing guide')
if retries == 0:
raise DhlmexException('Error while capturing guide data')
return guide_number
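
For context, a generic sketch of the poll-and-retry pattern that _force_percent now follows; the poll callable and success marker below are placeholders, not part of dhlmex:

from time import sleep
from typing import Callable, Optional


def poll_until(poll: Callable[[], str], marker: str, retries: int = 20) -> Optional[str]:
    """Call `poll` up to `retries` times, one second apart, and return the
    first response containing `marker`; return None when the retries run out
    (the caller decides whether to raise, as _force_percent does)."""
    while retries:
        text = poll()
        if marker in text:
            return text
        sleep(1)
        retries -= 1
    return None
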

def _move_page(self, view_state: str, page: str) -> Response:
final_data = {
'AJAXREQUEST': '_viewRoot',
'j_id6': 'j_id6',
'javax.faces.ViewState': view_state,
'ajaxSingle': 'j_id6:j_id37',
'j_id6:j_id37': page,
'AJAX:EVENTS_COUNT': '1',
}
return self._client.post(self._urls['print'], final_data)

def _download_pdf(self, guide_number: str) -> str:
resp = self._client.post(self._urls['home'], {})
data = self.get_data(resp, self._actions['download'])
resp = self._client.post(self._urls['home'], data)
if guide_number not in resp.text: # search on last page
soup = BeautifulSoup(resp.text, features='html.parser')
view_state = soup.find('input', id='javax.faces.ViewState').attrs[
'value'
]
pages = len(
soup.find("div", {"class": "rich-datascr"}).find_all('td')
)
resp = self._move_page(view_state, 'last')
for _ in range(pages):
if guide_number not in resp.text: # search previous pages
resp = self._move_page(view_state, 'previous')
else:
break
if guide_number not in resp.text:
raise DhlmexException(f'Guide {guide_number} not found')
soup = BeautifulSoup(resp.text, features='html.parser')
view_state = soup.find('input', id='javax.faces.ViewState').attrs[
'value'
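
To make the paging logic in _download_pdf easier to follow in isolation, here is a sketch of the same backward search with the HTTP layer stubbed out; fetch_page stands in for the _move_page round trip and is not a real dhlmex helper:

from typing import Callable


def find_guide_listing(
    fetch_page: Callable[[str], str],  # placeholder for _move_page(view_state, ...)
    current_html: str,
    guide_number: str,
    total_pages: int,
) -> str:
    """If the guide is not on the current listing, jump to the last page and
    walk backwards until the guide number shows up or the pages run out."""
    html = current_html
    if guide_number not in html:
        html = fetch_page('last')
        for _ in range(total_pages):
            if guide_number in html:
                break
            html = fetch_page('previous')
    if guide_number not in html:
        # _download_pdf raises DhlmexException at this point.
        raise ValueError(f'Guide {guide_number} not found')
    return html
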
2 changes: 1 addition & 1 deletion dhlmex/resources/origin.py
@@ -5,7 +5,7 @@
class Origin:
company: str
contact: str
mail: str
email: str
phone: str
address1: str
postal_code: str
2 changes: 1 addition & 1 deletion dhlmex/version.py
@@ -1 +1 @@
__version__ = '0.0.2' # pragma: no cover
__version__ = '0.0.3' # pragma: no cover
35 changes: 25 additions & 10 deletions tests/conftest.py
@@ -45,7 +45,7 @@ def origin() -> Origin:
return Origin(
company='CUENCA LABS',
contact='GINO LAPI',
mail='gino@cuenca.com',
email='gino@cuenca.com',
phone='5544364200',
address1='VARSOVIA 36',
postal_code='06600',
@@ -58,15 +58,30 @@
@pytest.fixture
def destination() -> Destination:
return Destination(
company='IVANNA DÍAZ ESTRADA',
contact='IVANNA DÍAZ ESTRADA',
mail='ivanna.diaz.estrada@gmail.com',
phone='5544364200',
address1='CALLE 39 231',
postal_code='97320',
neighborhood='VICENTE GUERRERO',
city='PROGRESO',
state='YUC',
company='JOSE DE JESUS ALVARADO YERENA',
contact='JOSE DE JESUS ALVARADO YERENA',
email='yerena13_24@hotmail.com',
phone='3223444645',
address1='CALLE PEONIAS 40A',
postal_code='63737',
neighborhood='JARDINES DEL SOL',
city='FRACCIONAMIENTO SANTA FE',
state='NAY',
)


@pytest.fixture
def invalid_destination() -> Destination:
return Destination(
company='ALEJANDRO VIZQUEZ',
contact='ALEJANDRO VIZQUEZ',
email='alex_visquets@hotmail.com',
phone='5560934315',
address1='TOMAS ALVA EDISON 169',
postal_code='00000',
neighborhood='SAN RAFAEL',
city='CUAUHTEMOC',
state='CDMX',
)
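
A minimal sketch of how a test could consume these fixtures to cover the field rename; the test body is illustrative and not part of this commit:

def test_destination_uses_email_field(destination, invalid_destination):
    # After this change the attribute is `email`; `mail` no longer exists.
    assert destination.email == 'yerena13_24@hotmail.com'
    assert not hasattr(destination, 'mail')
    # The new invalid_destination fixture uses the placeholder postal code.
    assert invalid_destination.postal_code == '00000'
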


