Merge pull request #231 from PnX-SI/develop
Develop > Master / Release 1.7.1
camillemonchicourt committed Jul 2, 2020
2 parents cd5f59d + f40811e commit f39f109
Showing 7 changed files with 68 additions and 22 deletions.
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
1.7.0
1.7.1
10 changes: 7 additions & 3 deletions apptax/taxonomie/routesbiblistes.py
@@ -12,6 +12,7 @@
from . import db
from ..log import logmanager
from ..utils.utilssqlalchemy import json_resp, csv_resp
from ..utils.genericfunctions import calculate_offset_page
from .models import BibListes, CorNomListe, Taxref, BibNoms


@@ -153,8 +154,10 @@ def insertUpdate_biblistes(id_liste=None, id_role=None):
def getNoms_bibtaxons(idliste):
# Traitement des parametres
parameters = request.args
limit = int(parameters.get("limit")) if parameters.get("limit") else 100
page = int(parameters.get("page")) - 1 if parameters.get("page") else 0
limit = parameters.get("limit", 100, int)
page = parameters.get("page", 0, int)
offset = parameters.get("offset", 0, int)
(limit, offset, page) = calculate_offset_page(limit, offset, page)

# Récupération du groupe de la liste
(regne, group2_inpn) = (
@@ -230,7 +233,8 @@ def getNoms_bibtaxons(idliste):
q = q.order_by(orderCol)

nbResults = q.count()
data = q.limit(limit).offset(page * limit).all()
data = q.limit(limit).offset(offset).all()

results = []
for row in data:
data_as_dict = {}
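The parameter parsing change above (repeated in the routes below) replaces the manual `int(parameters.get(...))` ternaries with the type-coercion form of werkzeug's `MultiDict.get(key, default, type)`, which is what Flask exposes as `request.args`. A minimal standalone sketch with toy values, not the project's routes:

```python
from werkzeug.datastructures import MultiDict

# request.args in Flask is a MultiDict; .get(key, default, type) applies the
# type callable and falls back to the default when the key is missing or the
# conversion raises (e.g. a non-numeric value).
args = MultiDict([("limit", "50"), ("page", "abc")])

limit = args.get("limit", 100, int)    # "50" -> 50
page = args.get("page", 0, int)        # int("abc") fails -> default 0
offset = args.get("offset", 0, int)    # key absent -> default 0

print(limit, page, offset)  # 50 0 0
```

Because a missing key or a failed conversion falls back to the default instead of raising, the explicit `int(...)` wrapper and the conditional fallback are no longer needed.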
10 changes: 7 additions & 3 deletions apptax/taxonomie/routesbibnoms.py
@@ -5,6 +5,7 @@
from sqlalchemy import func

from ..utils.utilssqlalchemy import json_resp
from ..utils.genericfunctions import calculate_offset_page
from ..log import logmanager
from .models import (
BibNoms,
@@ -37,9 +38,12 @@ def get_bibtaxons():
)

nbResultsWithoutFilter = q.count()

# Traitement des parametres
limit = int(parameters.get("limit")) if parameters.get("limit") else 100
page = int(parameters.get("page")) - 1 if parameters.get("page") else 0
limit = parameters.get("limit", 20, int)
page = parameters.get("page", 0, int)
offset = parameters.get("offset", 0, int)
(limit, offset, page) = calculate_offset_page(limit, offset, page)

# Order by
if "orderby" in parameters:
@@ -81,7 +85,7 @@ def get_bibtaxons():
)

nbResults = q.count()
data = q.limit(limit).offset(page * limit).all()
data = q.limit(limit).offset(offset).all()
results = []
for row in data:
data_as_dict = row.BibNoms.as_dict()
36 changes: 23 additions & 13 deletions apptax/taxonomie/routestaxref.py
@@ -1,9 +1,10 @@
from flask import jsonify, Blueprint, request
from sqlalchemy import distinct, desc, func
from sqlalchemy import distinct, desc, func, and_
from sqlalchemy.orm.exc import NoResultFound


from ..utils.utilssqlalchemy import json_resp, serializeQuery, serializeQueryOneResult
from ..utils.genericfunctions import calculate_offset_page
from .models import (
Taxref,
BibNoms,
@@ -175,7 +176,7 @@ def getDistinctField(field):
taxrefColumns = Taxref.__table__.columns
q = db.session.query(taxrefColumns[field]).distinct(taxrefColumns[field])

limit = request.args.get("limit") if request.args.get("limit") else 100
limit = request.args.get("limit", 100, int)

for param in request.args:
if param in taxrefColumns:
@@ -217,8 +218,10 @@ def genericTaxrefList(inBibtaxon, parameters):
q = q.outerjoin(BibNoms, BibNoms.cd_nom == Taxref.cd_nom)

# Traitement des parametres
limit = int(parameters.get("limit")) if parameters.get("limit") else 100
page = int(parameters.get("page")) - 1 if parameters.get("page") else 0
limit = parameters.get("limit", 20, int)
page = parameters.get("page", 0, int)
offset = parameters.get("offset", 0, int)
(limit, offset, page) = calculate_offset_page(limit, offset, page)

for param in parameters:
if param in taxrefColumns and parameters[param] != "":
@@ -248,13 +251,13 @@ def genericTaxrefList(inBibtaxon, parameters):
orderCol = orderCol.desc()
q = q.order_by(orderCol)

results = q.limit(limit).offset(page * limit).all()
results = q.limit(limit).offset(offset).all()
return {
"items": [dict(d.Taxref.as_dict(), **{"id_nom": d.id_nom}) for d in results],
"total": nbResultsWithoutFilter,
"total_filtered": nbResults,
"limit": limit,
"page": page,
"page": page
}


@@ -263,7 +266,7 @@ def genericHierarchieSelect(tableHierarchy, rang, parameters):
dfRang = tableHierarchy.__table__.columns["id_rang"]
q = db.session.query(tableHierarchy).filter(tableHierarchy.id_rang == rang)

limit = parameters.get("limit") if parameters.get("limit") else 100
limit = parameters.get("limit", 100, int)

for param in parameters:
if param in tableHierarchy.__table__.columns:
@@ -307,7 +310,7 @@ def get_regneGroup2Inpn_taxref():
def get_AllTaxrefNameByListe(id_liste):
"""
Route utilisée pour les autocompletes
Si le paramètre search_name est passé, la requête SQL utilise l'algorithme
Si le paramètre search_name est passé, la requête SQL utilise l'algorithme
des trigrames pour améliorer la pertinence des résultats
Route utilisé par le mobile pour remonter la liste des taxons
params URL:
@@ -327,8 +330,10 @@ def get_AllTaxrefNameByListe(id_liste):
.join(BibNoms, BibNoms.cd_nom == VMTaxrefListForautocomplete.cd_nom)
.join(
CorNomListe,
CorNomListe.id_nom == BibNoms.id_nom
and CorNomListe.id_liste == id_liste,
and_(
CorNomListe.id_nom == BibNoms.id_nom,
CorNomListe.id_liste == id_liste
),
)
)
if search_name:
@@ -353,9 +358,14 @@ def get_AllTaxrefNameByListe(id_liste):
q = q.order_by(
desc(VMTaxrefListForautocomplete.cd_nom == VMTaxrefListForautocomplete.cd_ref)
)
limit = int(request.args.get("limit", 20))
page = int(request.args.get("offset", 0))
data = q.limit(limit).offset(page * limit).all()

limit = request.args.get("limit", 20, int)
page = request.args.get("page", 0, int)
offset = request.args.get("offset", 0, int)
(limit, offset, page) = calculate_offset_page(limit, offset, page)

data = q.limit(limit).offset(offset).all()

if search_name:
return [d[0].as_dict() for d in data]
else:
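The join change in this file is what fixes the duplicated results mentioned in the changelog: Python's `and` does not combine two SQLAlchemy expressions into SQL, it evaluates the truthiness of the first comparison and hands only one of the two clauses to the join, so the `id_liste` condition was silently dropped. A minimal sketch with toy tables (not the project's models) illustrating the difference:

```python
from sqlalchemy import Column, Integer, MetaData, Table, and_

metadata = MetaData()
bib_noms = Table("bib_noms", metadata, Column("id_nom", Integer, primary_key=True))
cor_nom_liste = Table(
    "cor_nom_liste",
    metadata,
    Column("id_nom", Integer, primary_key=True),
    Column("id_liste", Integer, primary_key=True),
)

id_liste = 42

# Broken form: `and` evaluates the first comparison's truthiness and returns a
# single clause, so the id_liste filter never reaches the generated SQL.
broken = (cor_nom_liste.c.id_nom == bib_noms.c.id_nom) and (
    cor_nom_liste.c.id_liste == id_liste
)
print(broken)  # only one of the two comparisons survives

# Fixed form: and_() combines both comparisons into one SQL boolean expression.
fixed = and_(
    cor_nom_liste.c.id_nom == bib_noms.c.id_nom,
    cor_nom_liste.c.id_liste == id_liste,
)
print(fixed)  # both conditions appear, joined with AND
```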
19 changes: 19 additions & 0 deletions apptax/utils/genericfunctions.py
@@ -0,0 +1,19 @@
'''
Fichier contenant des fonctions utilisées
par l'ensemble de l'application
'''


def calculate_offset_page(limit, offset, page):
"""
fonction qui calcul les paramètres
offset et page
Si un offset est défini
il prend le pas sur le paramètre page
"""
if offset:
page = int(offset / limit)
return (limit, offset, page)
else:
offset = (page-1) * limit
return (limit, offset, page)
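A quick usage sketch of the helper above, with values chosen for illustration: a non-zero `offset` takes precedence and the page number is recomputed from it; otherwise the offset is derived from the `page` parameter as `(page - 1) * limit`.

```python
# Assumes the package is importable as apptax (the file lives in apptax/utils/).
from apptax.utils.genericfunctions import calculate_offset_page

# Explicit offset wins: page is recomputed as offset // limit.
print(calculate_offset_page(20, 40, 1))  # (20, 40, 2)

# No offset given: it is derived from the page parameter.
print(calculate_offset_page(20, 0, 3))   # (20, 40, 3)
```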
4 changes: 2 additions & 2 deletions apptax/utils/routesconfig.py
@@ -11,15 +11,15 @@

from ..database import db

@adresses.route("/", methods=["GET"])
@adresses.route("", methods=["GET"])
@json_resp
def get_config(id=None):
"""
Route générant la configuration utile au frontend
"""

data = db.session.query(Application).filter_by(code_application='TH').first()

return {
"id_application": data.id_application
}
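For context on the decorator change above: with a rule of `"/"` on a blueprint mounted under a `url_prefix`, a request to the prefix without the trailing slash is first answered with a redirect, while a rule of `""` serves it directly. A minimal standalone sketch with a hypothetical blueprint registration, not the project's actual setup:

```python
from flask import Flask, Blueprint, jsonify

adresses = Blueprint("adresses", __name__)

@adresses.route("", methods=["GET"])
def get_config():
    # Placeholder payload; the real route reads the application id from the database.
    return jsonify({"id_application": 1})

app = Flask(__name__)
# Hypothetical prefix for illustration only.
app.register_blueprint(adresses, url_prefix="/config")

# GET /config now matches directly. With @adresses.route("/"), the same request
# would first be redirected to /config/.
```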
9 changes: 9 additions & 0 deletions docs/changelog.rst
@@ -2,6 +2,15 @@
CHANGELOG
=========

1.7.1 (2020-07-02)
------------------

**🐛 Corrections**

* Correction et homogénéisation des paramètres ``offset`` et ``page`` sur toutes les routes (#229)
* Correction de la route de récupération de la configuration sans le "/" (#228)
* Suppression des doublons de la route ``allnamebylist``, entrainant un nombre de résultats différent du paramètre ``limit`` fourni

1.7.0 (2020-06-17)
------------------

