Skip to content

Commit

Permalink
Add option for loose duplicate search dependent on required fields
Browse files Browse the repository at this point in the history
  • Loading branch information
melton-jason committed Oct 5, 2023
1 parent ca8631c commit 975bf79
Show file tree
Hide file tree
Showing 3 changed files with 20 additions and 7 deletions.
22 changes: 16 additions & 6 deletions specifyweb/businessrules/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,14 +52,14 @@ def uniqueness_rule(request, discipline_id):

elif request.method == 'POST':
rules = json.loads(request.body)['rules']
discipline = models.Discipline.objects.get(id=discipline_id)
for rule in rules:
if rule["id"] is None:
fetched_rule = UniquenessRule.objects.create(
isDatabaseConstraint=rule["isDatabaseConstraint"], discipline=discipline_id, scope=rule["scope"])
isdatabaseconstraint=rule["isDatabaseConstraint"], discipline=discipline, scope=rule["scope"])
else:
fetched_rule = UniquenessRule.objects.get(id=rule["id"])
fetched_rule.discipline = models.Discipline.objects.get(
id=discipline_id)
fetched_rule.discipline = discipline
fetched_rule.isdatabaseconstraint = rule["isDatabaseConstraint"]
fetched_rule.scope = rule["scope"] if rule["scope"] is None else models.Splocalecontaineritem.objects.get(
id=rule["scope"]["id"])
Expand Down Expand Up @@ -87,12 +87,22 @@ def validate_uniqueness(request):
scope = uniqueness_rule['scope'].lower(
) if uniqueness_rule['scope'] is not None else None

filters = [field for field in fields]
required_fields = {field: model.get_field(
field).required for field in fields}

strict_search = data["strict"] if 'strict' in data.keys() else False

strict_filters = Q()
for field, is_required in required_fields.items():
if not strict_search and not is_required:
strict_filters &= (~Q(**{f"{field}": None}))

field_filters = [field for field in fields]
if scope is not None:
filters.append(scope)
field_filters.append(scope)

duplicates = django_model.objects.values(
*filters).annotate(_duplicates=Count('id')).order_by().filter(_duplicates__gt=1)
*field_filters).annotate(_duplicates=Count('id')).order_by().filter(strict_filters).filter(_duplicates__gt=1)

total_duplicates = 0
for dupe in duplicates:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,8 @@ export async function validateUniqueness<
>(
model: TABLE_NAME,
fields: RA<string & keyof SCHEMA['fields']>,
scope?: string & keyof SCHEMA['toOneIndependent']
scope?: string & keyof SCHEMA['toOneIndependent'],
strictSearch?: boolean
) {
return ajax<UniquenessRuleValidation>(
'/businessrules/uniqueness_rules/validate/',
Expand All @@ -136,6 +137,7 @@ export async function validateUniqueness<
name: field,
})),
scope: scope === undefined ? null : scope,
strict: strictSearch,
},
},
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -228,6 +228,7 @@ export function TableUniquenessRules({
className="cursor-not-allowed"
onClick={() => undefined}
>
{icons.exclamation}
{commonText.save()}
</Button.Danger>
) : (
Expand Down

0 comments on commit 975bf79

Please sign in to comment.