-
Notifications
You must be signed in to change notification settings - Fork 7
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #340 from bento-platform/merger-candig
Merge CANdig's Katsu fork changes into main Katsu development
- Loading branch information
Showing
69 changed files
with
1,773 additions
and
714 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,31 @@ | ||
# NOTE(review): this entire GitHub Actions workflow is commented out (disabled).
# As written, it would publish a Docker image to GitHub Container Registry
# (ghcr.io) on pushes to the `stable` branch and on `v*` tags. Kept here for
# reference only — delete it or re-enable it deliberately.
#
# name: Docker
#
# on:
#   push:
#     branches:
#       - stable
#
#     # Publish `v1.2.3` tags as releases.
#     tags:
#       - v*
#
# jobs:
#   push_to_registry:
#     name: Push Docker image to GitHub Container Registry
#     runs-on: ubuntu-latest
#     steps:
#       - name: Check out the repo
#         uses: actions/checkout@v2
#         with:
#           submodules: recursive
#           ref: stable
#       - name: Push to GitHub Packages
#         uses: docker/build-push-action@v1
#         with:
#           username: ${{ github.actor }}
#           password: ${{ github.token }}
#           registry: ghcr.io
#           repository: candig/candig-katsu
#           tag_with_ref: true
#           tags: latest
#           dockerfile: Dockerfile
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,18 @@ | ||
# Travis CI configuration: run the Katsu test suite on Python 3.10 against
# PostgreSQL 12, then upload coverage to Codecov.
dist: focal
language: python
python:
  - "3.10"
addons:
  postgresql: "12"
  apt:
    packages:
      - postgresql-12
      - postgresql-contrib-12
before_install:
  # Set a known password for the `postgres` user so the test settings can
  # connect. NOTE(review): throwaway CI-only credential, not a real secret —
  # it must match POSTGRES_PASSWORD exported below.
  - sudo -u postgres psql -U postgres -p 5433 -d postgres -c "alter user postgres with password 'hj38f3Ntr';"
install:
  - pip install -r requirements-dev.txt
script:
  # The Django test settings read connection details from these variables.
  - export POSTGRES_USER="postgres" && export POSTGRES_PASSWORD="hj38f3Ntr" && export POSTGRES_PORT=5433
  - python3 -m coverage run ./manage.py test
  - codecov
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,54 @@ | ||
import django_filters | ||
import logging | ||
|
||
logger = logging.getLogger(__name__) | ||
|
||
# HELPERS | ||
|
||
|
||
def filter_datasets(qs, name, value):
    """Restrict *qs* to objects belonging to the given datasets.

    ``value`` is a comma-separated string of dataset titles; a falsy value
    (``None`` or the empty string) leaves the queryset unfiltered. The
    ``__in`` lookup is built from *name*, and ``distinct()`` guards against
    duplicate rows produced by the join.
    """
    if not value:
        return qs
    dataset_titles = value.split(",")
    return qs.filter(**{f"{name}__in": dataset_titles}).distinct()
|
||
|
||
# TODO authorize_datasets(): remove the code == GRU filter, urgently.
def authorize_datasets(qs, name, value):
    """Restrict *qs* to datasets the requester is authorized to access.

    ``value`` is either a comma-separated string of authorized dataset
    titles, or the sentinel string ``"NO_DATASETS_AUTHORIZED"``, in which
    case an empty queryset (of the correct model) is returned.
    """
    # logger.warn() is a deprecated alias of logger.warning(); use lazy
    # %-style args so the message is only formatted when actually emitted.
    logger.warning("value is %s", value)

    lookup = f"{name}__in"

    if value == "NO_DATASETS_AUTHORIZED":
        # Filtering on an empty list yields an empty queryset of the right type.
        return qs.filter(**{lookup: []})

    # TODO THE FILTER BELOW IS JANKY; NEEDS TO BE REMOVED.
    # It is only here for the ClinDIG 4.3 demo: it additionally restricts
    # results to datasets whose primary consent code is 'GRU'.
    authorized = qs.filter(**{lookup: value.split(",")}).distinct()\
        .filter(data_use__consent_code__primary_category__code='GRU')
    for dataset in authorized:
        logger.warning(str(dataset.data_use))

    return authorized
|
||
|
||
class AuthorizedDatasetFilter(django_filters.rest_framework.FilterSet):
    """FilterSet exposing dataset-based filtering on table-owned objects.

    Both filters accept a comma-separated list of dataset titles and resolve
    through the ``table_ownership__dataset__title`` relation.
    """

    # Plain dataset filter: an empty value applies no filtering
    # (see filter_datasets above).
    datasets = django_filters.CharFilter(
        method=filter_datasets, field_name="table_ownership__dataset__title",
        label="Datasets"
    )
    # Authorization-aware filter: the sentinel "NO_DATASETS_AUTHORIZED"
    # yields an empty queryset (see authorize_datasets above).
    authorized_datasets = django_filters.CharFilter(
        method=authorize_datasets, field_name="table_ownership__dataset__title",
        label="Authorized datasets"
    )
108 changes: 108 additions & 0 deletions
108
chord_metadata_service/chord/migrations/0004_v2_14_0.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,108 @@ | ||
# Generated by Django 4.1.1 on 2022-09-22 19:32 | ||
|
||
from django.db import migrations, models | ||
|
||
|
||
class Migration(migrations.Migration):
    """Katsu v2.14.0 schema migration (auto-generated by Django 4.1.1).

    Re-declares ``Dataset`` fields as ``models.JSONField`` with refreshed
    help text: list-valued fields default to ``list``; optional object-valued
    fields are ``null=True``. All operations are ``AlterField`` — no data is
    transformed. Auto-generated; do not hand-edit beyond comments.
    """

    dependencies = [
        ('chord', '0003_v2_13_0'),
    ]

    operations = [
        migrations.AlterField(
            model_name='dataset',
            name='acknowledges',
            field=models.JSONField(blank=True, default=list, help_text='The grant(s) which funded the work reported by the dataset.'),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='alternate_identifiers',
            field=models.JSONField(blank=True, default=list, help_text='Alternate identifiers for the dataset.'),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='citations',
            field=models.JSONField(blank=True, default=list, help_text='The publication(s) that cite this dataset.'),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='creators',
            field=models.JSONField(blank=True, default=list, help_text='The person(s) or organization(s) which contributed to the creation of the dataset.'),
        ),
        # data_use is the only required (non-blank, non-null) JSON field here.
        migrations.AlterField(
            model_name='dataset',
            name='data_use',
            field=models.JSONField(),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='dates',
            field=models.JSONField(blank=True, default=list, help_text='Relevant dates for the datasets, a date must be added, e.g. creation date or last modification date should be added.'),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='dimensions',
            field=models.JSONField(blank=True, default=list, help_text='The different dimensions (granular components) making up a dataset.'),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='distributions',
            field=models.JSONField(blank=True, default=list, help_text='The distribution(s) by which datasets are made available (for example: mySQL dump).'),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='extra_properties',
            field=models.JSONField(blank=True, help_text='Extra properties that do not fit in the previous specified attributes.', null=True),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='has_part',
            field=models.ManyToManyField(blank=True, help_text="A Dataset that is a subset of this Dataset; Datasets declaring the 'hasPart' relationship are considered a collection of Datasets, the aggregation criteria could be included in the 'description' field.", to='chord.dataset'),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='keywords',
            field=models.JSONField(blank=True, default=list, help_text='Tags associated with the dataset, which will help in its discovery.'),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='licenses',
            field=models.JSONField(blank=True, default=list, help_text='The terms of use of the dataset.'),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='linked_field_sets',
            field=models.JSONField(blank=True, default=list, help_text='Data type fields which are linked together.'),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='primary_publications',
            field=models.JSONField(blank=True, default=list, help_text='The primary publication(s) associated with the dataset, usually describing how the dataset was produced.'),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='produced_by',
            field=models.JSONField(blank=True, help_text='A study process which generated a given dataset, if any.', null=True),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='related_identifiers',
            field=models.JSONField(blank=True, default=list, help_text='Related identifiers for the dataset.'),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='spatial_coverage',
            field=models.JSONField(blank=True, default=list, help_text='The geographical extension and span covered by the dataset and its measured dimensions/variables.'),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='stored_in',
            field=models.JSONField(blank=True, help_text='The data repository hosting the dataset.', null=True),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='types',
            field=models.JSONField(blank=True, default=list, help_text='A term, ideally from a controlled terminology, identifying the dataset type or nature of the data, placing it in a typology.'),
        ),
    ]
Oops, something went wrong.