
Commit 9072394
access_level is now uppercase; None becomes PRIVATE
talavis committed Apr 25, 2019
1 parent 4bf48e7 commit 9072394
Showing 4 changed files with 13 additions and 13 deletions.
8 changes: 4 additions & 4 deletions backend/db.py
@@ -215,8 +215,8 @@ class Meta:
     num_variants = IntegerField(null=True)
     coverage_levels = ArrayField(IntegerField, null=True)
     portal_avail = BooleanField(null=True)
-    file_access = EnumField(null=False, choices=['None', 'Controlled', 'Registered', 'Public'])
-    beacon_access = EnumField(null=False, choices=['None', 'Controlled', 'Registered', 'Public'])
+    file_access = EnumField(null=False, choices=['PRIVATE', 'CONTROLLED', 'REGISTERED', 'PUBLIC'])
+    beacon_access = EnumField(null=False, choices=['PRIVATE', 'CONTROLLED', 'REGISTERED', 'PUBLIC'])
 
 
 class DatasetFile(BaseModel):
@@ -353,9 +353,9 @@ def has_access(self, dataset, ds_version=None):
         dsv = get_dataset_version(dataset.short_name, ds_version)
         if not dsv:
             return False
-        if dsv.file_access in ('Registered', 'Public'):
+        if dsv.file_access in ('REGISTERED', 'PUBLIC'):
             return True
-        elif dsv.file_access == 'None':
+        elif dsv.file_access == 'PRIVATE':
             return False
 
         return (DatasetAccessCurrent.select()
2 changes: 1 addition & 1 deletion sql/data_schema.sql
@@ -115,7 +115,7 @@ CREATE TABLE IF NOT EXISTS data.sample_sets (
 phenotype varchar(50) NOT NULL
 );
 
-CREATE TYPE access_levels AS enum('None', 'Controlled', 'Registered', 'Public');
+CREATE TYPE access_levels AS enum('PRIVATE', 'CONTROLLED', 'REGISTERED', 'PUBLIC');
 -- None - do not make available
 -- Controlled - access must be permitted "manually"
 -- Registered - free access after registration
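Note that the change above only touches sql/data_schema.sql, which applies when a database is created from scratch. A database already built from the previous schema would need its enum labels renamed in place; a minimal sketch of such a migration, assuming PostgreSQL 10 or later (ALTER TYPE ... RENAME VALUE), could look like the statements below. It is not part of this commit.

-- Hypothetical follow-up migration for an existing database; not included in this commit.
-- The renamed labels must match the new EnumField choices in backend/db.py.
ALTER TYPE access_levels RENAME VALUE 'None' TO 'PRIVATE';
ALTER TYPE access_levels RENAME VALUE 'Controlled' TO 'CONTROLLED';
ALTER TYPE access_levels RENAME VALUE 'Registered' TO 'REGISTERED';
ALTER TYPE access_levels RENAME VALUE 'Public' TO 'PUBLIC';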
8 changes: 4 additions & 4 deletions test/data/browser_test_data.sql
@@ -25,10 +25,10 @@ COPY data.reference_sets (id, reference_build, reference_name, ensembl_version,
 \.
 
 COPY data.dataset_versions (id, dataset, reference_set, dataset_version, dataset_description, terms, var_call_ref, available_from, ref_doi, data_contact_name, data_contact_link, num_variants, coverage_levels, portal_avail, file_access, beacon_access) FROM stdin;
-2 1 1 20170823 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE Registered Public
-3 1 1 20171025 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE Registered Public
-1 1 1 20161223 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE Registered Public
-4 1 1 20180409 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE Registered Public
+2 1 1 20170823 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE REGISTERED PUBLIC
+3 1 1 20171025 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE REGISTERED PUBLIC
+1 1 1 20161223 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE REGISTERED PUBLIC
+4 1 1 20180409 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE REGISTERED PUBLIC
 \.
 
 COPY data.coverage (id, dataset_version, chrom, pos, mean, median, coverage) FROM stdin;
8 changes: 4 additions & 4 deletions test/data/load_dummy_data.sql
@@ -25,10 +25,10 @@ INSERT INTO data.sample_sets (id, dataset, "collection", sample_size, phenotype)
 (1000003, 1000002, 1000003, 20, 'SamplePheno2 Coll2');
 
 INSERT INTO data.dataset_versions (id, dataset, reference_set, dataset_version, dataset_description, terms, var_call_ref, available_from, ref_doi, data_contact_name, data_contact_link, num_variants, coverage_levels, portal_avail, file_access, beacon_access)
-VALUES (1000001, 1000001, 1000001, 'Version 1-1', 'Dataset 1-1, description', 'Dataset 1-1, terms', 'CallRef11', '2017-01-01', 'datset11DOI', 'Gunnar Green', 'gunnar.green@example.com', 10, ARRAY[1,5,10], TRUE, 'Controlled', 'Public'),
-(1000002, 1000002, 1000001, 'Version 2-1', 'Dataset 2-1, description', 'Dataset 2-1, terms', 'CallRef21', '2017-02-01', 'datset21DOI', NULL, NULL, 100, ARRAY[1,5,10], TRUE, 'Controlled', 'Public'),
-(1000003, 1000002, 1000002, 'Version 2-2', 'Dataset 2-2, description', 'Dataset 2-2, terms', 'CallRef22', '2017-02-02', 'datset22DOI', 'Strummer project', 'https://example.com/strummer', 1000, ARRAY[1,5,10], TRUE, 'Controlled', 'Public'),
-(1000004, 1000002, 1000002, 'InvVer 2-3', 'Dataset 2-3, description', 'Dataset 2-3, terms', 'CallRef23', '2030-02-03', 'datset23DOI', 'Drummer project', 'https://example.com/drummer', 10000, ARRAY[1,5,10], TRUE, 'Controlled', 'Public');
+VALUES (1000001, 1000001, 1000001, 'Version 1-1', 'Dataset 1-1, description', 'Dataset 1-1, terms', 'CallRef11', '2017-01-01', 'datset11DOI', 'Gunnar Green', 'gunnar.green@example.com', 10, ARRAY[1,5,10], TRUE, 'CONTROLLED', 'PUBLIC'),
+(1000002, 1000002, 1000001, 'Version 2-1', 'Dataset 2-1, description', 'Dataset 2-1, terms', 'CallRef21', '2017-02-01', 'datset21DOI', NULL, NULL, 100, ARRAY[1,5,10], TRUE, 'CONTROLLED', 'PUBLIC'),
+(1000003, 1000002, 1000002, 'Version 2-2', 'Dataset 2-2, description', 'Dataset 2-2, terms', 'CallRef22', '2017-02-02', 'datset22DOI', 'Strummer project', 'https://example.com/strummer', 1000, ARRAY[1,5,10], TRUE, 'CONTROLLED', 'PUBLIC'),
+(1000004, 1000002, 1000002, 'InvVer 2-3', 'Dataset 2-3, description', 'Dataset 2-3, terms', 'CallRef23', '2030-02-03', 'datset23DOI', 'Drummer project', 'https://example.com/drummer', 10000, ARRAY[1,5,10], TRUE, 'CONTROLLED', 'PUBLIC');
 
 INSERT INTO data.dataset_files(id, dataset_version, basename, uri, file_size)
 VALUES (1000001, 1000001, 'File11-1', '/release/file111.txt', 100),
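Since all four enum labels changed, a quick way to confirm that a loaded fixture (or a migrated database) contains only the new uppercase values is to group on the two access columns. The query below is purely illustrative and not part of the commit.

-- Hypothetical sanity check: every row should report an uppercase access level.
SELECT file_access, beacon_access, count(*)
FROM data.dataset_versions
GROUP BY file_access, beacon_access;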
