Merge pull request #549 from NBISweden/feature/drop-var-call-ref
Remove var_call_ref from database.
viklund committed Apr 25, 2019
commit f716868 (2 parents: 65d0d26 + 940c0ee)
Showing 5 changed files with 11 additions and 12 deletions.
1 change: 1 addition & 0 deletions backend/application.py
@@ -172,6 +172,7 @@ def get(self, dataset, version=None):
 future_version = True

 ret = build_dataset_structure(version, user)
+ret['version']['var_call_ref'] = version.reference_set.reference_build
 ret['future'] = future_version

 self.finish(ret)
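
The field itself does not disappear from the API: the dataset endpoint keeps returning var_call_ref, but now derives it from the reference set attached to the dataset version instead of a stored column. A minimal sketch of the relationship this relies on, assuming peewee models roughly in the shape of backend/db.py (the definitions below are illustrative, not the project's actual code):

from peewee import CharField, ForeignKeyField, Model, SqliteDatabase

db = SqliteDatabase(':memory:')  # stand-in; the real backend talks to PostgreSQL via db.py

class ReferenceSet(Model):
    # Mirrors data.reference_sets; reference_build is what the API now
    # reports as var_call_ref (e.g. 'hg19').
    reference_build = CharField(null=True)
    reference_name = CharField(null=True)

    class Meta:
        database = db

class DatasetVersion(Model):
    # Mirrors data.dataset_versions after this commit: no var_call_ref
    # column, only a foreign key to the reference set.
    version = CharField()
    reference_set = ForeignKeyField(ReferenceSet, backref='dataset_versions')

    class Meta:
        database = db

# The handler change above then resolves the build through the foreign key:
# ret['version']['var_call_ref'] = version.reference_set.reference_build

Clients of the endpoint therefore see the same field name as before; only where the value comes from changes.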
1 change: 0 additions & 1 deletion backend/db.py
@@ -208,7 +208,6 @@ class Meta:
 version = CharField(db_column="dataset_version")
 description = TextField(db_column="dataset_description")
 terms = TextField()
-var_call_ref = CharField(null=True)
 available_from = DateTimeField()
 ref_doi = CharField(null=True)
 data_contact_name = CharField(null=True)
1 change: 0 additions & 1 deletion sql/data_schema.sql
@@ -128,7 +128,6 @@ CREATE TABLE IF NOT EXISTS data.dataset_versions (
 dataset_version varchar(20) NOT NULL,
 dataset_description text NOT NULL,
 terms text NOT NULL,
-var_call_ref varchar(50) DEFAULT NULL,
 available_from timestamp DEFAULT current_timestamp,
 ref_doi varchar(100) DEFAULT NULL,
 data_contact_name varchar(100) DEFAULT NULL,
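
The schema change only affects databases created fresh from data_schema.sql; an instance provisioned earlier would still carry the column. A possible one-off cleanup, not part of this commit and with placeholder connection settings, could be run through peewee:

from peewee import PostgresqlDatabase

# Placeholder connection details; substitute the deployment's real settings.
db = PostgresqlDatabase('swefreq', user='swefreq', host='localhost')
db.connect()
# IF EXISTS makes this a no-op on databases that never had the column.
db.execute_sql('ALTER TABLE data.dataset_versions DROP COLUMN IF EXISTS var_call_ref')
db.close()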
10 changes: 5 additions & 5 deletions test/data/browser_test_data.sql
@@ -24,11 +24,11 @@ COPY data.reference_sets (id, reference_build, reference_name, ensembl_version,
 1 \N swegen homo_sapiens_core_75_37 19 2.9.3
 \.

-COPY data.dataset_versions (id, dataset, reference_set, dataset_version, dataset_description, terms, var_call_ref, available_from, ref_doi, data_contact_name, data_contact_link, num_variants, coverage_levels, portal_avail, file_access, beacon_access) FROM stdin;
-2 1 1 20170823 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE REGISTERED PUBLIC
-3 1 1 20171025 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE REGISTERED PUBLIC
-1 1 1 20161223 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE REGISTERED PUBLIC
-4 1 1 20180409 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE REGISTERED PUBLIC
+COPY data.dataset_versions (id, dataset, reference_set, dataset_version, dataset_description, terms, available_from, ref_doi, data_contact_name, data_contact_link, num_variants, coverage_levels, portal_avail, file_access, beacon_access) FROM stdin;
+2 1 1 20170823 desc terms 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE REGISTERED PUBLIC
+3 1 1 20171025 desc terms 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE REGISTERED PUBLIC
+1 1 1 20161223 desc terms 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE REGISTERED PUBLIC
+4 1 1 20180409 desc terms 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE REGISTERED PUBLIC
 \.

 COPY data.coverage (id, dataset_version, chrom, pos, mean, median, coverage) FROM stdin;
10 changes: 5 additions & 5 deletions test/data/load_dummy_data.sql
@@ -24,11 +24,11 @@ INSERT INTO data.sample_sets (id, dataset, "collection", sample_size, phenotype)
 (1000002, 1000001, 1000002, 15, 'SamplePheno2 Coll1'),
 (1000003, 1000002, 1000003, 20, 'SamplePheno2 Coll2');

-INSERT INTO data.dataset_versions (id, dataset, reference_set, dataset_version, dataset_description, terms, var_call_ref, available_from, ref_doi, data_contact_name, data_contact_link, num_variants, coverage_levels, portal_avail, file_access, beacon_access)
-VALUES (1000001, 1000001, 1000001, 'Version 1-1', 'Dataset 1-1, description', 'Dataset 1-1, terms', 'CallRef11', '2017-01-01', 'datset11DOI', 'Gunnar Green', 'gunnar.green@example.com', 10, ARRAY[1,5,10], TRUE, 'CONTROLLED', 'PUBLIC'),
-(1000002, 1000002, 1000001, 'Version 2-1', 'Dataset 2-1, description', 'Dataset 2-1, terms', 'CallRef21', '2017-02-01', 'datset21DOI', NULL, NULL, 100, ARRAY[1,5,10], TRUE, 'CONTROLLED', 'PUBLIC'),
-(1000003, 1000002, 1000002, 'Version 2-2', 'Dataset 2-2, description', 'Dataset 2-2, terms', 'CallRef22', '2017-02-02', 'datset22DOI', 'Strummer project', 'https://example.com/strummer', 1000, ARRAY[1,5,10], TRUE, 'CONTROLLED', 'PUBLIC'),
-(1000004, 1000002, 1000002, 'InvVer 2-3', 'Dataset 2-3, description', 'Dataset 2-3, terms', 'CallRef23', '2030-02-03', 'datset23DOI', 'Drummer project', 'https://example.com/drummer', 10000, ARRAY[1,5,10], TRUE, 'CONTROLLED', 'PUBLIC');
+INSERT INTO data.dataset_versions (id, dataset, reference_set, dataset_version, dataset_description, terms, available_from, ref_doi, data_contact_name, data_contact_link, num_variants, coverage_levels, portal_avail, file_access, beacon_access)
+VALUES (1000001, 1000001, 1000001, 'Version 1-1', 'Dataset 1-1, description', 'Dataset 1-1, terms', '2017-01-01', 'datset11DOI', 'Gunnar Green', 'gunnar.green@example.com', 10, ARRAY[1,5,10], TRUE, 'CONTROLLED', 'PUBLIC'),
+(1000002, 1000002, 1000001, 'Version 2-1', 'Dataset 2-1, description', 'Dataset 2-1, terms', '2017-02-01', 'datset21DOI', NULL, NULL, 100, ARRAY[1,5,10], TRUE, 'CONTROLLED', 'PUBLIC'),
+(1000003, 1000002, 1000002, 'Version 2-2', 'Dataset 2-2, description', 'Dataset 2-2, terms', '2017-02-02', 'datset22DOI', 'Strummer project', 'https://example.com/strummer', 1000, ARRAY[1,5,10], TRUE, 'CONTROLLED', 'PUBLIC'),
+(1000004, 1000002, 1000002, 'InvVer 2-3', 'Dataset 2-3, description', 'Dataset 2-3, terms', '2030-02-03', 'datset23DOI', 'Drummer project', 'https://example.com/drummer', 10000, ARRAY[1,5,10], TRUE, 'CONTROLLED', 'PUBLIC');

 INSERT INTO data.dataset_files(id, dataset_version, basename, uri, file_size)
 VALUES (1000001, 1000001, 'File11-1', '/release/file111.txt', 100),
