Merge pull request #542 from NBISweden/feature/schema-access
Add columns for portal, file, and beacon access to data.dataset_versions
viklund authored Apr 17, 2019
2 parents 4f54193 + 5759189 commit 41894a0
Showing 3 changed files with 21 additions and 12 deletions.
13 changes: 11 additions & 2 deletions sql/data_schema.sql
@@ -115,6 +115,12 @@ CREATE TABLE IF NOT EXISTS data.sample_sets (
 phenotype varchar(50) NOT NULL
 );
 
+CREATE TYPE access_levels AS enum('None', 'Controlled', 'Registered', 'Public');
+-- None - do not make available
+-- Controlled - access must be permitted "manually"
+-- Registered - free access after registration
+-- Public - free access
+
 CREATE TABLE IF NOT EXISTS data.dataset_versions (
 id integer PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY,
 dataset integer NOT NULL REFERENCES data.datasets,
@@ -128,7 +134,10 @@ CREATE TABLE IF NOT EXISTS data.dataset_versions (
 data_contact_name varchar(100) DEFAULT NULL,
 data_contact_link varchar(100) DEFAULT NULL,
 num_variants integer DEFAULT NULL,
-coverage_levels integer[] DEFAULT NULL -- Levels used for data.coverage.coverage
+coverage_levels integer[] DEFAULT NULL, -- Levels used for data.coverage.coverage
+portal_avail boolean NOT NULL,
+file_access access_levels NOT NULL,
+beacon_access access_levels NOT NULL
 );
 
 CREATE TABLE IF NOT EXISTS data.dataset_files (
@@ -209,7 +218,7 @@ CREATE OR REPLACE VIEW data.dataset_version_current AS
 -- Indexes
 --
 
-CREATE INDEX coverage_pos_chrom ON data.coverage (chrom, pos);
+CREATE INDEX coverage_chrom_pos ON data.coverage (chrom, pos);
 CREATE INDEX features_gene ON data.features (gene);
 CREATE INDEX features_transcript ON data.features (transcript);
 CREATE INDEX features_transcript_type ON data.features (transcript, feature_type);
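For orientation, the new enum and columns could be exercised with a query along the lines of the sketch below. This is an illustrative example only, not part of the commit; the filter values are arbitrary.

-- Hypothetical usage sketch (not in this commit): list dataset versions that
-- are visible in the portal and whose files can be fetched after registration
-- or without any registration.
SELECT dv.id, dv.dataset_version, dv.portal_avail, dv.file_access, dv.beacon_access
FROM data.dataset_versions AS dv
WHERE dv.portal_avail
  AND dv.file_access IN ('Registered', 'Public');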
10 changes: 5 additions & 5 deletions test/data/browser_test_data.sql
@@ -24,11 +24,11 @@ COPY data.reference_sets (id, reference_build, reference_name, ensembl_version,
 1 \N swegen homo_sapiens_core_75_37 19 2.9.3
 \.
 
-COPY data.dataset_versions (id, dataset, reference_set, dataset_version, dataset_description, terms, var_call_ref, available_from, ref_doi, data_contact_name, data_contact_link, num_variants, coverage_levels) FROM stdin;
-2 1 1 20170823 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100}
-3 1 1 20171025 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100}
-1 1 1 20161223 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100}
-4 1 1 20180409 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100}
+COPY data.dataset_versions (id, dataset, reference_set, dataset_version, dataset_description, terms, var_call_ref, available_from, ref_doi, data_contact_name, data_contact_link, num_variants, coverage_levels, portal_avail, file_access, beacon_access) FROM stdin;
+2 1 1 20170823 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE Registered Public
+3 1 1 20171025 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE Registered Public
+1 1 1 20161223 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE Registered Public
+4 1 1 20180409 desc terms hg19 2001-01-01 00:00:00 doi place email \N {1,5,10,15,20,25,30,50,100} TRUE Registered Public
 \.
 
 COPY data.coverage (id, dataset_version, chrom, pos, mean, median, coverage) FROM stdin;
10 changes: 5 additions & 5 deletions test/data/load_dummy_data.sql
@@ -24,11 +24,11 @@ INSERT INTO data.sample_sets (id, dataset, "collection", sample_size, phenotype)
 (1000002, 1000001, 1000002, 15, 'SamplePheno2 Coll1'),
 (1000003, 1000002, 1000003, 20, 'SamplePheno2 Coll2');
 
-INSERT INTO data.dataset_versions (id, dataset, reference_set, dataset_version, dataset_description, terms, var_call_ref, available_from, ref_doi, data_contact_name, data_contact_link, num_variants, coverage_levels)
-VALUES (1000001, 1000001, 1000001, 'Version 1-1', 'Dataset 1-1, description', 'Dataset 1-1, terms', 'CallRef11', '2017-01-01', 'datset11DOI', 'Gunnar Green', 'gunnar.green@example.com', 10, ARRAY[1,5,10]),
-(1000002, 1000002, 1000001, 'Version 2-1', 'Dataset 2-1, description', 'Dataset 2-1, terms', 'CallRef21', '2017-02-01', 'datset21DOI', NULL, NULL, 100, ARRAY[1,5,10]),
-(1000003, 1000002, 1000002, 'Version 2-2', 'Dataset 2-2, description', 'Dataset 2-2, terms', 'CallRef22', '2017-02-02', 'datset22DOI', 'Strummer project', 'https://example.com/strummer', 1000, ARRAY[1,5,10]),
-(1000004, 1000002, 1000002, 'InvVer 2-3', 'Dataset 2-3, description', 'Dataset 2-3, terms', 'CallRef23', '2030-02-03', 'datset23DOI', 'Drummer project', 'https://example.com/drummer', 10000, ARRAY[1,5,10]);
+INSERT INTO data.dataset_versions (id, dataset, reference_set, dataset_version, dataset_description, terms, var_call_ref, available_from, ref_doi, data_contact_name, data_contact_link, num_variants, coverage_levels, portal_avail, file_access, beacon_access)
+VALUES (1000001, 1000001, 1000001, 'Version 1-1', 'Dataset 1-1, description', 'Dataset 1-1, terms', 'CallRef11', '2017-01-01', 'datset11DOI', 'Gunnar Green', 'gunnar.green@example.com', 10, ARRAY[1,5,10], TRUE, 'Registered', 'Public'),
+(1000002, 1000002, 1000001, 'Version 2-1', 'Dataset 2-1, description', 'Dataset 2-1, terms', 'CallRef21', '2017-02-01', 'datset21DOI', NULL, NULL, 100, ARRAY[1,5,10], TRUE, 'Registered', 'Public'),
+(1000003, 1000002, 1000002, 'Version 2-2', 'Dataset 2-2, description', 'Dataset 2-2, terms', 'CallRef22', '2017-02-02', 'datset22DOI', 'Strummer project', 'https://example.com/strummer', 1000, ARRAY[1,5,10], TRUE, 'Registered', 'Public'),
+(1000004, 1000002, 1000002, 'InvVer 2-3', 'Dataset 2-3, description', 'Dataset 2-3, terms', 'CallRef23', '2030-02-03', 'datset23DOI', 'Drummer project', 'https://example.com/drummer', 10000, ARRAY[1,5,10], TRUE, 'Registered', 'Public');
 
 INSERT INTO data.dataset_files(id, dataset_version, basename, uri, file_size)
 VALUES (1000001, 1000001, 'File11-1', '/release/file111.txt', 100),
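As a quick sanity check after loading the dummy data, one could confirm that the new NOT NULL access columns were populated for every row. The query below is a hypothetical example, not part of the commit.

-- Hypothetical check (not in this commit): verify the access columns on the
-- dummy dataset versions.
SELECT id, portal_avail, file_access, beacon_access
FROM data.dataset_versions
ORDER BY id;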
