Skip to content

Commit

Permalink
added references
Browse files Browse the repository at this point in the history
  • Loading branch information
rvosa committed Sep 2, 2017
1 parent 18a56e3 commit 24a116e
Show file tree
Hide file tree
Showing 2 changed files with 39 additions and 0 deletions.
33 changes: 33 additions & 0 deletions data/references.bib
Expand Up @@ -98,6 +98,20 @@ @article{Gewin2016
volume = {529},
year = {2016}
}
@article{Goodman2016,
abstract = {The language and conceptual framework of “research reproducibility” are nonstandard and unsettled across the sciences. In this Perspective, we review an array of explicit and implicit definitions of reproducibility and related terminology, and discuss how to avoid potential misunderstandings when these terms are used as a surrogate for “truth.”},
author = {Goodman, S. N. and Fanelli, D. and Ioannidis, J. P. A.},
doi = {10.1126/scitranslmed.aaf5027},
issn = {1946-6234},
journal = {Science Translational Medicine},
number = {341},
pages = {341ps12},
pmid = {27252173},
title = {What Does Research Reproducibility Mean?},
url = {http://stm.sciencemag.org/content/8/341/341ps12},
volume = {8},
year = {2016}
}
@article{Hart2016,
abstract = {Data is the central currency of science, but the nature of scientific data has changed dramatically with the rapid pace of technology. This change has led to the development of a wide variety of data formats, dataset sizes, data complexity, data use cases, and data sharing practices. Improvements in high throughput DNA sequencing, sustained institutional support for large sensor networks, and sky surveys with large-format digital cameras have created massive quantities of data. At the same time, the combination of increasingly diverse research teams and data aggregation in portals (e.g. for biodiversity data, GBIF or iDigBio) necessitates increased coordination among data collectors and institutions. As a consequence, “data” can now mean anything from petabytes of information stored in professionally-maintained databases, through spreadsheets on a single computer, to hand-written tables in lab notebooks on shelves. All remain important, but data curation practices must continue to keep pace with the changes brought about by new forms and practices of data collection and storage.},
author = {Hart, E. M. and Barmby, P. and LeBauer, D. and Michonneau, F. and Mount, S. and Mulrooney, P. and Poisot, T. and Woo, K. H. and Zimmerman, N. B. and Hollister, J. W.},
Expand Down Expand Up @@ -214,6 +228,25 @@ @article{Michener2015
volume = {11},
year = {2015}
}
@article{Mobley2013,
abstract = {BACKGROUND: The pharmaceutical and biotechnology industries depend on findings from academic investigators prior to initiating programs to develop new diagnostic and therapeutic agents to benefit cancer patients. The success of these programs depends on the validity of published findings. This validity, represented by the reproducibility of published findings, has come into question recently as investigators from companies have raised the issue of poor reproducibility of published results from academic laboratories. Furthermore, retraction rates in high impact journals are climbing. METHODS AND FINDINGS: To examine a microcosm of the academic experience with data reproducibility, we surveyed the faculty and trainees at MD Anderson Cancer Center using an anonymous computerized questionnaire; we sought to ascertain the frequency and potential causes of non-reproducible data. We found that ∼50{\%} of respondents had experienced at least one episode of the inability to reproduce published data; many who pursued this issue with the original authors were never able to identify the reason for the lack of reproducibility; some were even met with a less than "collegial" interaction. CONCLUSIONS: These results suggest that the problem of data reproducibility is real. Biomedical science needs to establish processes to decrease the problem and adjudicate discrepancies in findings when they are discovered.},
author = {Mobley, A. and Linder, S. K. and Braeuer, R. and Ellis, L. M. and Zwelling, L.},
doi = {10.1371/journal.pone.0063221},
editor = {Arakawa, Hirofumi},
issn = {1932-6203},
journal = {PLoS ONE},
month = may,
number = {5},
pages = {e63221},
pmid = {23691000},
publisher = {Public Library of Science},
title = {A Survey on Data Reproducibility in Cancer Research Provides Insights into Our Limited Ability to Translate Findings from the Laboratory to the Clinic},
url = {http://dx.plos.org/10.1371/journal.pone.0063221},
volume = {8},
year = {2013}
}
@article{Noble2009,
abstract = {Most bioinformatics coursework focuses on algorithms, with perhaps some components devoted to learning programming skills and learning how to use existing bioinformatics software. Unfortunately, for students who are preparing for a research career, this type of curriculum fails to address many of the day-to-day organizational challenges associated with performing computational experiments. In practice, the principles behind organizing and documenting computational experiments are often learned on the fly, and this learning is strongly influenced by personal predilections as well as by chance interactions with collaborators or colleagues. The purpose of this article is to describe one good strategy for carrying out computational experiments. I will not describe profound issues such as how to formulate hypotheses, design experiments, or draw conclusions. Rather, I will focus on relatively mundane issues such as organizing files and directories and documenting progress. These issues are important because poor organizational choices can lead to significantly slower research progress. I do not claim that the strategies I outline here are optimal. These are simply the principles and practices that I have developed over 12 years of bioinformatics research, augmented with various suggestions from other researchers with whom I have discussed these issues.},
author = {Noble, W. S.},
Expand Down
6 changes: 6 additions & 0 deletions docs/REFERENCES/README.md
Expand Up @@ -18,6 +18,9 @@ References
<a name="Gewin2016"></a>
**Gewin, V.** 2016. Data sharing: An open mind on open data. _Nature_. **529** (7584): 117--119. [[10.1038/nj7584-117a](http://doi.org/10.1038/nj7584-117a)] `[id:Gewin2016]`

<a name="Goodman2016"></a>
**Goodman, S. N., Fanelli, D., Ioannidis, J. P. A.** 2016. What does research reproducibility mean? _Science Translational Medicine_. **8** (341): 341ps12--341ps12. [[10.1126/scitranslmed.aaf5027](http://doi.org/10.1126/scitranslmed.aaf5027)] `[id:Goodman2016]`

<a name="Hart2016"></a>
**Hart, E. M., Barmby, P., LeBauer, D., Michonneau, F., Mount, S., Mulrooney, P., Poisot, T., Woo, K. H., Zimmerman, N. B., Hollister, J. W.** 2016. Ten Simple Rules for Digital Data Storage. _PLoS Computational Biology_. **12** (10): e1005097. [[10.1371/journal.pcbi.1005097](http://doi.org/10.1371/journal.pcbi.1005097)] `[id:Hart2016]`

Expand All @@ -39,6 +42,9 @@ References
<a name="Michener2015"></a>
**Michener, W. K.** 2015. Ten Simple Rules for Creating a Good Data Management Plan. _PLoS Computational Biology_. **11** (10): e1004525. [[10.1371/journal.pcbi.1004525](http://doi.org/10.1371/journal.pcbi.1004525)] `[id:Michener2015]`

<a name="Mobley2013"></a>
**Mobley, A., Linder, S. K., Braeuer, R., Ellis, L. M., Zwelling, L.** 2013. A Survey on Data Reproducibility in Cancer Research Provides Insights into Our Limited Ability to Translate Findings from the Laboratory to the Clinic. _PLoS ONE_. **8** (5): e63221. [[10.1371/journal.pone.0063221](http://doi.org/10.1371/journal.pone.0063221)] `[id:Mobley2013]`

<a name="Noble2009"></a>
**Noble, W. S.** 2009. A quick guide to organizing computational biology projects. _PLoS Computational Biology_. **5** (7): e1000424. [[10.1371/journal.pcbi.1000424](http://doi.org/10.1371/journal.pcbi.1000424)] `[id:Noble2009]`

Expand Down

0 comments on commit 24a116e

Please sign in to comment.