Merge pull request #4306 from openjournals/joss.05321
Merging automatically
editorialbot committed Jun 13, 2023
2 parents 007bb6d + 31cf894 commit ff9c0e9
Showing 5 changed files with 1,246 additions and 0 deletions.
385 changes: 385 additions & 0 deletions joss.05321/10.21105.joss.05321.crossref.xml
@@ -0,0 +1,385 @@
<?xml version="1.0" encoding="UTF-8"?>
<doi_batch xmlns="http://www.crossref.org/schema/5.3.1"
xmlns:ai="http://www.crossref.org/AccessIndicators.xsd"
xmlns:rel="http://www.crossref.org/relations.xsd"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
version="5.3.1"
xsi:schemaLocation="http://www.crossref.org/schema/5.3.1 http://www.crossref.org/schemas/crossref5.3.1.xsd">
<head>
<doi_batch_id>20230613T085024-7ba720787ea4dd0ba5011d7cb37db6e8e90341b6</doi_batch_id>
<timestamp>20230613085024</timestamp>
<depositor>
<depositor_name>JOSS Admin</depositor_name>
<email_address>admin@theoj.org</email_address>
</depositor>
<registrant>The Open Journal</registrant>
</head>
<body>
<journal>
<journal_metadata>
<full_title>Journal of Open Source Software</full_title>
<abbrev_title>JOSS</abbrev_title>
<issn media_type="electronic">2475-9066</issn>
<doi_data>
<doi>10.21105/joss</doi>
<resource>https://joss.theoj.org/</resource>
</doi_data>
</journal_metadata>
<journal_issue>
<publication_date media_type="online">
<month>06</month>
<year>2023</year>
</publication_date>
<journal_volume>
<volume>8</volume>
</journal_volume>
<issue>86</issue>
</journal_issue>
<journal_article publication_type="full_text">
<titles>
<title>stimupy: A Python package for creating stimuli in
vision science</title>
</titles>
<contributors>
<person_name sequence="first" contributor_role="author">
<given_name>Lynn</given_name>
<surname>Schmittwilken</surname>
<ORCID>https://orcid.org/0000-0003-3621-9576</ORCID>
</person_name>
<person_name sequence="additional"
contributor_role="author">
<given_name>Marianne</given_name>
<surname>Maertens</surname>
</person_name>
<person_name sequence="additional"
contributor_role="author">
<given_name>Joris</given_name>
<surname>Vincent</surname>
<ORCID>https://orcid.org/0000-0001-6882-5584</ORCID>
</person_name>
</contributors>
<publication_date>
<month>06</month>
<day>13</day>
<year>2023</year>
</publication_date>
<pages>
<first_page>5321</first_page>
</pages>
<publisher_item>
<identifier id_type="doi">10.21105/joss.05321</identifier>
</publisher_item>
<ai:program name="AccessIndicators">
<ai:license_ref applies_to="vor">http://creativecommons.org/licenses/by/4.0/</ai:license_ref>
<ai:license_ref applies_to="am">http://creativecommons.org/licenses/by/4.0/</ai:license_ref>
<ai:license_ref applies_to="tdm">http://creativecommons.org/licenses/by/4.0/</ai:license_ref>
</ai:program>
<rel:program>
<rel:related_item>
<rel:description>Software archive</rel:description>
<rel:inter_work_relation relationship-type="references" identifier-type="doi">10.17605/OSF.IO/Z439V</rel:inter_work_relation>
</rel:related_item>
<rel:related_item>
<rel:description>GitHub review issue</rel:description>
<rel:inter_work_relation relationship-type="hasReview" identifier-type="uri">https://github.com/openjournals/joss-reviews/issues/5321</rel:inter_work_relation>
</rel:related_item>
</rel:program>
<doi_data>
<doi>10.21105/joss.05321</doi>
<resource>https://joss.theoj.org/papers/10.21105/joss.05321</resource>
<collection property="text-mining">
<item>
<resource mime_type="application/pdf">https://joss.theoj.org/papers/10.21105/joss.05321.pdf</resource>
</item>
</collection>
</doi_data>
<citation_list>
<citation key="aguilar.maertens.ea2022">
<article_title>Characterizing perceptual brightness scales
for White’s effect using conjoint measurement</article_title>
<author>Aguilar</author>
<journal_title>Journal of Vision</journal_title>
<volume>22</volume>
<doi>10.1167/jov.22.14.3519</doi>
<cYear>2022</cYear>
<unstructured_citation>Aguilar, G., Maertens, M., &amp;
Vincent, J. (2022). Characterizing perceptual brightness scales for
White’s effect using conjoint measurement. Journal of Vision, 22, 3519.
https://doi.org/10.1167/jov.22.14.3519</unstructured_citation>
</citation>
<citation key="brainard1997">
<article_title>The psychophysics toolbox</article_title>
<author>Brainard</author>
<journal_title>Spatial vision</journal_title>
<issue>4</issue>
<volume>10</volume>
<doi>10.1163/156856897X00357</doi>
<cYear>1997</cYear>
<unstructured_citation>Brainard, D. H. (1997). The
psychophysics toolbox. Spatial Vision, 10(4), 433–436.
https://doi.org/10.1163/156856897X00357</unstructured_citation>
</citation>
<citation key="carney1999">
<article_title>Development of an image/threshold database
for designing and testing human vision models</article_title>
<author>Carney</author>
<journal_title>Human vision and electronic imaging
IV</journal_title>
<volume>3644</volume>
<doi>10.1117/12.348473</doi>
<cYear>1999</cYear>
<unstructured_citation>Carney, T., Klein, S. A., Tyler, C.
W., Silverstein, A. D., Beutter, B., Levi, D., Watson, A. B., Reeves, A.
J., Norcia, A. M., Chen, C.-C., &amp; others. (1999). Development of an
image/threshold database for designing and testing human vision models.
Human Vision and Electronic Imaging IV, 3644, 542–551.
https://doi.org/10.1117/12.348473</unstructured_citation>
</citation>
<citation key="deng2009">
<article_title>ImageNet: A large-scale hierarchical image
database</article_title>
<author>Deng</author>
<journal_title>IEEE conference on computer vision and
pattern recognition</journal_title>
<doi>10.1109/CVPR.2009.5206848</doi>
<cYear>2009</cYear>
<unstructured_citation>Deng, J., Dong, W., Socher, R., Li,
L.-J., Li, K., &amp; Fei-Fei, L. (2009). ImageNet: A large-scale
hierarchical image database. IEEE Conference on Computer Vision and
Pattern Recognition, 248–255.
https://doi.org/10.1109/CVPR.2009.5206848</unstructured_citation>
</citation>
<citation key="harris2020">
<article_title>Array programming with NumPy</article_title>
<author>Harris</author>
<journal_title>Nature</journal_title>
<issue>7825</issue>
<volume>585</volume>
<doi>10.1038/s41586-020-2649-2</doi>
<cYear>2020</cYear>
<unstructured_citation>Harris, C. R., Millman, K. J., Walt,
S. J. van der, Gommers, R., Virtanen, P., Cournapeau, D., Wieser, E.,
Taylor, J., Berg, S., Smith, N. J., Kern, R., Picus, M., Hoyer, S.,
Kerkwijk, M. H. van, Brett, M., Haldane, A., Río, J. F. del, Wiebe, M.,
Peterson, P., … Oliphant, T. E. (2020). Array programming with NumPy.
Nature, 585(7825), 357–362.
https://doi.org/10.1038/s41586-020-2649-2</unstructured_citation>
</citation>
<citation key="kluyver2016">
<article_title>Jupyter notebooks – a publishing format for
reproducible computational workflows</article_title>
<author>Kluyver</author>
<journal_title>Positioning and power in academic publishing:
Players, agents and agendas</journal_title>
<doi>10.3233/978-1-61499-649-1-87</doi>
<cYear>2016</cYear>
<unstructured_citation>Kluyver, T., Ragan-Kelley, B., Pérez,
F., Granger, B., Bussonnier, M., Frederic, J., Kelley, K., Hamrick, J.,
Grout, J., Corlay, S., Ivanov, P., Avila, D., Abdalla, S., &amp;
Willing, C. (2016). Jupyter notebooks – a publishing format for
reproducible computational workflows. In F. Loizides &amp; B. Schmidt
(Eds.), Positioning and power in academic publishing: Players, agents
and agendas (pp. 87–90). IOS Press.
https://doi.org/10.3233/978-1-61499-649-1-87</unstructured_citation>
</citation>
<citation key="OCTA">
<article_title>The order &amp; complexity toolbox for
aesthetics (OCTA): A systematic approach to study the relations between
order, complexity, and aesthetic appreciation</article_title>
<author>Van Geert</author>
<journal_title>Behavior Research Methods</journal_title>
<doi>10.3758/s13428-022-01900-w</doi>
<cYear>2022</cYear>
<unstructured_citation>Van Geert, E., Bossens, C., &amp;
Wagemans, J. (2022). The order &amp; complexity toolbox for aesthetics
(OCTA): A systematic approach to study the relations between order,
complexity, and aesthetic appreciation. Behavior Research Methods.
https://doi.org/10.3758/s13428-022-01900-w</unstructured_citation>
</citation>
<citation key="makowski2021">
<article_title>A parametric framework to generate visual
illusions using python</article_title>
<author>Makowski</author>
<journal_title>Perception</journal_title>
<issue>11</issue>
<volume>50</volume>
<doi>10.1177/03010066211057347</doi>
<cYear>2021</cYear>
<unstructured_citation>Makowski, D., Lau, Z. J., Pham, T.,
Paul B., W., &amp; Annabel C., S. (2021). A parametric framework to
generate visual illusions using python. Perception, 50(11), 950–965.
https://doi.org/10.1177/03010066211057347</unstructured_citation>
</citation>
<citation key="martin2001">
<article_title>A database of human segmented natural images
and its application to evaluating segmentation algorithms and measuring
ecological statistics</article_title>
<author>Martin</author>
<journal_title>8th IEEE international conference on
computer vision</journal_title>
<volume>2</volume>
<doi>10.1109/ICCV.2001.937655</doi>
<cYear>2001</cYear>
<unstructured_citation>Martin, D., Fowlkes, C., Tal, D.,
&amp; Malik, J. (2001). A database of human segmented natural images and
its application to evaluating segmentation algorithms and measuring
ecological statistics. 8th IEEE International Conference on Computer
Vision, 2, 416–423.
https://doi.org/10.1109/ICCV.2001.937655</unstructured_citation>
</citation>
<citation key="murray2020">
<article_title>A model of lightness perception guided by
probabilistic assumptions about lighting and reflectance</article_title>
<author>Murray</author>
<journal_title>Journal of Vision</journal_title>
<issue>7</issue>
<volume>20</volume>
<doi>10.1167/jov.20.7.28</doi>
<issn>1534-7362</issn>
<cYear>2020</cYear>
<unstructured_citation>Murray, R. F. (2020). A model of
lightness perception guided by probabilistic assumptions about lighting
and reflectance. Journal of Vision, 20(7), 28.
https://doi.org/10.1167/jov.20.7.28</unstructured_citation>
</citation>
<citation key="murray2021">
<article_title>Lightness perception in complex
scenes</article_title>
<author>Murray</author>
<journal_title>Annual Review of Vision
Science</journal_title>
<volume>7</volume>
<doi>10.1146/annurev-vision-093019-115159</doi>
<cYear>2021</cYear>
<unstructured_citation>Murray, R. F. (2021). Lightness
perception in complex scenes. Annual Review of Vision Science, 7.
https://doi.org/10.1146/annurev-vision-093019-115159</unstructured_citation>
</citation>
<citation key="peirce2019">
<article_title>PsychoPy2: Experiments in behavior made
easy</article_title>
<author>Peirce</author>
<journal_title>Behavior research methods</journal_title>
<issue>1</issue>
<volume>51</volume>
<doi>10.3758/s13428-018-01193-y</doi>
<cYear>2019</cYear>
<unstructured_citation>Peirce, J., Gray, J. R., Simpson, S.,
MacAskill, M., Hoechenberger, R., Sogo, H., Kastman, E., &amp; Lindelov,
J. K. (2019). PsychoPy2: Experiments in behavior made easy. Behavior
Research Methods, 51(1), 195–203.
https://doi.org/10.3758/s13428-018-01193-y</unstructured_citation>
</citation>
<citation key="schmittwilken2022a">
<article_title>BRENCH: An open-source framework for
b(r)enchmarking brightness models</article_title>
<author>Schmittwilken</author>
<journal_title>Journal of vision</journal_title>
<volume>22</volume>
<doi>10.1167/jov.22.3.36</doi>
<cYear>2022</cYear>
<unstructured_citation>Schmittwilken, L., Matic, M.,
Maertens, M., &amp; Vincent, J. (2022). BRENCH: An open-source framework
for b(r)enchmarking brightness models [Talk]. Journal of Vision, 22, 36.
https://doi.org/10.1167/jov.22.3.36</unstructured_citation>
</citation>
<citation key="schmittwilken2022b">
<article_title>Fixational eye movements enable robust edge
detection</article_title>
<author>Schmittwilken</author>
<journal_title>Journal of Vision</journal_title>
<issue>8</issue>
<volume>22</volume>
<doi>10.1167/jov.22.8.5</doi>
<cYear>2022</cYear>
<unstructured_citation>Schmittwilken, L., &amp; Maertens, M.
(2022). Fixational eye movements enable robust edge detection. Journal
of Vision, 22(8), 1–12.
https://doi.org/10.1167/jov.22.8.5</unstructured_citation>
</citation>
<citation key="schmittwilken2022c">
<article_title>Medium spatial frequencies mask edges most
effectively</article_title>
<author>Schmittwilken</author>
<journal_title>Journal of vision</journal_title>
<volume>22</volume>
<doi>10.1167/jov.22.14.4041</doi>
<cYear>2022</cYear>
<unstructured_citation>Schmittwilken, L., &amp; Maertens, M.
(2022). Medium spatial frequencies mask edges most effectively [Poster].
Journal of Vision, 22.
https://doi.org/10.1167/jov.22.14.4041</unstructured_citation>
</citation>
<citation key="vincent.maertens.ea2021">
<article_title>Perceptual brightness scales in a White’s
effect stimulus are not captured by multiscale spatial filtering models
of brightness perception</article_title>
<author>Vincent</author>
<journal_title>Journal of Vision</journal_title>
<volume>22</volume>
<doi>10.1167/jov.22.3.20</doi>
<cYear>2022</cYear>
<unstructured_citation>Vincent, J., Maertens, M., &amp;
Aguilar, G. (2022). Perceptual brightness scales in a White’s effect
stimulus are not captured by multiscale spatial filtering models of
brightness perception [Poster]. Journal of Vision, 22, 20.
https://doi.org/10.1167/jov.22.3.20</unstructured_citation>
</citation>
<citation key="vincent.maertens.ea2022">
<article_title>Perceptual Brightness Scales for White’s
Effect Constrain Computational Models of Brightness
Perception</article_title>
<author>Vincent</author>
<journal_title>Journal of Vision</journal_title>
<volume>22</volume>
<doi>10.1167/jov.22.14.4160</doi>
<cYear>2022</cYear>
<unstructured_citation>Vincent, J., Maertens, M., &amp;
Aguilar, G. (2022). Perceptual Brightness Scales for White’s Effect
Constrain Computational Models of Brightness Perception. Journal of
Vision, 22, 4160.
https://doi.org/10.1167/jov.22.14.4160</unstructured_citation>
</citation>
<citation key="vincent.maertens2021">
<article_title>The missing linking functions in
computational models of brightness perception</article_title>
<author>Vincent</author>
<cYear>2021</cYear>
<unstructured_citation>Vincent, J., &amp; Maertens, M.
(2021). The missing linking functions in computational models of
brightness perception [Talk]. OSF. osf.io/9bca7</unstructured_citation>
</citation>
<citation key="vincent.maertens2021a">
<article_title>A history and modular future of multiscale
spatial filtering models</article_title>
<author>Vincent</author>
<journal_title>Journal of Vision</journal_title>
<volume>21</volume>
<doi>10.1167/jov.21.9.2824</doi>
<cYear>2021</cYear>
<unstructured_citation>Vincent, J., &amp; Maertens, M.
(2021). A history and modular future of multiscale spatial filtering
models. Journal of Vision, 21, 2824.
https://doi.org/10.1167/jov.21.9.2824</unstructured_citation>
</citation>
<citation key="wang2008">
<article_title>Maximum differentiation (MAD) competition: A
methodology for comparing computational models of perceptual
quantities</article_title>
<author>Wang</author>
<journal_title>Journal of Vision</journal_title>
<issue>12</issue>
<volume>8</volume>
<doi>10.1167/8.12.8</doi>
<cYear>2008</cYear>
<unstructured_citation>Wang, Z., &amp; Simoncelli, E. P.
(2008). Maximum differentiation (MAD) competition: A methodology for
comparing computational models of perceptual quantities. Journal of
Vision, 8(12), 8–8.
https://doi.org/10.1167/8.12.8</unstructured_citation>
</citation>
</citation_list>
</journal_article>
</journal>
</body>
</doi_batch>
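
For reference, a minimal sketch of how one might read this deposit programmatically: it extracts the article DOI and the reference DOIs from a Crossref 5.3.1 file like the one above using Python's standard-library `xml.etree.ElementTree`. The namespace URI and element names are taken directly from the XML shown; the file path is taken from the diff header and assumed to be readable from the working directory.

```python
import xml.etree.ElementTree as ET

# Default namespace of the deposit above (Crossref schema 5.3.1).
NS = {"cr": "http://www.crossref.org/schema/5.3.1"}

# Path taken from the diff header; adjust to wherever the file lives locally.
tree = ET.parse("joss.05321/10.21105.joss.05321.crossref.xml")
article = tree.getroot().find(".//cr:journal_article", NS)

# The article-level DOI sits in the journal_article's own doi_data block,
# distinct from the journal-level doi_data (10.21105/joss).
print("Article DOI:", article.find("cr:doi_data/cr:doi", NS).text)

# Walk the citation_list; not every citation carries a <doi> element.
for citation in article.findall("cr:citation_list/cr:citation", NS):
    ref_doi = citation.findtext("cr:doi", default="(no DOI)", namespaces=NS)
    print(f"{citation.get('key')}: {ref_doi}")
```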