{"payload":{"pageCount":1,"repositories":[{"type":"Public","name":"Fromm-et-al-2024","owner":"PerForm-Lab-RIT","isFork":false,"description":"Data assocated with: \"Multisensory stimuli facilitate low-level perceptual learning on a difficult global motion task in virtual reality\"","allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-23T18:34:22.706Z"}},{"type":"Public","name":"domain-adaptation-eye-tracking","owner":"PerForm-Lab-RIT","isFork":false,"description":"Official Implementation for the paper Deep Domain Adaptation: A Sim2Real Neural Approach for Improving Eye-Tracking System.","allTopics":["deep-learning","neural-networks","domain-adaptation","cycle-gan","domain-generalization","eye-segmentation","contrastive-learning"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":14,"forksCount":1,"license":"Other","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-07T23:32:07.319Z"}},{"type":"Public","name":"Interception_UXF_Analysis","owner":"PerForm-Lab-RIT","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Jupyter Notebook","color":"#DA5B0B"},"pullRequestCount":0,"issueCount":0,"starsCount":1,"forksCount":0,"license":"Other","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-17T22:32:14.384Z"}},{"type":"Public","name":"Deep-Learning-Eye-Tracking","owner":"PerForm-Lab-RIT","isFork":false,"description":"A compilation of code relevant to \"Using Deep Learning to Increase Eye-Tracking Robustness, Accuracy, and Precision in Virtual Reality\"","allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-17T22:04:37.399Z"}},{"type":"Public","name":"pupil-core-pipeline","owner":"PerForm-Lab-RIT","isFork":true,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":1,"forksCount":1,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-04-03T21:37:33.580Z"}},{"type":"Public","name":"pupil","owner":"PerForm-Lab-RIT","isFork":true,"description":"Open source eye tracking ","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":672,"license":"GNU Lesser General Public License v3.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-04-03T21:35:26.744Z"}},{"type":"Public","name":"Pupil-Labs-Core-RITnet-Plugins","owner":"PerForm-Lab-RIT","isFork":false,"description":"Pupil detector plugins for Pupil Labs Core that integrate RITnet models.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":1,"forksCount":2,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-03-29T17:24:04.395Z"}},{"type":"Public","name":"retinal_flow_toolkit","owner":"PerForm-Lab-RIT","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Jupyter Notebook","color":"#DA5B0B"},"pullRequestCount":0,"issueCount":1,"starsCount":0,"forksCount":0,"license":"GNU General Public License 
v3.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-11-01T14:33:35.469Z"}},{"type":"Public","name":"procedural_bend_in_road","owner":"PerForm-Lab-RIT","isFork":true,"description":"Path creation asset for Unity game development","allTopics":[],"primaryLanguage":{"name":"C#","color":"#178600"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":312,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2022-04-04T01:53:42.003Z"}},{"type":"Public","name":"CBVR-AttentionExperiment","owner":"PerForm-Lab-RIT","isFork":false,"description":"An attention experiment running in Unity involving motion dot stimuli","allTopics":[],"primaryLanguage":{"name":"C#","color":"#178600"},"pullRequestCount":0,"issueCount":0,"starsCount":2,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2022-03-18T17:23:43.609Z"}},{"type":"Public","name":"CBVRGame","owner":"PerForm-Lab-RIT","isFork":false,"description":"A game intended to perform visual rehabilitation trials and record data","allTopics":[],"primaryLanguage":{"name":"C#","color":"#178600"},"pullRequestCount":0,"issueCount":0,"starsCount":1,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2021-05-14T01:18:52.104Z"}},{"type":"Public","name":"RITnet","owner":"PerForm-Lab-RIT","isFork":true,"description":"This is a winning model of OpenEDS Semantic Segmentation Challenge","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":27,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2021-03-02T21:01:00.389Z"}},{"type":"Public archive","name":"StepOver","owner":"PerForm-Lab-RIT","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2016-07-07T18:08:15.177Z"}}],"repositoryCount":13,"userInfo":null,"searchable":true,"definitions":[],"typeFilters":[{"id":"all","text":"All"},{"id":"public","text":"Public"},{"id":"source","text":"Sources"},{"id":"fork","text":"Forks"},{"id":"archived","text":"Archived"},{"id":"template","text":"Templates"}],"compactMode":false},"title":"PerForm-Lab-RIT repositories"}