This repository has been archived by the owner on Nov 3, 2021. It is now read-only.

Commit

Bug 1042944 - added screen reader functionality to the system accessibility component. r=alive

yzen committed Jul 30, 2014
1 parent 29b4803 commit e1fb6d2
Showing 7 changed files with 555 additions and 47 deletions.
259 changes: 216 additions & 43 deletions apps/system/js/accessibility.js
@@ -8,6 +8,7 @@
* gestures using the hardware buttons of the phone. To toggle the setting,
* the user must press volume up, then volume down three times in a row.
* @class Accessibility
* @requires SettingsListener
*/
function Accessibility() {}
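
For reference, the toggle described in the comment above maps to an alternating sequence of hardware-button events. A minimal sketch of that sequence, assuming the events reach the component as CustomEvents carrying a detail.type field (the same shape the handleEvent switch further down in this diff reads); the snippet is an illustration, not part of the commit, and in practice the presses must also arrive within the component's timing window:

// Hypothetical illustration: the three up/down press pairs the detector
// expects, dispatched as mozChromeEvent CustomEvents with the detail.type
// values handled by handleVolumeButtonPress() below.
['volume-up-button-press', 'volume-down-button-press',
 'volume-up-button-press', 'volume-down-button-press',
 'volume-up-button-press', 'volume-down-button-press'].forEach(function(type) {
  window.dispatchEvent(new CustomEvent('mozChromeEvent', {
    detail: { type: type }
  }));
});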

@@ -66,16 +67,30 @@
* @memberof Accessibility.prototype
*/
settings: {
'accessibility.screenreader': false
'accessibility.screenreader': false,
'audio.volume.content': 15,
'accessibility.screenreader-rate': 0
},

/**
* Speech Synthesis
* Audio used by the screen reader.
* Note: Lazy-loaded when first needed
* @type {Object}
* @memberof Accessibility.prototype
*/
get speechSynthesis() {
return window.speechSynthesis;
sounds: {
clickedAudio: null,
vcKeyAudio: null,
vcMoveAudio: null
},

/**
* URLs for screen reader audio files.
* @type {Object}
*/
soundURLs: {
clickedAudio: './resources/sounds/screen_reader_clicked.ogg',
vcKeyAudio: './resources/sounds/screen_reader_virtual_cursor_key.ogg',
vcMoveAudio: './resources/sounds/screen_reader_virtual_cursor_move.ogg'
},

/**
@@ -86,13 +101,12 @@
window.addEventListener('mozChromeEvent', this);

// Attach all observers.
for (var settingKey in this.settings) {
/* jshint loopfunc:true */
SettingsListener.observe(settingKey,
this.settings[settingKey], function observe(aValue) {
Object.keys(this.settings).forEach(function attach(settingKey) {
SettingsListener.observe(settingKey, this.settings[settingKey],
function observe(aValue) {
this.settings[settingKey] = aValue;
}.bind(this));
}
}, this);
},

/**
@@ -119,20 +133,14 @@
},

/**
* Handle a mozChromeEvent event.
* @param {Object} aEvent mozChromeEvent.
* Handle volume up and volume down mozChromeEvents.
* @param {Object} aEvent a mozChromeEvent object.
* @memberof Accessibility.prototype
*/
handleEvent: function ar_handleEvent(aEvent) {
handleVolumeButtonPress: function ar_handleVolumeButtonPress(aEvent) {
var type = aEvent.detail.type;
var timeStamp = aEvent.timeStamp;
var expectedEvent = this.expectedEvent;

if (type !== 'volume-up-button-press' &&
type !== 'volume-down-button-press') {
return;
}

if (type !== expectedEvent.type || timeStamp > expectedEvent.timeStamp) {
this.reset();
if (type !== 'volume-up-button-press') {
@@ -153,14 +161,14 @@
this.reset();

if (!this.isSpeaking && timeStamp > this.expectedCompleteTimeStamp) {
this.speechSynthesis.cancel();
speechSynthesizer.cancel();
this.announceScreenReader(function onEnd() {
this.resetSpeaking(timeStamp + this.REPEAT_BUTTON_PRESS);
}.bind(this));
return;
}

this.speechSynthesis.cancel();
speechSynthesizer.cancel();
this.resetSpeaking();
SettingsListener.getSettingsLock().set({
'accessibility.screenreader':
@@ -169,30 +177,59 @@
},

/**
* Utter a message with a screen reader.
* XXX: This will need to be moved to the upcoming accessibility app.
* @param {String} message A message key to be localized.
* @param {Boolean} enqueue A flag to enqueue the message.
* @param {Function} aCallback A callback after the speech synthesis is
* completed.
* @memberof Accessibility.prototype
* Get audio for a screen reader notification.
* @param {String} aSoundKey a key for the screen reader audio.
* @return {Object} Audio object to be played.
*/
utter: function ar_utter(aMessage, aEnqueue, aCallback) {
if (!this.speechSynthesis || !window.SpeechSynthesisUtterance) {
if (aCallback) {
aCallback();
}
return;
_getSound: function ar__getSound(aSoundKey) {
if (!this.sounds[aSoundKey]) {
this.sounds[aSoundKey] = new Audio(this.soundURLs[aSoundKey]);
}
if (!aEnqueue) {
this.speechSynthesis.cancel();
return this.sounds[aSoundKey];
},

/**
* Handle an AccessFu output mozChromeEvent.
* @param {Object} aDetails AccessFu event details object.
* @memberof Accessibility.prototype
*/
handleAccessFuOutput: function ar_handleAccessFuOutput(aDetails) {
var options = aDetails.options || {};
switch (aDetails.eventType) {
case 'vc-change':
// Vibrate when the virtual cursor changes.
navigator.vibrate(options.pattern);
this._getSound(options.isKey ? 'vcKeyAudio' : 'vcMoveAudio').play();
break;
case 'action':
if (aDetails.data[0].string === 'clickAction') {
// If element is clicked, play 'click' sound instead of speech.
this._getSound('clickedAudio').play();
return;
}
break;
}
var utterance = new window.SpeechSynthesisUtterance(navigator.mozL10n.get(
aMessage));
if (aCallback) {
utterance.addEventListener('end', aCallback);

this.speak(aDetails.data, null, {
enqueue: options.enqueue
});
},

/**
* Handle a mozChromeEvent event.
* @param {Object} aEvent mozChromeEvent.
* @memberof Accessibility.prototype
*/
handleEvent: function ar_handleEvent(aEvent) {
switch (aEvent.detail.type) {
case 'accessfu-output':
this.handleAccessFuOutput(JSON.parse(aEvent.detail.details));
break;
case 'volume-up-button-press':
case 'volume-down-button-press':
this.handleVolumeButtonPress(aEvent);
break;
}
this.speechSynthesis.speak(utterance);
},

/**
@@ -205,8 +242,144 @@
announceScreenReader: function ar_announceScreenReader(aCallback) {
var enabled = this.settings['accessibility.screenreader'];
this.isSpeaking = true;
this.utter(enabled ? 'disableScreenReaderSteps' :
'enableScreenReaderSteps', false, aCallback);
this.speak({
string: enabled ? 'disableScreenReaderSteps' : 'enableScreenReaderSteps'
}, aCallback, {enqueue: false});
},

/**
* Use speechSynthesis to speak screen reader utterances.
* @param {?Array} aData Speech data before it is localized.
* @param {?Function} aCallback A callback after the speech
* synthesis is completed.
* @param {?Object} aOptions = {} Speech options such as enqueue etc.
* @memberof Accessibility.prototype
*/
speak: function ar_speak(aData, aCallback, aOptions = {}) {
speechSynthesizer.speak(aData, aOptions,
this.settings['accessibility.screenreader-rate'],
this.settings['audio.volume.content'] / 15, aCallback);
}
};

/**
* A speech synthesizer component that handles speech localization and
* pronunciation.
* @type {Object}
*/
var speechSynthesizer = {
/**
* Speech Synthesis
* @type {Object}
* @memberof speechSynthesizer
*/
get speech() {
return window.speechSynthesis;
},

/**
* Speech utterance
* @type {Object}
* @memberof speechSynthesizer
*/
get utterance() {
return window.SpeechSynthesisUtterance;
},

/**
* Cancel speech if the screen reader is speaking.
* @memberof speechSynthesizer
*/
cancel: function ss_cancel() {
if (this.speech) {
this.speech.cancel();
}
},

/**
* Localize speech data.
* @param {Object} aDetails Speech data object.
* @return {String} Localized speech data.
* @memberof speechSynthesizer
*/
localize: function ss_localize(aDetails) {
if (!aDetails || typeof aDetails === 'string') {
return aDetails;
}
var string = aDetails.string;
var data = {
count: aDetails.count
};
if (!string) {
return '';
} else {
string = 'accessibility-' + string;
}

if (aDetails.args) {
data = aDetails.args.reduce(function(aData, val, index) {
aData[index] = val;
return aData;
}, data);
}
return navigator.mozL10n.get(string, data);
},

/**
* Build a complete utterance string by localizing an array of speech data.
* @param {?Array} aData Speech data.
* @return {String} A complete localized string from speech array data.
* @memberof speechSynthesizer
*/
buildUtterance: function ss_buildUtterance(aData) {
if (!Array.isArray(aData)) {
aData = [aData];
}
var words = [], localize = this.localize;
aData.reduce(function(words, details) {
var localized = localize(details);
if (localized) {
var word = localized.trim();
if (word) {
words.push(word);
}
}
return words;
}, words);

return words.join(' ');
},
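
To make the localization path above concrete, here is a rough, hypothetical example of the kind of data AccessFu sends and how buildUtterance would assemble it; the l10n keys and the output string are invented for illustration and not taken from this commit:

// Assumed, for illustration only: the l10n file defines entries for the
// 'accessibility-' prefixed keys used below.
var sample = [
  { string: 'listItem', count: 3 },  // localized via 'accessibility-listItem'
  'Settings',                        // plain strings are returned unchanged
  { string: 'link' }                 // localized via 'accessibility-link'
];
// Each entry is localized, trimmed, empty results are dropped, and the
// remaining words are joined with spaces into a single utterance string,
// e.g. something like 'List item Settings link', depending on the actual
// localized strings.
var text = speechSynthesizer.buildUtterance(sample);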

/**
* Utter a message with a speechSynthesizer.
* @param {?Array} aData A messages array to be localized.
* @param {JSON} aOptions Options to be used when speaking. For
* example: {
* enqueue: false
* }
* @param {Number} aRate Speech rate.
* @param {Number} aVolume Speech volume.
* @param {Function} aCallback A callback after the speech synthesis is
* completed.
* @memberof speechSynthesizer
*/
speak: function ss_speak(aData, aOptions, aRate, aVolume, aCallback) {
if (!this.speech || !this.utterance) {
if (aCallback) {
aCallback();
}
return;
}

if (!aOptions.enqueue) {
this.cancel();
}

var utterance = new this.utterance(this.buildUtterance(aData));
utterance.volume = aVolume;
utterance.rate = aRate >= 0 ? aRate + 1 : 1 / (Math.abs(aRate) + 1);
utterance.addEventListener('end', aCallback);
this.speech.speak(utterance);
}
};
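
A worked example of the rate and volume arithmetic in ss_speak above. The 'audio.volume.content' setting appears to range from 0 to 15 (it defaults to 15 and is divided by 15 before being passed in), and the rate setting is a signed number centered on 0; both assumptions are inferred from this diff rather than stated in it:

// Standalone sketch of the same arithmetic used in speechSynthesizer.speak():
function toUtteranceRate(ratePref) {
  // 0 -> 1 (normal speed), 2 -> 3 (faster), -2 -> 1/3 (slower)
  return ratePref >= 0 ? ratePref + 1 : 1 / (Math.abs(ratePref) + 1);
}
function toUtteranceVolume(contentVolume) {
  // 15 -> 1.0 (full volume), 7 -> ~0.47, 0 -> muted
  return contentVolume / 15;
}
console.log(toUtteranceRate(0), toUtteranceRate(2), toUtteranceRate(-2)); // 1, 3, 0.333…
console.log(toUtteranceVolume(15), toUtteranceVolume(7)); // 1, 0.466…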
