// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
 * @fileoverview Accesses Chrome's accessibility extension API and gives
 * spoken feedback for events that happen in the "Chrome of Chrome".
 */
11 goog.provide('cvox.AccessibilityApiHandler');
13 goog.require('cvox.AbstractEarcons');
14 goog.require('cvox.AbstractTts');
15 goog.require('cvox.BrailleInterface');
16 goog.require('cvox.BrailleUtil');
17 goog.require('cvox.ChromeVoxEditableTextBase');
18 goog.require('cvox.NavBraille');
/**
 * The chrome.experimental.accessibility API is moving to
 * chrome.accessibilityPrivate, so provide an alias during the transition.
 *
 * TODO(dmazzoni): Remove after the stable version of Chrome no longer
 * has the experimental accessibility API.
 */
if (!chrome.experimental) {
  chrome.experimental = {};
}

/**
 * Fall back on the experimental API if the new name is not available.
 */
if (!chrome.accessibilityPrivate) {
  chrome.accessibilityPrivate = chrome.experimental.accessibility;
}
/**
 * Class that adds listeners and handles events from the accessibility API.
 * @constructor
 * @implements {cvox.TtsCapturingEventListener}
 * @param {cvox.TtsInterface} tts The TTS to use for speaking.
 * @param {cvox.BrailleInterface} braille The braille interface to use for
 *     braille output.
 * @param {Object} earcons The earcons object to use for playing earcons.
 */
cvox.AccessibilityApiHandler = function(tts, braille, earcons) {
  this.tts = tts;
  this.braille = braille;
  this.earcons = earcons;

  /**
   * Tracks the previous description received.
   * @type {Object}
   * @private
   */
  this.prevDescription_ = {};

  /**
   * Array of strings to speak the next time TTS is idle.
   * @type {!Array.<string>}
   * @private
   */
  this.idleSpeechQueue_ = [];

  // The accessibilityPrivate API is only present when this extension runs
  // inside Chrome; guard against it being unavailable.
  try {
    chrome.accessibilityPrivate.setAccessibilityEnabled(true);
    chrome.accessibilityPrivate.setNativeAccessibilityEnabled(
        !cvox.ChromeVox.isActive);
    this.addEventListeners_();
    if (cvox.ChromeVox.isActive) {
      this.queueAlertsForActiveTab();
    }
  } catch (err) {
    console.log('Error trying to access accessibility extension api.');
  }
};
/**
 * The interface used to manage speech.
 * @type {cvox.TtsInterface}
 */
cvox.AccessibilityApiHandler.prototype.tts = null;

/**
 * The interface used to manage braille.
 * @type {cvox.BrailleInterface}
 */
cvox.AccessibilityApiHandler.prototype.braille = null;

/**
 * The object used to manage earcons.
 * @type {Object}
 */
cvox.AccessibilityApiHandler.prototype.earcons = null;

/**
 * The object that can describe changes and cursor movement in a generic
 * editable text field.
 * @type {?cvox.ChromeVoxEditableTextBase}
 */
cvox.AccessibilityApiHandler.prototype.editableTextHandler = null;

/**
 * The name of the editable text field associated with
 * |editableTextHandler|, so we can tell when focus moves.
 * @type {string}
 */
cvox.AccessibilityApiHandler.prototype.editableTextName = '';

/**
 * The queue mode for the next focus event.
 * @type {number}
 */
cvox.AccessibilityApiHandler.prototype.nextQueueMode = 0;

/**
 * The timeout id for the pending text changed event - the return
 * value from window.setTimeout. We need to delay text events slightly
 * and return only the last one because sometimes we get a rapid
 * succession of related events that should all be considered one
 * bulk change - in particular, autocomplete in the location bar comes
 * as multiple events in a row.
 * @type {?number}
 */
cvox.AccessibilityApiHandler.prototype.textChangeTimeout = null;

/**
 * Most controls have a "context" - the name of the window, dialog, toolbar,
 * or menu they're contained in. We announce a context once, when you
 * first enter it - and we don't announce it again when you move to something
 * else within the same context. This variable keeps track of the most
 * recent context.
 * @type {?string}
 */
cvox.AccessibilityApiHandler.prototype.lastContext = null;

/**
 * Delay in ms between when a text event is received and when it's spoken.
 * @type {number}
 * @const
 */
cvox.AccessibilityApiHandler.prototype.TEXT_CHANGE_DELAY = 10;

/**
 * ID returned from setTimeout to queue up speech on idle.
 * @type {?number}
 * @private
 */
cvox.AccessibilityApiHandler.prototype.idleSpeechTimeout_ = null;

/**
 * Milliseconds of silence to wait before considering speech to be idle.
 * @type {number}
 * @const
 */
cvox.AccessibilityApiHandler.prototype.IDLE_SPEECH_DELAY_MS = 500;
/**
 * Called to let us know that the last speech came from web, and not from
 * native UI. Clear the context and any state associated with the last
 * focused control.
 */
cvox.AccessibilityApiHandler.prototype.setWebContext = function() {
  // Forget any editable-text tracking first; it belonged to native UI.
  this.editableTextHandler = null;
  this.editableTextName = '';
  // This will never be spoken - it's just supposed to be a string that
  // won't match the context of the next control that gets focused.
  this.lastContext = '--internal-web--';
};
/**
 * Adds event listeners.
 * Wires up speech, braille and earcon feedback for tab, window, menu,
 * system (volume/brightness/unlock/wake) and accessibility-control events.
 *
 * NOTE(review): this excerpt is a sampled listing — several original lines
 * (early-return guard bodies, closing braces, a few argument lines) are
 * absent. Code lines below are kept verbatim; each gap is marked with a
 * "[listing gap]" comment. Restore from upstream before compiling.
 * @private
 */
cvox.AccessibilityApiHandler.prototype.addEventListeners_ = function() {
  /** Alias getMsg as msg. */
  var msg = goog.bind(cvox.ChromeVox.msgs.getMsg, cvox.ChromeVox.msgs);

  var accessibility = chrome.accessibilityPrivate;

  // New tab: speak/braille "tab created" and play the object-open earcon.
  chrome.tabs.onCreated.addListener(goog.bind(function(tab) {
    if (!cvox.ChromeVox.isActive) {
    // [listing gap: lines 180-181 — presumably "return;" "}"]
    this.tts.speak(msg('chrome_tab_created'),
    // [listing gap: line 183 — the queue-mode argument]
        cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
    this.braille.write(cvox.NavBraille.fromText(msg('chrome_tab_created')));
    this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_OPEN);
    // [listing gap: lines 187-188 — listener close]

  // Tab removed: earcon only, no speech.
  chrome.tabs.onRemoved.addListener(goog.bind(function(tab) {
    if (!cvox.ChromeVox.isActive) {
    // [listing gap: lines 191-192]
    this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_CLOSE);
    // [listing gap: lines 194-195]

  // Tab switch: announce the tab title (or URL), then queue its alerts.
  chrome.tabs.onActivated.addListener(goog.bind(function(activeInfo) {
    if (!cvox.ChromeVox.isActive) {
    // [listing gap: lines 198-199]
    chrome.tabs.get(activeInfo.tabId, goog.bind(function(tab) {
      if (tab.status == 'loading') {
      // [listing gap: lines 202-203 — presumably "return;" "}"]
      var title = tab.title ? tab.title : tab.url;
      this.tts.speak(msg('chrome_tab_selected',
      // [listing gap: line 206 — presumably the "[title])," argument]
          cvox.AbstractTts.QUEUE_MODE_FLUSH,
          cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
      // [listing gap: line 209 — presumably "this.braille.write("]
          cvox.NavBraille.fromText(msg('chrome_tab_selected', [title])));
      this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_SELECT);
      this.queueAlertsForActiveTab();
      // [listing gap: lines 213-215 — listener closes]

  // Tab load progress: busy-loop earcon while loading, success when done.
  chrome.tabs.onUpdated.addListener(goog.bind(function(tabId, selectInfo) {
    if (!cvox.ChromeVox.isActive) {
    // [listing gap: lines 218-219]
    chrome.tabs.get(tabId, goog.bind(function(tab) {
      // [listing gap: lines 221-223]
      if (tab.status == 'loading') {
        this.earcons.playEarcon(cvox.AbstractEarcons.BUSY_PROGRESS_LOOP);
      // [listing gap: line 226 — presumably "} else {"]
        this.earcons.playEarcon(cvox.AbstractEarcons.TASK_SUCCESS);
      // [listing gap: lines 228-231 — closes]

  // Window focus change: announce normal vs. incognito window plus the
  // selected tab's title.
  chrome.windows.onFocusChanged.addListener(goog.bind(function(windowId) {
    if (!cvox.ChromeVox.isActive) {
    // [listing gap: lines 234-235]
    if (windowId == chrome.windows.WINDOW_ID_NONE) {
    // [listing gap: lines 237-238 — presumably "return;" "}"]
    chrome.windows.get(windowId, goog.bind(function(window) {
      chrome.tabs.getSelected(windowId, goog.bind(function(tab) {
        var msgId = window.incognito ? 'chrome_incognito_window_selected' :
            'chrome_normal_window_selected';
        var title = tab.title ? tab.title : tab.url;
        this.tts.speak(msg(msgId, [title]),
            cvox.AbstractTts.QUEUE_MODE_FLUSH,
            cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
        this.braille.write(cvox.NavBraille.fromText(msg(msgId, [title])));
        this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_SELECT);
        // [listing gap: lines 249-252 — listener closes]

  // Window opened: speak its name and set nextQueueMode so the following
  // focus announcement queues instead of interrupting.
  chrome.accessibilityPrivate.onWindowOpened.addListener(
      goog.bind(function(win) {
        if (!cvox.ChromeVox.isActive) {
        // [listing gap: lines 256-257]
        this.tts.speak(win.name,
            cvox.AbstractTts.QUEUE_MODE_FLUSH,
            cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
        this.braille.write(cvox.NavBraille.fromText(win.name));
        // Queue the next utterance because a window opening is always followed
        // [listing gap: line 263 — remainder of this comment]
        this.nextQueueMode = 1;
        this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_OPEN);
        this.queueAlertsForActiveTab();
        // [listing gap: lines 267-268]

  // Window closed: earcon only.
  chrome.accessibilityPrivate.onWindowClosed.addListener(
      goog.bind(function(win) {
        if (!cvox.ChromeVox.isActive) {
        // [listing gap: lines 272-273]
        // Don't speak, just play the earcon.
        this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_CLOSE);
        // [listing gap: lines 276-277]

  // Menu opened: announce the menu by name.
  chrome.accessibilityPrivate.onMenuOpened.addListener(
      goog.bind(function(menu) {
        if (!cvox.ChromeVox.isActive) {
        // [listing gap: lines 281-282]
        this.tts.speak(msg('chrome_menu_opened', [menu.name]),
            cvox.AbstractTts.QUEUE_MODE_FLUSH,
            cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
        // [listing gap: line 286 — presumably "this.braille.write("]
            cvox.NavBraille.fromText(msg('chrome_menu_opened', [menu.name])));
        this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_OPEN);
        // [listing gap: lines 289-290]

  // Menu closed: earcon only.
  chrome.accessibilityPrivate.onMenuClosed.addListener(
      goog.bind(function(menu) {
        if (!cvox.ChromeVox.isActive) {
        // [listing gap: lines 294-295]
        // Don't speak, just play the earcon.
        this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_CLOSE);
        // [listing gap: lines 298-299]

  // systemPrivate API is only available when this extension is loaded as a
  // component extension embedded in Chrome.
  chrome.permissions.contains(
      { permissions: ['systemPrivate'] },
      goog.bind(function(result) {
        // [listing gap: lines 305-308 — presumably an early return when the
        // systemPrivate permission is absent]
        // TODO(plundblad): Remove when the native sound is turned on by default.
        // See crbug.com:225886.
        var addOnVolumeChangedListener = goog.bind(function() {
          chrome.systemPrivate.onVolumeChanged.addListener(goog.bind(
              // [listing gap: line 313 — the listener function header]
              if (!cvox.ChromeVox.isActive) {
              // [listing gap: lines 315-316]
              // Don't speak, just play the earcon.
              this.earcons.playEarcon(cvox.AbstractEarcons.TASK_SUCCESS);
              // [listing gap: lines 319-320 — closes]
        if (chrome.commandLinePrivate) {
          chrome.commandLinePrivate.hasSwitch('disable-volume-adjust-sound',
              goog.bind(function(result) {
                // [listing gap: line 324]
                addOnVolumeChangedListener();
                // [listing gap: lines 326-328 — presumably the else branch
                // when commandLinePrivate is unavailable]
          addOnVolumeChangedListener();
          // [listing gap: lines 330-331]

        // Brightness change: earcon plus spoken/brailled level, but only
        // when the change was user initiated.
        chrome.systemPrivate.onBrightnessChanged.addListener(
            // [listing gap: lines 333-334 — goog.bind( and JSDoc opener]
            // @param {{brightness: number, userInitiated: boolean}} brightness
            // [listing gap: line 336 — JSDoc close]
            function(brightness) {
              if (brightness.userInitiated) {
                this.earcons.playEarcon(cvox.AbstractEarcons.TASK_SUCCESS);
                // [listing gap: line 340 — presumably "this.tts.speak("]
                    msg('chrome_brightness_changed', [brightness.brightness]),
                    cvox.AbstractTts.QUEUE_MODE_FLUSH,
                    cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
                this.braille.write(cvox.NavBraille.fromText(
                    msg('chrome_brightness_changed', [brightness.brightness])));
                // [listing gap: lines 346-348 — closes]

        // Screen unlocked: announce a pending system update, if any.
        chrome.systemPrivate.onScreenUnlocked.addListener(goog.bind(function() {
          chrome.systemPrivate.getUpdateStatus(goog.bind(function(status) {
            if (!cvox.ChromeVox.isActive) {
            // [listing gap: lines 352-353]
            // Speak about system update when it's ready, otherwise speak nothing.
            if (status.state == 'NeedRestart') {
              this.tts.speak(msg('chrome_system_need_restart'),
                  cvox.AbstractTts.QUEUE_MODE_FLUSH,
                  cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
              // [listing gap: line 359 — presumably "this.braille.write("]
                  cvox.NavBraille.fromText(msg('chrome_system_need_restart')));
              // [listing gap: lines 361-364 — closes]

        // Woke up from sleep: earcon only.
        chrome.systemPrivate.onWokeUp.addListener(goog.bind(function() {
          if (!cvox.ChromeVox.isActive) {
          // [listing gap: lines 367-368]
          // Don't speak, just play the earcon.
          this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_OPEN);
          // [listing gap: lines 371-373 — listener and permission-callback
          // closes]

  // Focus events are handled by the dedicated onControlFocused method.
  chrome.accessibilityPrivate.onControlFocused.addListener(
      goog.bind(this.onControlFocused, this));

  // Control activated: describe it as a select action.
  chrome.accessibilityPrivate.onControlAction.addListener(
      goog.bind(function(ctl) {
        if (!cvox.ChromeVox.isActive) {
        // [listing gap: lines 380-382]
        var description = this.describe(ctl, true);
        this.tts.speak(description.utterance,
            cvox.AbstractTts.QUEUE_MODE_FLUSH,
            description.ttsProps);
        description.braille.write();
        if (description.earcon) {
          this.earcons.playEarcon(description.earcon);
        // [listing gap: lines 390-393 — closes]

  // Control hovered: describe it, apparently gated on touch support.
  chrome.accessibilityPrivate.onControlHover.addListener(
      goog.bind(function(ctl) {
        if (!cvox.ChromeVox.isActive) {
        // [listing gap: lines 397-399]
        var hasTouch = 'ontouchstart' in window;
        // [listing gap: lines 401-404 — presumably bails out when hasTouch
        // is false; verify against upstream]
        var description = this.describe(ctl, false);
        this.tts.speak(description.utterance,
            cvox.AbstractTts.QUEUE_MODE_FLUSH,
            description.ttsProps);
        description.braille.write();
        if (description.earcon) {
          this.earcons.playEarcon(description.earcon);
        // [listing gap: lines 412-415 — closes]

  // Text changed in a control: if it isn't the tracked editable text,
  // treat it as a focus event; otherwise describe the change after a short
  // debounce delay (TEXT_CHANGE_DELAY).
  chrome.accessibilityPrivate.onTextChanged.addListener(
      goog.bind(function(ctl) {
        if (!cvox.ChromeVox.isActive) {
        // [listing gap: lines 419-421]
        if (!this.editableTextHandler ||
            this.editableTextName != ctl.name ||
            this.lastContext != ctl.context) {
          // Chrome won't send a text change event on a control that isn't
          // focused. If we get a text change event and it doesn't match the
          // focused control, treat it as a focus event initially.
          this.onControlFocused(ctl);
          // [listing gap: lines 429-431 — presumably "return;" "}"]

        // Only send the most recent text changed event - throw away anything
        // [listing gap: line 433 — remainder of this comment]
        if (this.textChangeTimeout) {
          window.clearTimeout(this.textChangeTimeout);
        // [listing gap: lines 436-437]

        // Handle the text change event after a small delay, so multiple
        // events in rapid succession are handled as a single change. This is
        // specifically for the location bar with autocomplete - typing a
        // character and getting the autocompleted text and getting that
        // text selected may be three separate events.
        this.textChangeTimeout = window.setTimeout(
            goog.bind(function() {
              var textChangeEvent = new cvox.TextChangeEvent(
                  // [listing gap: line 446 — presumably ctl.details.value,]
                  ctl.details.selectionStart,
                  ctl.details.selectionEnd,
                  true); // triggered by user
              this.editableTextHandler.changed(
                  // [listing gap: line 451 — the event argument]
              this.describe(ctl, false).braille.write();
            }, this), this.TEXT_CHANGE_DELAY);
        // [listing gap: lines 454-455 — closes]

  // Register for TTS start/end callbacks (onTtsStart/onTtsEnd below) so we
  // can speak queued alerts when speech goes idle.
  this.tts.addCapturingEventListener(this);
  // [listing gap: line 457 — presumably the closing "};"]
/**
 * Handle the feedback when a new control gets focus.
 *
 * Builds the description (speech/braille/earcon) for the control, tracks
 * editable-text state when the control is a textbox, then emits the
 * description using the queue mode requested by the previous event.
 *
 * NOTE(review): reconstructed from a gapped listing; the missing lines
 * (guard body, selection-order swap, constructor arguments) were inferred
 * from the visible fragments — verify against upstream.
 * @param {AccessibilityObject} ctl The focused control.
 */
cvox.AccessibilityApiHandler.prototype.onControlFocused = function(ctl) {
  if (!cvox.ChromeVox.isActive) {
    return;
  }

  // Call this first because it may clear this.editableTextHandler.
  var description = this.describe(ctl, false);

  if (ctl.type == 'textbox') {
    // Normalize the selection so start <= end.
    var start = ctl.details.selectionStart;
    var end = ctl.details.selectionEnd;
    if (start > end) {
      start = ctl.details.selectionEnd;
      end = ctl.details.selectionStart;
    }
    this.editableTextName = ctl.name;
    this.editableTextHandler = new cvox.ChromeVoxEditableTextBase(
        ctl.details.value,
        start,
        end,
        ctl.details.isPassword,
        this.tts);
  } else {
    this.editableTextHandler = null;
  }

  this.tts.speak(description.utterance,
      this.nextQueueMode,
      description.ttsProps);
  description.braille.write();
  this.nextQueueMode = 0;
  if (description.earcon) {
    this.earcons.playEarcon(description.earcon);
  }
};
/**
 * Called when any speech starts.
 * Cancels a pending idle-speech check, since speech is no longer idle.
 */
cvox.AccessibilityApiHandler.prototype.onTtsStart = function() {
  var pendingIdleCheck = this.idleSpeechTimeout_;
  if (pendingIdleCheck) {
    window.clearTimeout(pendingIdleCheck);
  }
};
/**
 * Called when any speech ends.
 * If there are queued idle utterances, schedules onTtsIdle to run after
 * IDLE_SPEECH_DELAY_MS of silence.
 */
cvox.AccessibilityApiHandler.prototype.onTtsEnd = function() {
  if (this.idleSpeechQueue_.length == 0) {
    return;
  }
  this.idleSpeechTimeout_ = window.setTimeout(
      goog.bind(this.onTtsIdle, this),
      this.IDLE_SPEECH_DELAY_MS);
};
/**
 * Called when speech has been idle for a certain minimum delay.
 * Speaks queued messages.
 *
 * Dequeues one utterance per invocation; onTtsEnd re-arms the idle timer
 * after each utterance, draining the queue one item at a time.
 */
cvox.AccessibilityApiHandler.prototype.onTtsIdle = function() {
  if (this.idleSpeechQueue_.length == 0) {
    return;
  }

  // Fix: removed the unused local `msg` binding (goog.bind of getMsg) that
  // was created here but never referenced in this function.
  var utterance = this.idleSpeechQueue_.shift();
  this.tts.speak(utterance,
      cvox.AbstractTts.QUEUE_MODE_FLUSH,
      cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
};
/**
 * Given a control received from the accessibility api, determine an
 * utterance to speak, text to braille, and an earcon to play to describe it.
 *
 * NOTE(review): this excerpt is a sampled listing — several original lines
 * (case labels, "break;" statements, closing braces, the declarations of
 * the |s| and |braille| accumulators) are absent and are marked with
 * "[listing gap]" comments. Code lines are kept verbatim; restore from
 * upstream before compiling.
 *
 * @param {Object} control The control that had an action performed on it.
 * @param {boolean} isSelect True if the action is a select action,
 *     otherwise it's a focus action.
 * @return {Object} An object containing a string field |utterance|, object
 *     |ttsProps|, |braille|, and earcon |earcon|.
 */
cvox.AccessibilityApiHandler.prototype.describe = function(control, isSelect) {
  /** Alias getMsg as msg. */
  var msg = goog.bind(cvox.ChromeVox.msgs.getMsg, cvox.ChromeVox.msgs);
  // [listing gap: lines 547-549 — presumably declarations of the utterance
  // accumulator |s| and the |braille| description object used below]
  var ttsProps = cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT;

  // Announce a context (window/dialog/toolbar/menu name) only when it
  // changes; see the |lastContext| field.
  var context = control.context;
  if (context && context != this.lastContext) {
    // [listing gap: line 554 — presumably prepends the context to |s|]
    this.lastContext = context;
    // A new context invalidates any tracked editable-text state.
    this.editableTextHandler = null;
    // [listing gap: lines 557-558 — presumably "}"]

  var earcon = undefined;
  // Strip mnemonic markers ('&', '_') and ellipses from the spoken name.
  var name = control.name.replace(/[_&]+/g, '').replace('...', '');
  braille.name = control.name;
  switch (control.type) {
    // [listing gap: line 563 — presumably "case 'checkbox':"]
      braille.roleMsg = 'input_type_checkbox';
      if (control.details.isChecked) {
        earcon = cvox.AbstractEarcons.CHECK_ON;
        s += msg('describe_checkbox_checked', [name]);
        braille.state = msg('checkbox_checked_state_brl');
      // [listing gap: line 569 — presumably "} else {"]
        earcon = cvox.AbstractEarcons.CHECK_OFF;
        s += msg('describe_checkbox_unchecked', [name]);
        braille.state = msg('checkbox_unchecked_state_brl');
      // [listing gap: lines 573-576 — close, break, next case label]
      braille.roleMsg = 'input_type_radio';
      if (control.details.isChecked) {
        earcon = cvox.AbstractEarcons.CHECK_ON;
        s += msg('describe_radio_selected', [name]);
        braille.state = msg('radio_selected_state_brl');
      // [listing gap: line 582 — presumably "} else {"]
        earcon = cvox.AbstractEarcons.CHECK_OFF;
        s += msg('describe_radio_unselected', [name]);
        braille.state = msg('radio_unselected_state_brl');
      // [listing gap: lines 586-588 — close, break, "case 'menu':"]
      s += msg('describe_menu', [name]);
      braille.roleMsg = 'aria_role_menu';
      // [listing gap: lines 591-593 — break, "case 'menuitem':", and the
      // opening of the s += msg( call continued below]
          control.details.hasSubmenu ?
              'describe_menu_item_with_submenu' : 'describe_menu_item', [name]);
      braille.roleMsg = 'aria_role_menuitem';
      if (control.details.hasSubmenu) {
        braille.state = msg('aria_has_submenu_brl');
      // [listing gap: lines 599-601 — close, break, "case 'window':"]
      s += msg('describe_window', [name]);
      // No specialization for braille.
      // [listing gap: lines 604-606 — break and the alert case label]
      earcon = cvox.AbstractEarcons.ALERT_NONMODAL;
      s += msg('aria_role_alert') + ': ' + name;
      // Alerts use the more urgent system-alert speech persona.
      ttsProps = cvox.AbstractTts.PERSONALITY_SYSTEM_ALERT;
      braille.roleMsg = 'aria_role_alert';
      // [listing gap: lines 611-613 — break and "case 'textbox':"]
      earcon = cvox.AbstractEarcons.EDITABLE_TEXT;
      var unnamed = name == '' ? 'unnamed_' : '';
      // [listing gap: line 616 — presumably declares |type| and |value|]
      if (control.details.isPassword) {
        // [listing gap: line 618 — presumably type = 'password';]
        braille.roleMsg = 'input_type_password';
        // Mask every character of a password value.
        value = control.details.value.replace(/./g, '*');
      // [listing gap: lines 621-622 — "} else {" and type assignment]
        braille.roleMsg = 'input_type_text';
        value = control.details.value;
      // [listing gap: line 625 — presumably "}"]
      s += msg('describe_' + unnamed + type, [value, name]);
      braille.value = cvox.BrailleUtil.createValue(
          value, control.details.selectionStart, control.details.selectionEnd);
      // [listing gap: lines 629-630 — break and "case 'button':"]
      earcon = cvox.AbstractEarcons.BUTTON;
      s += msg('describe_button', [name]);
      braille.roleMsg = 'tag_button';
      // [listing gap: lines 634-639 — break and case label(s); the branch
      // below apparently serves combobox/listbox-style controls]
      earcon = cvox.AbstractEarcons.LISTBOX;
      var unnamed = name == '' ? 'unnamed_' : '';
      s += msg('describe_' + unnamed + control.type,
          [control.details.value, name]);
      braille.roleMsg = 'tag_select';
      // [listing gap: lines 645-646 — break and "case 'link':"]
      earcon = cvox.AbstractEarcons.LINK;
      s += msg('describe_link', [name]);
      braille.roleMsg = 'tag_link';
      // [listing gap: lines 650-651 — break and "case 'tab':"]
      s += msg('describe_tab', [name]);
      braille.roleMsg = 'aria_role_tab';
      // [listing gap: lines 654-655 — break and "case 'slider':"]
      s += msg('describe_slider', [control.details.stringValue, name]);
      braille.value = cvox.BrailleUtil.createValue(control.details.stringValue);
      braille.roleMsg = 'aria_role_slider';
      // [listing gap: lines 659-660 — break and the treeitem case label]
      // Announce the tree depth only when it differs from the previously
      // described item's depth.
      if (this.prevDescription_ &&
          this.prevDescription_.details &&
          goog.isDef(control.details.itemDepth) &&
          this.prevDescription_.details.itemDepth !=
              control.details.itemDepth) {
        s += msg('describe_depth', [control.details.itemDepth]);
      // [listing gap: line 667 — presumably "}"]
      s += name + ' ' + msg('aria_role_treeitem');
      s += control.details.isItemExpanded ?
          msg('aria_expanded_true') : msg('aria_expanded_false');
      // [listing gap: line 671]
      // Indent the braille name by the tree depth.
      braille.name = Array(control.details.itemDepth).join(' ') + braille.name;
      braille.roleMsg = 'aria_role_treeitem';
      braille.state = control.details.isItemExpanded ?
          msg('aria_expanded_true_brl') : msg('aria_expanded_false_brl');
      // [listing gap: lines 676-678 — break and "default:"]
      s += name + ', ' + control.type;
      braille.role = control.type;
      // [listing gap: lines 681-682 — break and closing "}" of the switch]

  // Sliders announce their value instead of a "selected" suffix.
  if (isSelect && control.type != 'slider') {
    s += msg('describe_selected');
  // [listing gap: line 685 — presumably "}"]
  if (control.details && control.details.itemCount >= 0) {
    s += msg('describe_index',
        [control.details.itemIndex + 1, control.details.itemCount]);
    braille.state = braille.state ? braille.state + ' ' : '';
    braille.state += msg('LIST_POSITION_BRL',
        [control.details.itemIndex + 1, control.details.itemCount]);
  // [listing gap: lines 692-693 — presumably "}"]

  var description = {};
  description.utterance = s;
  description.ttsProps = ttsProps;
  var spannable = cvox.BrailleUtil.getTemplated(null, null, braille);
  // Expose the value-selection span's extent, if present, so the braille
  // display can show the cursor.
  var valueSelectionSpan = spannable.getSpanInstanceOf(
      cvox.BrailleUtil.ValueSelectionSpan);
  var brailleObj = {text: spannable};
  if (valueSelectionSpan) {
    brailleObj.startIndex = spannable.getSpanStart(valueSelectionSpan);
    brailleObj.endIndex = spannable.getSpanEnd(valueSelectionSpan);
  // [listing gap: line 704 — presumably "}"]
  description.braille = new cvox.NavBraille(brailleObj);
  description.earcon = earcon;
  // Remember this control for the treeitem depth comparison above.
  this.prevDescription_ = control;
  // [listing gap: lines 708-709 — presumably "return description;" "};"]
712 * Queues alerts for the active tab, if any, which will be spoken
713 * as soon as speech is idle.
715 cvox.AccessibilityApiHandler.prototype.queueAlertsForActiveTab = function() {
716 this.idleSpeechQueue_.length = 0;
717 var msg = goog.bind(cvox.ChromeVox.msgs.getMsg, cvox.ChromeVox.msgs);
719 chrome.tabs.query({'active': true, 'currentWindow': true},
720 goog.bind(function(tabs) {
721 if (tabs.length < 1) {
724 chrome.accessibilityPrivate.getAlertsForTab(
725 tabs[0].id, goog.bind(function(alerts) {
726 if (alerts.length == 0) {
732 if (alerts.length == 1) {
733 utterance += msg('page_has_one_alert_singular');
735 utterance += msg('page_has_alerts_plural',
739 for (var i = 0; i < alerts.length; i++) {
740 utterance += ' ' + alerts[i].message;
743 utterance += ' ' + msg('review_alerts');
745 if (this.idleSpeechQueue_.indexOf(utterance) == -1) {
746 this.idleSpeechQueue_.push(utterance);