// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

/**
 * @fileoverview Accesses Chrome's accessibility extension API and gives
 * spoken feedback for events that happen in the "Chrome of Chrome".
 */

goog.provide('cvox.AccessibilityApiHandler');

goog.require('cvox.AbstractEarcons');
goog.require('cvox.AbstractTts');
goog.require('cvox.BrailleInterface');
goog.require('cvox.BrailleUtil');
goog.require('cvox.ChromeVoxEditableTextBase');
goog.require('cvox.NavBraille');


/**
 * The chrome.experimental.accessibility API is moving to
 * chrome.accessibilityPrivate, so provide an alias during the transition.
 *
 * TODO(dmazzoni): Remove after the stable version of Chrome no longer
 * has the experimental accessibility API.
 */
chrome.experimental = chrome.experimental || {};
/**
 * Fall back on the experimental API if the new name is not available.
 */
chrome.accessibilityPrivate = chrome.accessibilityPrivate ||
    chrome.experimental.accessibility;

/**
 * Class that adds listeners and handles events from the accessibility API.
 * @constructor
 * @implements {cvox.TtsCapturingEventListener}
 * @param {cvox.TtsInterface} tts The TTS to use for speaking.
 * @param {cvox.BrailleInterface} braille The braille interface to use for
 *     braille output.
 * @param {Object} earcons The earcons object to use for playing earcons.
 */
cvox.AccessibilityApiHandler = function(tts, braille, earcons) {
  this.tts = tts;
  this.braille = braille;
  this.earcons = earcons;
  /**
   * Tracks the previous description received.
   * @type {Object}
   * @private
   */
  this.prevDescription_ = {};
  /**
   * Array of strings to speak the next time TTS is idle.
   * @type {!Array.<string>}
   * @private
   */
  this.idleSpeechQueue_ = [];

  try {
    chrome.accessibilityPrivate.setAccessibilityEnabled(true);
    chrome.accessibilityPrivate.setNativeAccessibilityEnabled(
        !cvox.ChromeVox.isActive);
    this.addEventListeners_();
    if (cvox.ChromeVox.isActive) {
      this.queueAlertsForActiveTab();
    }
  } catch (err) {
    console.log('Error trying to access accessibility extension api.');
  }
};
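
// Illustrative usage (not part of the original file): the ChromeVox
// background page constructs this handler once during startup, passing in
// its own speech, braille and earcon objects. The variable names below are
// hypothetical placeholders for those instances:
//
//   var handler = new cvox.AccessibilityApiHandler(
//       backgroundTts, brailleBackground, earcons);
//
// Constructing the handler enables the accessibility events and registers
// every listener as a side effect, so no further setup call is needed.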

/**
 * The interface used to manage speech.
 * @type {cvox.TtsInterface}
 */
cvox.AccessibilityApiHandler.prototype.tts = null;

/**
 * The interface used to manage braille.
 * @type {cvox.BrailleInterface}
 */
cvox.AccessibilityApiHandler.prototype.braille = null;

/**
 * The object used to manage earcons.
 * @type {Object}
 */
cvox.AccessibilityApiHandler.prototype.earcons = null;

/**
 * The object that can describe changes and cursor movement in a generic
 * editable text field.
 * @type {Object}
 */
cvox.AccessibilityApiHandler.prototype.editableTextHandler = null;

/**
 * The name of the editable text field associated with
 * |editableTextHandler|, so we can tell when focus moves.
 * @type {string}
 */
cvox.AccessibilityApiHandler.prototype.editableTextName = '';

/**
 * The queue mode for the next focus event.
 * @type {number}
 */
cvox.AccessibilityApiHandler.prototype.nextQueueMode = 0;

/**
 * The timeout id for the pending text changed event - the return
 * value from window.setTimeout. We need to delay text events slightly
 * and handle only the last one, because sometimes we get a rapid
 * succession of related events that should all be considered one
 * bulk change - in particular, autocomplete in the location bar comes
 * as multiple events in a row.
 * @type {?number}
 */
cvox.AccessibilityApiHandler.prototype.textChangeTimeout = null;

/**
 * Most controls have a "context" - the name of the window, dialog, toolbar,
 * or menu they're contained in. We announce a context once, when you
 * first enter it - and we don't announce it again when you move to something
 * else within the same context. This variable keeps track of the most
 * recent context.
 * @type {?string}
 */
cvox.AccessibilityApiHandler.prototype.lastContext = null;

/**
 * Delay in ms between when a text event is received and when it's spoken.
 * @type {number}
 * @const
 */
cvox.AccessibilityApiHandler.prototype.TEXT_CHANGE_DELAY = 10;

/**
 * ID returned from setTimeout to queue up speech on idle.
 * @type {?number}
 * @private
 */
cvox.AccessibilityApiHandler.prototype.idleSpeechTimeout_ = null;

/**
 * Milliseconds of silence to wait before considering speech to be idle.
 * @const
 */
cvox.AccessibilityApiHandler.prototype.IDLE_SPEECH_DELAY_MS = 500;

/**
 * Called to let us know that the last speech came from web, and not from
 * native UI. Clear the context and any state associated with the last
 * focused control.
 */
cvox.AccessibilityApiHandler.prototype.setWebContext = function() {
  // This will never be spoken - it's just supposed to be a string that
  // won't match the context of the next control that gets focused.
  this.lastContext = '--internal-web--';
  this.editableTextHandler = null;
  this.editableTextName = '';
};

/**
 * Adds event listeners.
 * @private
 */
cvox.AccessibilityApiHandler.prototype.addEventListeners_ = function() {
  /** Alias getMsg as msg. */
  var msg = goog.bind(cvox.ChromeVox.msgs.getMsg, cvox.ChromeVox.msgs);

  chrome.tabs.onCreated.addListener(goog.bind(function(tab) {
    if (!cvox.ChromeVox.isActive) {
      return;
    }
    this.tts.speak(msg('chrome_tab_created'),
                   0,
                   cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
    this.braille.write(cvox.NavBraille.fromText(msg('chrome_tab_created')));
    this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_OPEN);
  }, this));

  chrome.tabs.onRemoved.addListener(goog.bind(function(tab) {
    if (!cvox.ChromeVox.isActive) {
      return;
    }
    this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_CLOSE);
  }, this));

  chrome.tabs.onActivated.addListener(goog.bind(function(activeInfo) {
    if (!cvox.ChromeVox.isActive) {
      return;
    }
    chrome.tabs.get(activeInfo.tabId, goog.bind(function(tab) {
      if (tab.status == 'loading') {
        return;
      }
      var title = tab.title ? tab.title : tab.url;
      this.tts.speak(msg('chrome_tab_selected', [title]),
                     cvox.AbstractTts.QUEUE_MODE_FLUSH,
                     cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
      this.braille.write(
          cvox.NavBraille.fromText(msg('chrome_tab_selected', [title])));
      this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_SELECT);
      this.queueAlertsForActiveTab();
    }, this));
  }, this));

  chrome.tabs.onUpdated.addListener(goog.bind(function(tabId, selectInfo) {
    if (!cvox.ChromeVox.isActive) {
      return;
    }
    chrome.tabs.get(tabId, goog.bind(function(tab) {
      if (!tab.active) {
        return;
      }
      if (tab.status == 'loading') {
        this.earcons.playEarcon(cvox.AbstractEarcons.BUSY_PROGRESS_LOOP);
      } else {
        this.earcons.playEarcon(cvox.AbstractEarcons.TASK_SUCCESS);
      }
    }, this));
  }, this));

  chrome.windows.onFocusChanged.addListener(goog.bind(function(windowId) {
    if (!cvox.ChromeVox.isActive) {
      return;
    }
    if (windowId == chrome.windows.WINDOW_ID_NONE) {
      return;
    }
    chrome.windows.get(windowId, goog.bind(function(window) {
      chrome.tabs.getSelected(windowId, goog.bind(function(tab) {
        var msgId = window.incognito ? 'chrome_incognito_window_selected' :
            'chrome_normal_window_selected';
        var title = tab.title ? tab.title : tab.url;
        this.tts.speak(msg(msgId, [title]),
                       cvox.AbstractTts.QUEUE_MODE_FLUSH,
                       cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
        this.braille.write(cvox.NavBraille.fromText(msg(msgId, [title])));
        this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_SELECT);
      }, this));
    }, this));
  }, this));

  chrome.accessibilityPrivate.onWindowOpened.addListener(
      goog.bind(function(win) {
    if (!cvox.ChromeVox.isActive) {
      return;
    }
    this.tts.speak(win.name,
                   cvox.AbstractTts.QUEUE_MODE_FLUSH,
                   cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
    this.braille.write(cvox.NavBraille.fromText(win.name));
    // Queue the next utterance because a window opening is always followed
    // by a focus event.
    this.nextQueueMode = 1;
    this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_OPEN);
    this.queueAlertsForActiveTab();
  }, this));

  chrome.accessibilityPrivate.onWindowClosed.addListener(
      goog.bind(function(win) {
    if (!cvox.ChromeVox.isActive) {
      return;
    }
    // Don't speak, just play the earcon.
    this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_CLOSE);
  }, this));

  chrome.accessibilityPrivate.onMenuOpened.addListener(
      goog.bind(function(menu) {
    if (!cvox.ChromeVox.isActive) {
      return;
    }
    this.tts.speak(msg('chrome_menu_opened', [menu.name]),
                   cvox.AbstractTts.QUEUE_MODE_FLUSH,
                   cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
    this.braille.write(
        cvox.NavBraille.fromText(msg('chrome_menu_opened', [menu.name])));
    this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_OPEN);
  }, this));

  chrome.accessibilityPrivate.onMenuClosed.addListener(
      goog.bind(function(menu) {
    if (!cvox.ChromeVox.isActive) {
      return;
    }
    // Don't speak, just play the earcon.
    this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_CLOSE);
  }, this));

  // The systemPrivate API is only available when this extension is loaded
  // as a component extension embedded in Chrome.
  chrome.permissions.contains(
      { permissions: ['systemPrivate'] },
      goog.bind(function(result) {
    if (!result) {
      return;
    }

    // TODO(plundblad): Remove when the native sound is turned on by default.
    // See crbug.com/225886.
    var addOnVolumeChangedListener = goog.bind(function() {
      chrome.systemPrivate.onVolumeChanged.addListener(goog.bind(
          function(volume) {
        if (!cvox.ChromeVox.isActive) {
          return;
        }
        // Don't speak, just play the earcon.
        this.earcons.playEarcon(cvox.AbstractEarcons.TASK_SUCCESS);
      }, this));
    }, this);
    if (chrome.commandLinePrivate) {
      // Only play our own earcon when the native volume-adjust sound has
      // been disabled via the command-line switch.
      chrome.commandLinePrivate.hasSwitch('disable-volume-adjust-sound',
          goog.bind(function(result) {
        if (result) {
          addOnVolumeChangedListener();
        }
      }, this));
    } else {
      addOnVolumeChangedListener();
    }

    chrome.systemPrivate.onBrightnessChanged.addListener(
        goog.bind(
        /**
         * @param {{brightness: number, userInitiated: boolean}} brightness
         */
        function(brightness) {
          if (brightness.userInitiated) {
            this.earcons.playEarcon(cvox.AbstractEarcons.TASK_SUCCESS);
            this.tts.speak(
                msg('chrome_brightness_changed', [brightness.brightness]),
                cvox.AbstractTts.QUEUE_MODE_FLUSH,
                cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
            this.braille.write(cvox.NavBraille.fromText(
                msg('chrome_brightness_changed', [brightness.brightness])));
          }
        }, this));

    chrome.systemPrivate.onScreenUnlocked.addListener(goog.bind(function() {
      chrome.systemPrivate.getUpdateStatus(goog.bind(function(status) {
        if (!cvox.ChromeVox.isActive) {
          return;
        }
        // Speak about a system update when it's ready; otherwise say nothing.
        if (status.state == 'NeedRestart') {
          this.tts.speak(msg('chrome_system_need_restart'),
                         cvox.AbstractTts.QUEUE_MODE_FLUSH,
                         cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
          this.braille.write(
              cvox.NavBraille.fromText(msg('chrome_system_need_restart')));
        }
      }, this));
    }, this));

    chrome.systemPrivate.onWokeUp.addListener(goog.bind(function() {
      if (!cvox.ChromeVox.isActive) {
        return;
      }
      // Don't speak, just play the earcon.
      this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_OPEN);
    }, this));
  }, this));

  chrome.accessibilityPrivate.onControlFocused.addListener(
      goog.bind(this.onControlFocused, this));

  chrome.accessibilityPrivate.onControlAction.addListener(
      goog.bind(function(ctl) {
    if (!cvox.ChromeVox.isActive) {
      return;
    }

    var description = this.describe(ctl, true);
    this.tts.speak(description.utterance,
                   cvox.AbstractTts.QUEUE_MODE_FLUSH,
                   description.ttsProps);
    description.braille.write();
    if (description.earcon) {
      this.earcons.playEarcon(description.earcon);
    }
  }, this));

  try {
    chrome.accessibilityPrivate.onControlHover.addListener(
        goog.bind(function(ctl) {
      if (!cvox.ChromeVox.isActive) {
        return;
      }

      var hasTouch = 'ontouchstart' in window;
      if (!hasTouch) {
        return;
      }

      var description = this.describe(ctl, false);
      this.tts.speak(description.utterance,
                     cvox.AbstractTts.QUEUE_MODE_FLUSH,
                     description.ttsProps);
      description.braille.write();
      if (description.earcon) {
        this.earcons.playEarcon(description.earcon);
      }
    }, this));
  } catch (e) {}

  chrome.accessibilityPrivate.onTextChanged.addListener(
      goog.bind(function(ctl) {
    if (!cvox.ChromeVox.isActive) {
      return;
    }

    if (!this.editableTextHandler ||
        this.editableTextName != ctl.name ||
        this.lastContext != ctl.context) {
      // Chrome won't send a text change event on a control that isn't
      // focused. If we get a text change event and it doesn't match the
      // focused control, treat it as a focus event initially.
      this.onControlFocused(ctl);
      return;
    }

    // Only send the most recent text changed event - throw away anything
    // that was pending.
    if (this.textChangeTimeout) {
      window.clearTimeout(this.textChangeTimeout);
    }

    // Handle the text change event after a small delay, so multiple
    // events in rapid succession are handled as a single change. This is
    // specifically for the location bar with autocomplete - typing a
    // character and getting the autocompleted text and getting that
    // text selected may be three separate events.
    this.textChangeTimeout = window.setTimeout(
        goog.bind(function() {
          var textChangeEvent = new cvox.TextChangeEvent(
              ctl.details.value,
              ctl.details.selectionStart,
              ctl.details.selectionEnd,
              true);  // triggered by user
          this.editableTextHandler.changed(textChangeEvent);
          this.describe(ctl, false).braille.write();
        }, this), this.TEXT_CHANGE_DELAY);
  }, this));

  this.tts.addCapturingEventListener(this);
};
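
// Added note (not in the original file): the |ctl| objects delivered by the
// chrome.accessibilityPrivate control events are consumed in this file as if
// they had roughly the following shape. This is inferred from the usage
// above and in describe() below, not from the API definition, so treat it as
// an illustrative sketch only:
//
//   {
//     type: 'textbox',       // or 'checkbox', 'radiobutton', 'menu', ...
//     name: 'Search',        // the control's label
//     context: 'Settings',   // enclosing window, dialog, toolbar or menu
//     details: {             // type-specific fields; for a textbox:
//       value: 'hello',
//       selectionStart: 0,
//       selectionEnd: 5,
//       isPassword: false
//     }
//   }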

/**
 * Handle the feedback when a new control gets focus.
 * @param {AccessibilityObject} ctl The focused control.
 */
cvox.AccessibilityApiHandler.prototype.onControlFocused = function(ctl) {
  if (!cvox.ChromeVox.isActive) {
    return;
  }

  // Call this first because it may clear this.editableTextHandler.
  var description = this.describe(ctl, false);

  if (ctl.type == 'textbox') {
    var start = ctl.details.selectionStart;
    var end = ctl.details.selectionEnd;
    if (start > end) {
      start = ctl.details.selectionEnd;
      end = ctl.details.selectionStart;
    }
    this.editableTextName = ctl.name;
    this.editableTextHandler =
        new cvox.ChromeVoxEditableTextBase(
            ctl.details.value,
            start,
            end,
            ctl.details.isPassword,
            this.tts);
  } else {
    this.editableTextHandler = null;
  }

  this.tts.speak(description.utterance,
                 this.nextQueueMode,
                 description.ttsProps);
  description.braille.write();
  this.nextQueueMode = 0;
  if (description.earcon) {
    this.earcons.playEarcon(description.earcon);
  }
};
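
// Added note (not in the original file): nextQueueMode defaults to 0 and is
// reset to 0 here after every focus event. The onWindowOpened listener above
// sets it to 1 so that the focus announcement which always follows a
// window-open announcement is queued behind it rather than cutting it off
// (see the comment in that listener).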

/**
 * Called when any speech starts.
 */
cvox.AccessibilityApiHandler.prototype.onTtsStart = function() {
  if (this.idleSpeechTimeout_) {
    window.clearTimeout(this.idleSpeechTimeout_);
  }
};

/**
 * Called when any speech ends.
 */
cvox.AccessibilityApiHandler.prototype.onTtsEnd = function() {
  if (this.idleSpeechQueue_.length > 0) {
    this.idleSpeechTimeout_ = window.setTimeout(
        goog.bind(this.onTtsIdle, this),
        this.IDLE_SPEECH_DELAY_MS);
  }
};

/**
 * Called when speech has been idle for a certain minimum delay.
 * Speaks queued messages.
 */
cvox.AccessibilityApiHandler.prototype.onTtsIdle = function() {
  if (this.idleSpeechQueue_.length == 0) {
    return;
  }
  var utterance = this.idleSpeechQueue_.shift();
  this.tts.speak(utterance,
                 cvox.AbstractTts.QUEUE_MODE_FLUSH,
                 cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
};
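
// Added note (not in the original file): together these three callbacks
// implement the idle-speech queue. onTtsStart cancels any pending idle
// timer, onTtsEnd re-arms it whenever idleSpeechQueue_ is non-empty, and
// after IDLE_SPEECH_DELAY_MS of silence onTtsIdle speaks the next queued
// utterance. queueAlertsForActiveTab() below is what pushes utterances onto
// idleSpeechQueue_.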

/**
 * Given a control received from the accessibility API, determine an
 * utterance to speak, text to braille, and an earcon to play to describe it.
 * @param {Object} control The control that had an action performed on it.
 * @param {boolean} isSelect True if the action is a select action,
 *     otherwise it's a focus action.
 * @return {Object} An object with a string |utterance| to speak, a
 *     |ttsProps| speech properties object, a |braille| cvox.NavBraille to
 *     display, and an optional |earcon| to play.
 */
cvox.AccessibilityApiHandler.prototype.describe = function(control, isSelect) {
  /** Alias getMsg as msg. */
  var msg = goog.bind(cvox.ChromeVox.msgs.getMsg, cvox.ChromeVox.msgs);

  var s = '';
  var braille = {};
  var ttsProps = cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT;

  var context = control.context;
  if (context && context != this.lastContext) {
    s += context + ', ';
    this.lastContext = context;
    this.editableTextHandler = null;
  }

  var earcon = undefined;
  var name = control.name.replace(/[_&]+/g, '').replace('...', '');
  braille.name = control.name;
  switch (control.type) {
    case 'checkbox':
      braille.roleMsg = 'input_type_checkbox';
      if (control.details.isChecked) {
        earcon = cvox.AbstractEarcons.CHECK_ON;
        s += msg('describe_checkbox_checked', [name]);
        braille.state = msg('checkbox_checked_state_brl');
      } else {
        earcon = cvox.AbstractEarcons.CHECK_OFF;
        s += msg('describe_checkbox_unchecked', [name]);
        braille.state = msg('checkbox_unchecked_state_brl');
      }
      break;
    case 'radiobutton':
      braille.roleMsg = 'input_type_radio';
      if (control.details.isChecked) {
        earcon = cvox.AbstractEarcons.CHECK_ON;
        s += msg('describe_radio_selected', [name]);
        braille.state = msg('radio_selected_state_brl');
      } else {
        earcon = cvox.AbstractEarcons.CHECK_OFF;
        s += msg('describe_radio_unselected', [name]);
        braille.state = msg('radio_unselected_state_brl');
      }
      break;
    case 'menu':
      s += msg('describe_menu', [name]);
      braille.roleMsg = 'aria_role_menu';
      break;
    case 'menuitem':
      s += msg(
          control.details.hasSubmenu ?
              'describe_menu_item_with_submenu' : 'describe_menu_item', [name]);
      braille.roleMsg = 'aria_role_menuitem';
      if (control.details.hasSubmenu) {
        braille.state = msg('aria_has_submenu_brl');
      }
      break;
    case 'window':
      s += msg('describe_window', [name]);
      // No specialization for braille.
      braille.name = s;
      break;
    case 'alert':
      earcon = cvox.AbstractEarcons.ALERT_NONMODAL;
      s += msg('aria_role_alert') + ': ' + name;
      ttsProps = cvox.AbstractTts.PERSONALITY_SYSTEM_ALERT;
      braille.roleMsg = 'aria_role_alert';
      isSelect = false;
      break;
    case 'textbox':
      earcon = cvox.AbstractEarcons.EDITABLE_TEXT;
      var unnamed = name == '' ? 'unnamed_' : '';
      var type, value;
      if (control.details.isPassword) {
        type = 'password';
        braille.roleMsg = 'input_type_password';
        value = control.details.value.replace(/./g, '*');
      } else {
        type = 'textbox';
        braille.roleMsg = 'input_type_text';
        value = control.details.value;
      }
      s += msg('describe_' + unnamed + type, [value, name]);
      braille.value = cvox.BrailleUtil.createValue(
          value, control.details.selectionStart, control.details.selectionEnd);
      break;
    case 'button':
      earcon = cvox.AbstractEarcons.BUTTON;
      s += msg('describe_button', [name]);
      braille.roleMsg = 'tag_button';
      break;
    case 'combobox':
    case 'listbox':
      earcon = cvox.AbstractEarcons.LISTBOX;
      var unnamed = name == '' ? 'unnamed_' : '';
      s += msg('describe_' + unnamed + control.type,
               [control.details.value, name]);
      braille.roleMsg = 'tag_select';
      break;
    case 'link':
      earcon = cvox.AbstractEarcons.LINK;
      s += msg('describe_link', [name]);
      braille.roleMsg = 'tag_link';
      break;
    case 'tab':
      s += msg('describe_tab', [name]);
      braille.roleMsg = 'aria_role_tab';
      break;
    case 'slider':
      s += msg('describe_slider', [control.details.stringValue, name]);
      braille.value = cvox.BrailleUtil.createValue(control.details.stringValue);
      braille.roleMsg = 'aria_role_slider';
      break;
    case 'treeitem':
      if (this.prevDescription_ &&
          this.prevDescription_.details &&
          goog.isDef(control.details.itemDepth) &&
          this.prevDescription_.details.itemDepth !=
              control.details.itemDepth) {
        s += msg('describe_depth', [control.details.itemDepth]);
      }
      s += name + ' ' + msg('aria_role_treeitem');
      s += control.details.isItemExpanded ?
          msg('aria_expanded_true') : msg('aria_expanded_false');

      braille.name = Array(control.details.itemDepth).join(' ') + braille.name;
      braille.roleMsg = 'aria_role_treeitem';
      braille.state = control.details.isItemExpanded ?
          msg('aria_expanded_true_brl') : msg('aria_expanded_false_brl');
      break;

    default:
      s += name + ', ' + control.type;
      braille.role = control.type;
  }

  if (isSelect && control.type != 'slider') {
    s += msg('describe_selected');
  }
  if (control.details && control.details.itemCount >= 0) {
    s += msg('describe_index',
        [control.details.itemIndex + 1, control.details.itemCount]);
    braille.state = braille.state ? braille.state + ' ' : '';
    braille.state += msg('LIST_POSITION_BRL',
        [control.details.itemIndex + 1, control.details.itemCount]);
  }

  var description = {};
  description.utterance = s;
  description.ttsProps = ttsProps;
  var spannable = cvox.BrailleUtil.getTemplated(null, null, braille);
  var valueSelectionSpan = spannable.getSpanInstanceOf(
      cvox.BrailleUtil.ValueSelectionSpan);
  var brailleObj = {text: spannable};
  if (valueSelectionSpan) {
    brailleObj.startIndex = spannable.getSpanStart(valueSelectionSpan);
    brailleObj.endIndex = spannable.getSpanEnd(valueSelectionSpan);
  }
  description.braille = new cvox.NavBraille(brailleObj);
  description.earcon = earcon;
  this.prevDescription_ = control;
  return description;
};
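
// Added example (not in the original file): for a checked checkbox named
// "Autofill", describe() returns an object along these lines. The exact
// utterance text depends on the translated describe_checkbox_checked
// message, so the string shown here is only an assumption:
//
//   {
//     utterance: 'Autofill checked check box',
//     ttsProps: cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT,
//     braille: /* a cvox.NavBraille built from the templated braille info */,
//     earcon: cvox.AbstractEarcons.CHECK_ON
//   }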

/**
 * Queues alerts for the active tab, if any, which will be spoken
 * as soon as speech is idle.
 */
cvox.AccessibilityApiHandler.prototype.queueAlertsForActiveTab = function() {
  this.idleSpeechQueue_.length = 0;
  var msg = goog.bind(cvox.ChromeVox.msgs.getMsg, cvox.ChromeVox.msgs);

  chrome.tabs.query({'active': true, 'currentWindow': true},
      goog.bind(function(tabs) {
    if (tabs.length < 1) {
      return;
    }
    chrome.accessibilityPrivate.getAlertsForTab(
        tabs[0].id, goog.bind(function(alerts) {
      if (alerts.length == 0) {
        return;
      }

      var utterance = '';

      if (alerts.length == 1) {
        utterance += msg('page_has_one_alert_singular');
      } else {
        utterance += msg('page_has_alerts_plural',
                         [alerts.length]);
      }

      for (var i = 0; i < alerts.length; i++) {
        utterance += ' ' + alerts[i].message;
      }

      utterance += ' ' + msg('review_alerts');

      if (this.idleSpeechQueue_.indexOf(utterance) == -1) {
        this.idleSpeechQueue_.push(utterance);
      }
    }, this));
  }, this));
};