/documentation/ClockBackTutorial/StepByStep/src/com/google/android/marvin/clockback/ClockBackService.java


/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.marvin.clockback;

import android.accessibilityservice.AccessibilityService;
import android.accessibilityservice.AccessibilityServiceInfo;
import android.app.Service;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioManager;
import android.os.Handler;
import android.os.Message;
import android.os.Vibrator;
import android.speech.tts.TextToSpeech;
import android.util.Log;
import android.util.SparseArray;
import android.view.accessibility.AccessibilityEvent;

import java.util.List;
/**
 * This class is an {@link AccessibilityService} that provides custom feedback
 * for the Clock application that comes by default with Android devices. It
 * demonstrates the following key features of the Android accessibility APIs:
 * <ol>
 *   <li>
 *     Simple demonstration of how to use the accessibility APIs.
 *   </li>
 *   <li>
 *     Hands-on example of various ways to utilize the accessibility API for
 *     providing alternative and complementary feedback.
 *   </li>
 *   <li>
 *     Providing application specific feedback - the service handles only
 *     accessibility events from the clock application.
 *   </li>
 *   <li>
 *     Providing dynamic, context-dependent feedback - feedback type changes
 *     depending on the ringer state.
 *   </li>
 *   <li>
 *     Application specific UI enhancement - application domain knowledge is
 *     utilized to enhance the provided feedback.
 *   </li>
 * </ol>
 *
 * @author svetoslavganov@google.com (Svetoslav R. Ganov)
 */
public class ClockBackService extends AccessibilityService {

    /** Tag for logging from this service */
    private static final String LOG_TAG = "ClockBackService";

    // fields for configuring how the system handles this accessibility service

    /** Minimal timeout between accessibility events we want to receive */
    private static final int EVENT_NOTIFICATION_TIMEOUT_MILLIS = 80;

    /** Packages we are interested in */
    // This works with AlarmClock and Clock whose package name changes in different releases
    private static final String[] PACKAGE_NAMES = new String[] {
            "com.android.alarmclock", "com.google.android.deskclock", "com.android.deskclock"
    };

    // message types we are passing around

    /** Speak */
    private static final int WHAT_SPEAK = 1;

    /** Stop speaking */
    private static final int WHAT_STOP_SPEAK = 2;

    /** Start the TTS service */
    private static final int WHAT_START_TTS = 3;

    /** Stop the TTS service */
    private static final int WHAT_SHUTDOWN_TTS = 4;

    /** Play an earcon */
    private static final int WHAT_PLAY_EARCON = 5;

    /** Stop playing an earcon */
    private static final int WHAT_STOP_PLAY_EARCON = 6;

    /** Vibrate a pattern */
    private static final int WHAT_VIBRATE = 7;

    /** Stop vibrating */
    private static final int WHAT_STOP_VIBRATE = 8;

    // screen state broadcast related constants

    /** Feedback mapping index used as a key for the screen-on broadcast */
    private static final int INDEX_SCREEN_ON = 0x00000100;

    /** Feedback mapping index used as a key for the screen-off broadcast */
    private static final int INDEX_SCREEN_OFF = 0x00000200;

    // ringer mode change related constants

    /** Feedback mapping index used as a key for normal ringer mode */
    private static final int INDEX_RINGER_NORMAL = 0x00000400;

    /** Feedback mapping index used as a key for vibration ringer mode */
    private static final int INDEX_RINGER_VIBRATE = 0x00000800;

    /** Feedback mapping index used as a key for silent ringer mode */
    private static final int INDEX_RINGER_SILENT = 0x00001000;

    // speech related constants

    /**
     * The queuing mode we are using - interrupt a spoken utterance before
     * speaking another one
     */
    private static final int QUEUING_MODE_INTERRUPT = 2;
    /** The space constant used to separate appended utterance chunks */
    private static final String SPACE = " ";
    /**
     * The class name of the number picker buttons with no text we want to
     * announce in the Clock application.
     */
    private static final String CLASS_NAME_NUMBER_PICKER_BUTTON_CLOCK =
            "android.widget.NumberPickerButton";

    /**
     * The class name of the number picker buttons with no text we want to
     * announce in the AlarmClock application.
     */
    private static final String CLASS_NAME_NUMBER_PICKER_BUTTON_ALARM_CLOCK =
            "com.android.internal.widget.NumberPickerButton";

    /**
     * The class name of the edit text box for hours and minutes we want to
     * better announce
     */
    private static final String CLASS_NAME_EDIT_TEXT = "android.widget.EditText";

    /**
     * Mapping from integer to string resource id where the keys are generated
     * from the {@link AccessibilityEvent#getItemCount()} and
     * {@link AccessibilityEvent#getCurrentItemIndex()} properties.
     */
    private static final SparseArray<Integer> sPositionMappedStringResourceIds =
            new SparseArray<Integer>();
    static {
        sPositionMappedStringResourceIds.put(11, R.string.value_plus);
        sPositionMappedStringResourceIds.put(114, R.string.value_plus);
        sPositionMappedStringResourceIds.put(112, R.string.value_minus);
        sPositionMappedStringResourceIds.put(116, R.string.value_minus);
        sPositionMappedStringResourceIds.put(111, R.string.value_hours);
        sPositionMappedStringResourceIds.put(115, R.string.value_minutes);
    }
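
    // Note: each key above is the event's item count followed by its current item index,
    // as computed by computeLookupIndex() below. For example, an event with itemCount 1
    // and currentItemIndex 14 maps to the key 114. The concrete index values are specific
    // to the layouts of the targeted Clock and AlarmClock screens and may differ in other
    // releases.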
    /** Mapping from integers to vibration patterns for haptic feedback */
    private static final SparseArray<long[]> sVibrationPatterns = new SparseArray<long[]>();
    static {
        sVibrationPatterns.put(AccessibilityEvent.TYPE_VIEW_CLICKED, new long[] {
                0L, 100L
        });
        sVibrationPatterns.put(AccessibilityEvent.TYPE_VIEW_SELECTED, new long[] {
                0L, 15L, 10L, 15L
        });
        sVibrationPatterns.put(AccessibilityEvent.TYPE_VIEW_FOCUSED, new long[] {
                0L, 15L, 10L, 15L
        });
        sVibrationPatterns.put(AccessibilityEvent.TYPE_WINDOW_STATE_CHANGED, new long[] {
                0L, 25L, 50L, 25L, 50L, 25L
        });
        sVibrationPatterns.put(INDEX_SCREEN_ON, new long[] {
                0L, 10L, 10L, 20L, 20L, 30L
        });
        sVibrationPatterns.put(INDEX_SCREEN_OFF, new long[] {
                0L, 30L, 20L, 20L, 10L, 10L
        });
    }
    /** Mapping from integers to raw sound resource ids */
    private static SparseArray<Integer> sSoundsResourceIds = new SparseArray<Integer>();
    static {
        sSoundsResourceIds.put(AccessibilityEvent.TYPE_VIEW_CLICKED, R.raw.sound1);
        sSoundsResourceIds.put(AccessibilityEvent.TYPE_VIEW_SELECTED, R.raw.sound2);
        sSoundsResourceIds.put(AccessibilityEvent.TYPE_VIEW_FOCUSED, R.raw.sound2);
        sSoundsResourceIds.put(AccessibilityEvent.TYPE_WINDOW_STATE_CHANGED, R.raw.sound3);
        sSoundsResourceIds.put(INDEX_SCREEN_ON, R.raw.sound4);
        sSoundsResourceIds.put(INDEX_SCREEN_OFF, R.raw.sound5);
        sSoundsResourceIds.put(INDEX_RINGER_SILENT, R.raw.sound6);
        sSoundsResourceIds.put(INDEX_RINGER_VIBRATE, R.raw.sound7);
        sSoundsResourceIds.put(INDEX_RINGER_NORMAL, R.raw.sound8);
    }
    // sound pool related member fields

    /** Mapping from integers to earcon names - dynamically populated. */
    private final SparseArray<String> mEarconNames = new SparseArray<String>();

    // auxiliary fields

    /**
     * Handle to this service to enable inner classes to access the {@link Context}
     */
    private Context mContext;

    /** The feedback this service is currently providing */
    private int mProvidedFeedbackType;

    /** Reusable instance for building utterances */
    private final StringBuilder mUtterance = new StringBuilder();

    // feedback providing services

    /** The {@link TextToSpeech} used for speaking */
    private TextToSpeech mTts;

    /** The {@link AudioManager} for detecting ringer state */
    private AudioManager mAudioManager;

    /** Vibrator for providing haptic feedback */
    private Vibrator mVibrator;

    /** Flag if the infrastructure is initialized */
    private boolean isInfrastructureInitialized;
    /** {@link Handler} for executing messages on the service main thread */
    Handler mHandler = new Handler() {
        @Override
        public void handleMessage(Message message) {
            switch (message.what) {
                case WHAT_SPEAK:
                    String utterance = (String) message.obj;
                    mTts.speak(utterance, QUEUING_MODE_INTERRUPT, null);
                    return;
                case WHAT_STOP_SPEAK:
                    mTts.stop();
                    return;
                case WHAT_START_TTS:
                    mTts = new TextToSpeech(mContext, new TextToSpeech.OnInitListener() {
                        @Override
                        public void onInit(int status) {
                            // register the receiver here since the TTS must be initialized
                            // before earcons can be added; the receiver is then called
                            // immediately with the current ringer mode
                            registerBroadCastReceiver();
                        }
                    });
                    return;
                case WHAT_SHUTDOWN_TTS:
                    mTts.shutdown();
                    return;
                case WHAT_PLAY_EARCON:
                    int resourceId = message.arg1;
                    playEarcon(resourceId);
                    return;
                case WHAT_STOP_PLAY_EARCON:
                    mTts.stop();
                    return;
                case WHAT_VIBRATE:
                    int key = message.arg1;
                    long[] pattern = sVibrationPatterns.get(key);
                    mVibrator.vibrate(pattern, -1);
                    return;
                case WHAT_STOP_VIBRATE:
                    mVibrator.cancel();
                    return;
            }
        }
    };
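
    // Design note: every feedback request is posted as a message to this handler, so the
    // TextToSpeech and Vibrator objects are only ever touched on the thread that created
    // the handler (the service's main thread), regardless of where the triggering event
    // or broadcast was observed.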
    /**
     * {@link BroadcastReceiver} for receiving updates for our context - device
     * state
     */
    private BroadcastReceiver mBroadcastReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            String action = intent.getAction();
            if (AudioManager.RINGER_MODE_CHANGED_ACTION.equals(action)) {
                int ringerMode = intent.getIntExtra(AudioManager.EXTRA_RINGER_MODE,
                        AudioManager.RINGER_MODE_NORMAL);
                configureForRingerMode(ringerMode);
            } else if (Intent.ACTION_SCREEN_ON.equals(action)) {
                provideScreenStateChangeFeedback(INDEX_SCREEN_ON);
            } else if (Intent.ACTION_SCREEN_OFF.equals(action)) {
                provideScreenStateChangeFeedback(INDEX_SCREEN_OFF);
            } else {
                Log.w(LOG_TAG, "Registered for but not handling action " + action);
            }
        }

        /**
         * Provides feedback to announce the screen state change. Such a change
         * is turning the screen on or off.
         *
         * @param feedbackIndex The index of the feedback in the statically
         *            mapped feedback resources.
         */
        private void provideScreenStateChangeFeedback(int feedbackIndex) {
            // we take a specific action depending on the feedback we currently provide
            switch (mProvidedFeedbackType) {
                case AccessibilityServiceInfo.FEEDBACK_SPOKEN:
                    String utterance = generateScreenOnOrOffUtterance(feedbackIndex);
                    mHandler.obtainMessage(WHAT_SPEAK, utterance).sendToTarget();
                    return;
                case AccessibilityServiceInfo.FEEDBACK_AUDIBLE:
                    mHandler.obtainMessage(WHAT_PLAY_EARCON, feedbackIndex, 0).sendToTarget();
                    return;
                case AccessibilityServiceInfo.FEEDBACK_HAPTIC:
                    mHandler.obtainMessage(WHAT_VIBRATE, feedbackIndex, 0).sendToTarget();
                    return;
                default:
                    throw new IllegalStateException("Unexpected feedback type "
                            + mProvidedFeedbackType);
            }
        }
    };
    @Override
    public void onServiceConnected() {
        if (isInfrastructureInitialized) {
            return;
        }
        mContext = this;
        // send a message to start the TTS
        mHandler.sendEmptyMessage(WHAT_START_TTS);
        // get the vibrator service
        mVibrator = (Vibrator) getSystemService(Service.VIBRATOR_SERVICE);
        // get the AudioManager and configure according to the current ringer mode
        mAudioManager = (AudioManager) getSystemService(Service.AUDIO_SERVICE);
        setServiceInfo(AccessibilityServiceInfo.FEEDBACK_SPOKEN);
        // we are in an initialized state now
        isInfrastructureInitialized = true;
    }
    @Override
    public boolean onUnbind(Intent intent) {
        if (isInfrastructureInitialized) {
            // stop the TTS service
            mHandler.sendEmptyMessage(WHAT_SHUTDOWN_TTS);
            // unregister the intent broadcast receiver
            if (mBroadcastReceiver != null) {
                unregisterReceiver(mBroadcastReceiver);
            }
            // we are not in an initialized state anymore
            isInfrastructureInitialized = false;
        }
        return false;
    }
    /**
     * Registers the phone state observing broadcast receiver.
     */
    private void registerBroadCastReceiver() {
        // Create a filter with the broadcast intents we are interested in
        IntentFilter filter = new IntentFilter();
        filter.addAction(AudioManager.RINGER_MODE_CHANGED_ACTION);
        filter.addAction(Intent.ACTION_SCREEN_ON);
        filter.addAction(Intent.ACTION_SCREEN_OFF);
        // register for broadcasts of interest
        registerReceiver(mBroadcastReceiver, filter, null, null);
    }
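
    // Note: ACTION_SCREEN_ON and ACTION_SCREEN_OFF are only delivered to receivers
    // registered in code as above, not to receivers declared in the manifest. Registering
    // also delivers the sticky RINGER_MODE_CHANGED broadcast right away, which is what
    // triggers the initial call to configureForRingerMode().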
    /**
     * Generates an utterance for announcing screen on and screen off.
     *
     * @param feedbackIndex The feedback index for looking up feedback value.
     * @return The utterance.
     */
    private String generateScreenOnOrOffUtterance(int feedbackIndex) {
        // get the announce template
        int resourceId = (feedbackIndex == INDEX_SCREEN_ON) ? R.string.template_screen_on
                : R.string.template_screen_off;
        String template = mContext.getString(resourceId);
        // format the template with the ringer volume percentage
        int currentRingerVolume = mAudioManager.getStreamVolume(AudioManager.STREAM_RING);
        int maxRingerVolume = mAudioManager.getStreamMaxVolume(AudioManager.STREAM_RING);
        // multiply before dividing to avoid losing precision to integer division
        int volumePercent = currentRingerVolume * 100 / maxRingerVolume;
        // round to a multiple of five so it sounds better
        int adjustment = volumePercent % 10;
        if (adjustment < 5) {
            volumePercent -= adjustment;
        } else if (adjustment > 5) {
            volumePercent += (10 - adjustment);
        }
        return String.format(template, volumePercent);
    }
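
    // For example, with the ring stream at volume 5 of a maximum of 7 the raw percentage
    // is 71, which the rounding above announces as 70; volume 7 of 7 is announced as 100.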
    /**
     * Configures the service according to a ringer mode. Possible
     * configurations:
     * <p>
     * 1. {@link AudioManager#RINGER_MODE_SILENT}<br>
     * Goal: Provide only custom haptic feedback.<br>
     * Approach: Take over the haptic feedback by configuring this service to provide
     * such and do so. This way the system will not call the default haptic
     * feedback service KickBack.<br>
     * Take over the audible and spoken feedback by configuring this
     * service to provide such feedback but not doing so. This way the system
     * will not call the default spoken feedback service TalkBack and the
     * default audible feedback service SoundBack.
     * </p>
     * <p>
     * 2. {@link AudioManager#RINGER_MODE_VIBRATE}<br>
     * Goal: Provide custom audible and default haptic feedback.<br>
     * Approach: Take over the audible feedback and provide a custom one.<br>
     * Take over the spoken feedback but do not provide such.<br>
     * Let some other service provide haptic feedback (KickBack).
     * </p>
     * <p>
     * 3. {@link AudioManager#RINGER_MODE_NORMAL}<br>
     * Goal: Provide custom spoken, default audible and default haptic feedback.<br>
     * Approach: Take over the spoken feedback and provide a custom one.<br>
     * Let some other services provide audible feedback (SoundBack) and haptic
     * feedback (KickBack).
     * </p>
     * Note: In the above description an assumption is made that all default feedback
     * services are enabled. Such services are TalkBack, SoundBack, and KickBack.
     * Also the feature of defining a service as the default for a given feedback
     * type will be available in Froyo and after. For previous releases the package
     * specific accessibility service must be registered first i.e. checked in the
     * settings.
     *
     * @param ringerMode The device ringer mode.
     */
    private void configureForRingerMode(int ringerMode) {
        if (ringerMode == AudioManager.RINGER_MODE_SILENT) {
            // when the ringer is silent we want to provide only haptic feedback
            mProvidedFeedbackType = AccessibilityServiceInfo.FEEDBACK_HAPTIC;
            // take over the spoken and sound feedback so no such feedback is provided
            setServiceInfo(AccessibilityServiceInfo.FEEDBACK_HAPTIC
                    | AccessibilityServiceInfo.FEEDBACK_SPOKEN
                    | AccessibilityServiceInfo.FEEDBACK_AUDIBLE);
            // use only an earcon to announce ringer state change
            mHandler.obtainMessage(WHAT_PLAY_EARCON, INDEX_RINGER_SILENT, 0).sendToTarget();
        } else if (ringerMode == AudioManager.RINGER_MODE_VIBRATE) {
            // when the ringer is vibrating we want to provide only audible feedback
            mProvidedFeedbackType = AccessibilityServiceInfo.FEEDBACK_AUDIBLE;
            // take over the spoken feedback so no spoken feedback is provided
            setServiceInfo(AccessibilityServiceInfo.FEEDBACK_AUDIBLE
                    | AccessibilityServiceInfo.FEEDBACK_SPOKEN);
            // use only an earcon to announce ringer state change
            mHandler.obtainMessage(WHAT_PLAY_EARCON, INDEX_RINGER_VIBRATE, 0).sendToTarget();
        } else if (ringerMode == AudioManager.RINGER_MODE_NORMAL) {
            // when the ringer is ringing we want to provide spoken feedback
            // overriding the default spoken feedback
            mProvidedFeedbackType = AccessibilityServiceInfo.FEEDBACK_SPOKEN;
            setServiceInfo(AccessibilityServiceInfo.FEEDBACK_SPOKEN);
            // use only an earcon to announce ringer state change
            mHandler.obtainMessage(WHAT_PLAY_EARCON, INDEX_RINGER_NORMAL, 0).sendToTarget();
        }
    }
    /**
     * Sets the {@link AccessibilityServiceInfo} which informs the system how to
     * handle this {@link AccessibilityService}.
     *
     * @param feedbackType The type of feedback this service will provide.
     *            <p>
     *            Note: The feedbackType parameter is a bitwise OR of all
     *            feedback types this service would like to provide.
     */
    private void setServiceInfo(int feedbackType) {
        AccessibilityServiceInfo info = new AccessibilityServiceInfo();
        // we are interested in all types of accessibility events
        info.eventTypes = AccessibilityEvent.TYPES_ALL_MASK;
        // we want to provide a specific type of feedback
        info.feedbackType = feedbackType;
        // we want to receive events in a certain interval
        info.notificationTimeout = EVENT_NOTIFICATION_TIMEOUT_MILLIS;
        // we want to receive accessibility events only from certain packages
        info.packageNames = PACKAGE_NAMES;
        setServiceInfo(info);
    }
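
    // Note: configuring the service at runtime like this is what lets configureForRingerMode()
    // switch the feedback type on the fly. Later Android releases also allow an accessibility
    // service to declare this configuration statically in XML meta-data, but a static
    // declaration could not change with the ringer mode.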
    @Override
    public void onAccessibilityEvent(AccessibilityEvent event) {
        Log.i(LOG_TAG, mProvidedFeedbackType + " " + event.toString());
        // here we act according to the feedback type we are currently providing
        if (mProvidedFeedbackType == AccessibilityServiceInfo.FEEDBACK_SPOKEN) {
            mHandler.obtainMessage(WHAT_SPEAK, formatUtterance(event)).sendToTarget();
        } else if (mProvidedFeedbackType == AccessibilityServiceInfo.FEEDBACK_AUDIBLE) {
            mHandler.obtainMessage(WHAT_PLAY_EARCON, event.getEventType(), 0).sendToTarget();
        } else if (mProvidedFeedbackType == AccessibilityServiceInfo.FEEDBACK_HAPTIC) {
            mHandler.obtainMessage(WHAT_VIBRATE, event.getEventType(), 0).sendToTarget();
        } else {
            throw new IllegalStateException("Unexpected feedback type " + mProvidedFeedbackType);
        }
    }
    @Override
    public void onInterrupt() {
        // here we act according to the feedback type we are currently providing
        if (mProvidedFeedbackType == AccessibilityServiceInfo.FEEDBACK_SPOKEN) {
            // sendToTarget() is required - merely obtaining a message does not dispatch it
            mHandler.obtainMessage(WHAT_STOP_SPEAK).sendToTarget();
        } else if (mProvidedFeedbackType == AccessibilityServiceInfo.FEEDBACK_AUDIBLE) {
            mHandler.obtainMessage(WHAT_STOP_PLAY_EARCON).sendToTarget();
        } else if (mProvidedFeedbackType == AccessibilityServiceInfo.FEEDBACK_HAPTIC) {
            mHandler.obtainMessage(WHAT_STOP_VIBRATE).sendToTarget();
        } else {
            throw new IllegalStateException("Unexpected feedback type " + mProvidedFeedbackType);
        }
    }
    /**
     * Formats an utterance from an {@link AccessibilityEvent}.
     *
     * @param event The event from which to format an utterance.
     * @return The formatted utterance.
     */
    private String formatUtterance(AccessibilityEvent event) {
        StringBuilder utterance = mUtterance;
        // clear the utterance before appending the formatted text
        utterance.delete(0, utterance.length());
        List<CharSequence> eventText = event.getText();
        // use the event text if there is any
        if (!eventText.isEmpty()) {
            for (CharSequence subText : eventText) {
                utterance.append(subText);
                utterance.append(SPACE);
            }
            // here we do a bit of enhancement of the UI presentation by using the semantics
            // of the event source in the context of the Clock application
            if (CLASS_NAME_EDIT_TEXT.equals(event.getClassName())) {
                // if the source is an edit text box and we have a mapping based on
                // its position in the items of the container parent of the event source
                // we append that value as well. We say "XX hours" and "XX minutes".
                String resourceValue = getPositionMappedStringResource(event.getItemCount(),
                        event.getCurrentItemIndex());
                if (resourceValue != null) {
                    utterance.append(resourceValue);
                }
            }
            return utterance.toString();
        }
        // There is no event text but we try to get the content description which is
        // an optional attribute for describing a view (typically used with ImageView)
        CharSequence contentDescription = event.getContentDescription();
        if (contentDescription != null) {
            utterance.append(contentDescription);
            return utterance.toString();
        }
        // No text and no content description for the plus and minus buttons, so we look up
        // custom values based on the event's itemCount and currentItemIndex properties.
        CharSequence className = event.getClassName();
        if (CLASS_NAME_NUMBER_PICKER_BUTTON_ALARM_CLOCK.equals(className)
                || CLASS_NAME_NUMBER_PICKER_BUTTON_CLOCK.equals(className)) {
            String resourceValue = getPositionMappedStringResource(event.getItemCount(),
                    event.getCurrentItemIndex());
            // the lookup may return null for positions we have not mapped
            if (resourceValue != null) {
                utterance.append(resourceValue);
            }
        }
        return utterance.toString();
    }
    /**
     * Returns a string resource mapped for a given position based on
     * {@link AccessibilityEvent#getItemCount()} and
     * {@link AccessibilityEvent#getCurrentItemIndex()} properties.
     *
     * @param itemCount The value of {@link AccessibilityEvent#getItemCount()}.
     * @param currentItemIndex The value of
     *            {@link AccessibilityEvent#getCurrentItemIndex()}.
     * @return The mapped string if such exists, null otherwise.
     */
    private String getPositionMappedStringResource(int itemCount, int currentItemIndex) {
        int lookupIndex = computeLookupIndex(itemCount, currentItemIndex);
        // the SparseArray returns null for unmapped indices; guard against unboxing it
        Integer resourceId = sPositionMappedStringResourceIds.get(lookupIndex);
        if (resourceId == null) {
            return null;
        }
        return getString(resourceId);
    }
    /**
     * Computes an index for looking up the custom text for views with neither
     * text nor content description. The index is computed based on
     * {@link AccessibilityEvent#getItemCount()} and
     * {@link AccessibilityEvent#getCurrentItemIndex()} properties.
     *
     * @param itemCount The number of all items in the event source.
     * @param currentItemIndex The index of the item source of the event.
     * @return The lookup index.
     */
    private int computeLookupIndex(int itemCount, int currentItemIndex) {
        int lookupIndex = itemCount;
        int divided = currentItemIndex;
        while (divided > 0) {
            lookupIndex *= 10;
            divided /= 10;
        }
        return lookupIndex + currentItemIndex;
    }
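
    // For example, itemCount 1 and currentItemIndex 14 yield 114: the loop shifts the item
    // count left by one decimal digit per digit of the index (1 -> 10 -> 100) and the index
    // is then added, effectively concatenating the two numbers. These are the keys used in
    // sPositionMappedStringResourceIds above.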
    /**
     * Plays an earcon given its id.
     *
     * @param earconId The id of the earcon to be played.
     */
    private void playEarcon(int earconId) {
        String earconName = mEarconNames.get(earconId);
        if (earconName == null) {
            // we do not know the sound id, hence we need to load the sound
            int resourceId = sSoundsResourceIds.get(earconId);
            earconName = "[" + earconId + "]";
            mTts.addEarcon(earconName, getPackageName(), resourceId);
            mEarconNames.put(earconId, earconName);
        }
        mTts.playEarcon(earconName, QUEUING_MODE_INTERRUPT, null);
    }
}