
/ime/latinime/src/com/googlecode/eyesfree/inputmethod/voice/VoiceInput.java

http://eyes-free.googlecode.com/
/*
 * Copyright (C) 2009 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package com.googlecode.eyesfree.inputmethod.voice;

import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.os.Parcelable;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.inputmethod.InputConnection;

import com.googlecode.eyesfree.inputmethod.latin.EditingUtil;
import com.googlecode.eyesfree.inputmethod.latin.R;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;

/**
 * Speech recognition input, including both user interface and a background
 * process to stream audio to the network recognizer. This class supplies a
 * View (getView()), which it updates as recognition occurs. The user of this
 * class is responsible for making the view visible to the user, as well as
 * handling various events returned through UiListener.
 */
public class VoiceInput implements OnClickListener {
    private static final String TAG = "VoiceInput";
    private static final String EXTRA_RECOGNITION_CONTEXT =
            "android.speech.extras.RECOGNITION_CONTEXT";
    private static final String EXTRA_CALLING_PACKAGE = "calling_package";
    private static final String EXTRA_ALTERNATES = "android.speech.extra.ALTERNATES";
    private static final int MAX_ALT_LIST_LENGTH = 6;

    private static final String DEFAULT_RECOMMENDED_PACKAGES =
            "com.android.mms " +
            "com.google.android.gm " +
            "com.google.android.talk " +
            "com.google.android.apps.googlevoice " +
            "com.android.email " +
            "com.android.browser ";
    // WARNING! Before enabling this, fix the problem with calling getExtractedText() in
    // landscape view. It causes extracted-text updates to be rejected due to a token mismatch.
    public static boolean ENABLE_WORD_CORRECTIONS = true;

    // Dummy word suggestion which means "delete current word"
    public static final String DELETE_SYMBOL = " \u00D7 ";  // times symbol

    private Whitelist mRecommendedList;
    private Whitelist mBlacklist;

    private VoiceInputLogger mLogger;

    // Names of a few extras defined in VoiceSearch's RecognitionController
    // Note, the version of voicesearch that shipped in Froyo returns the raw
    // RecognitionClientAlternates protocol buffer under the key "alternates",
    // so a VS market update must be installed on Froyo devices in order to see
    // alternatives.
    private static final String ALTERNATES_BUNDLE = "alternates_bundle";

    // This is copied from the VoiceSearch app.
    private static final class AlternatesBundleKeys {
        public static final String ALTERNATES = "alternates";
        public static final String LENGTH = "length";
        public static final String SPANS = "spans";
        public static final String START = "start";
        public static final String TEXT = "text";
    }

    // Names of a few intent extras defined in VoiceSearch's RecognitionService.
    // These let us tweak the endpointer parameters.
    private static final String EXTRA_SPEECH_MINIMUM_LENGTH_MILLIS =
            "android.speech.extras.SPEECH_INPUT_MINIMUM_LENGTH_MILLIS";
    private static final String EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS =
            "android.speech.extras.SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS";
    private static final String EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS =
            "android.speech.extras.SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS";

    // The usual endpointer default value for input complete silence length is 0.5 seconds,
    // but that's used for things like voice search. For dictation-like voice input like this,
    // we go with a more liberal value of 1 second. This value will only be used if a value
    // is not provided from Gservices.
    private static final String INPUT_COMPLETE_SILENCE_LENGTH_DEFAULT_VALUE_MILLIS = "1000";

    // Recognition states; part of this state is recorded for logging purposes.
    public static final int DEFAULT = 0;
    public static final int LISTENING = 1;
    public static final int WORKING = 2;
    public static final int ERROR = 3;

    private int mAfterVoiceInputDeleteCount = 0;
    private int mAfterVoiceInputInsertCount = 0;
    private int mAfterVoiceInputInsertPunctuationCount = 0;
    private int mAfterVoiceInputCursorPos = 0;
    private int mAfterVoiceInputSelectionSpan = 0;

    private int mState = DEFAULT;

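    // Message that auto-dismisses the error view after a delay; see onError(String).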
    private static final int MSG_CLOSE_ERROR_DIALOG = 1;

    private final Handler mHandler = new Handler() {
        @Override
        public void handleMessage(Message msg) {
            if (msg.what == MSG_CLOSE_ERROR_DIALOG) {
                mState = DEFAULT;
                mRecognitionView.finish();
                mUiListener.onCancelVoice();
            }
        }
    };

    /**
     * Events relating to the recognition UI. You must implement these.
     */
    public interface UiListener {

        /**
         * @param recognitionResults a set of transcripts for what the user
         *   spoke, sorted by likelihood.
         */
        public void onVoiceResults(
            List<String> recognitionResults,
            Map<String, List<CharSequence>> alternatives);

        /**
         * Called when the user cancels speech recognition.
         */
        public void onCancelVoice();
    }
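
    // Minimal usage sketch (illustrative only; MyVoiceIme and onMicKey are hypothetical,
    // not part of this file). A host IME implements UiListener, constructs a VoiceInput,
    // makes getView() visible to the user, and calls startListening() to begin recognition:
    //
    //   class MyVoiceIme extends InputMethodService implements VoiceInput.UiListener {
    //       private VoiceInput mVoiceInput;
    //
    //       @Override public void onCreate() {
    //           super.onCreate();
    //           mVoiceInput = new VoiceInput(this, this);
    //       }
    //
    //       void onMicKey(FieldContext field) {
    //           setInputView(mVoiceInput.getView()); // caller is responsible for visibility
    //           mVoiceInput.startListening(field, false); // false: not triggered by swipe
    //       }
    //
    //       public void onVoiceResults(List<String> results,
    //               Map<String, List<CharSequence>> alternatives) { ... }
    //
    //       public void onCancelVoice() { ... }
    //   }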

    private SpeechRecognizer mSpeechRecognizer;
    private RecognitionListener mRecognitionListener;
    private RecognitionView mRecognitionView;
    private UiListener mUiListener;
    private Context mContext;

    /**
     * @param context the service or activity in which we're running.
     * @param uiHandler object to receive events from VoiceInput.
     */
    public VoiceInput(Context context, UiListener uiHandler) {
        mLogger = VoiceInputLogger.getLogger(context);
        mRecognitionListener = new ImeRecognitionListener();
        mSpeechRecognizer = SpeechRecognizer.createSpeechRecognizer(context);
        mSpeechRecognizer.setRecognitionListener(mRecognitionListener);
        mUiListener = uiHandler;
        mContext = context;
        newView();

        String recommendedPackages = SettingsUtil.getSettingsString(
                context.getContentResolver(),
                SettingsUtil.LATIN_IME_VOICE_INPUT_RECOMMENDED_PACKAGES,
                DEFAULT_RECOMMENDED_PACKAGES);

        mRecommendedList = new Whitelist();
        for (String recommendedPackage : recommendedPackages.split("\\s+")) {
            mRecommendedList.addApp(recommendedPackage);
        }

        mBlacklist = new Whitelist();
        mBlacklist.addApp("com.android.setupwizard");
    }

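    // Cursor position and selection span after a voice input session, recorded by the client
    // of this class; the selection span feeds the text-modification counters below.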
    public void setCursorPos(int pos) {
        mAfterVoiceInputCursorPos = pos;
    }

    public int getCursorPos() {
        return mAfterVoiceInputCursorPos;
    }

    public void setSelectionSpan(int span) {
        mAfterVoiceInputSelectionSpan = span;
    }

    public int getSelectionSpan() {
        return mAfterVoiceInputSelectionSpan;
    }

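    // The three increment methods below accumulate keystroke edits made after a voice input
    // session. Each one flushes the other pending counter types, so every logged event
    // aggregates a single kind of modification (deletions, insertions, or punctuation).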
    public void incrementTextModificationDeleteCount(int count) {
        mAfterVoiceInputDeleteCount += count;
        // Send up intents for other text modification types
        if (mAfterVoiceInputInsertCount > 0) {
            logTextModifiedByTypingInsertion(mAfterVoiceInputInsertCount);
            mAfterVoiceInputInsertCount = 0;
        }
        if (mAfterVoiceInputInsertPunctuationCount > 0) {
            logTextModifiedByTypingInsertionPunctuation(mAfterVoiceInputInsertPunctuationCount);
            mAfterVoiceInputInsertPunctuationCount = 0;
        }
    }

    public void incrementTextModificationInsertCount(int count) {
        mAfterVoiceInputInsertCount += count;
        if (mAfterVoiceInputSelectionSpan > 0) {
            // If text was highlighted before inserting the char, count this as
            // a delete.
            mAfterVoiceInputDeleteCount += mAfterVoiceInputSelectionSpan;
        }
        // Send up intents for other text modification types
        if (mAfterVoiceInputDeleteCount > 0) {
            logTextModifiedByTypingDeletion(mAfterVoiceInputDeleteCount);
            mAfterVoiceInputDeleteCount = 0;
        }
        if (mAfterVoiceInputInsertPunctuationCount > 0) {
            logTextModifiedByTypingInsertionPunctuation(mAfterVoiceInputInsertPunctuationCount);
            mAfterVoiceInputInsertPunctuationCount = 0;
        }
    }

    public void incrementTextModificationInsertPunctuationCount(int count) {
        mAfterVoiceInputInsertPunctuationCount += count;
        if (mAfterVoiceInputSelectionSpan > 0) {
            // If text was highlighted before inserting the char, count this as
            // a delete.
            mAfterVoiceInputDeleteCount += mAfterVoiceInputSelectionSpan;
        }
        // Send up intents for aggregated non-punctuation insertions
        if (mAfterVoiceInputDeleteCount > 0) {
            logTextModifiedByTypingDeletion(mAfterVoiceInputDeleteCount);
            mAfterVoiceInputDeleteCount = 0;
        }
        if (mAfterVoiceInputInsertCount > 0) {
            logTextModifiedByTypingInsertion(mAfterVoiceInputInsertCount);
            mAfterVoiceInputInsertCount = 0;
        }
    }

    public void flushAllTextModificationCounters() {
        if (mAfterVoiceInputInsertCount > 0) {
            logTextModifiedByTypingInsertion(mAfterVoiceInputInsertCount);
            mAfterVoiceInputInsertCount = 0;
        }
        if (mAfterVoiceInputDeleteCount > 0) {
            logTextModifiedByTypingDeletion(mAfterVoiceInputDeleteCount);
            mAfterVoiceInputDeleteCount = 0;
        }
        if (mAfterVoiceInputInsertPunctuationCount > 0) {
            logTextModifiedByTypingInsertionPunctuation(mAfterVoiceInputInsertPunctuationCount);
            mAfterVoiceInputInsertPunctuationCount = 0;
        }
    }

    /**
     * The configuration of the IME changed and may have caused the views to be laid out
     * again. Restore the state of the recognition view.
     */
    public void onConfigurationChanged() {
        mRecognitionView.restoreState();
    }

    /**
     * @return true if field is blacklisted for voice
     */
    public boolean isBlacklistedField(FieldContext context) {
        return mBlacklist.matches(context);
    }

    /**
     * Used to decide whether to show voice input hints for this field, etc.
     *
     * @return true if field is recommended for voice
     */
    public boolean isRecommendedField(FieldContext context) {
        return mRecommendedList.matches(context);
    }

    /**
     * Start listening for speech from the user. This will grab the microphone
     * and start updating the view provided by getView(). It is the caller's
     * responsibility to ensure that the view is visible to the user at this stage.
     *
     * @param context the same FieldContext supplied to voiceIsEnabled()
     * @param swipe whether this voice input was started by swipe, for logging purposes
     */
    public void startListening(FieldContext context, boolean swipe) {
        mState = DEFAULT;

        Locale locale = Locale.getDefault();
        String localeString = locale.getLanguage() + "-" + locale.getCountry();

        mLogger.start(localeString, swipe);

        mState = LISTENING;

        mRecognitionView.showInitializing();
        startListeningAfterInitialization(context);
    }

    /**
     * Called only when the recognition manager's initialization has completed.
     *
     * @param context context with which {@link #startListening(FieldContext, boolean)} was executed
     */
    private void startListeningAfterInitialization(FieldContext context) {
        Intent intent = makeIntent();
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, "");
        intent.putExtra(EXTRA_RECOGNITION_CONTEXT, context.getBundle());
        intent.putExtra(EXTRA_CALLING_PACKAGE, "VoiceIME");
        intent.putExtra(EXTRA_ALTERNATES, true);
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS,
                SettingsUtil.getSettingsInt(
                        mContext.getContentResolver(),
                        SettingsUtil.LATIN_IME_MAX_VOICE_RESULTS,
                        1));
        // Get endpointer params from Gservices.
        // TODO: Consider caching these values for improved performance on slower devices.
        final ContentResolver cr = mContext.getContentResolver();
        putEndpointerExtra(
                cr,
                intent,
                SettingsUtil.LATIN_IME_SPEECH_MINIMUM_LENGTH_MILLIS,
                EXTRA_SPEECH_MINIMUM_LENGTH_MILLIS,
                null  /* rely on endpointer default */);
        putEndpointerExtra(
                cr,
                intent,
                SettingsUtil.LATIN_IME_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS,
                EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS,
                INPUT_COMPLETE_SILENCE_LENGTH_DEFAULT_VALUE_MILLIS
                /* our default value is different from the endpointer's */);
        putEndpointerExtra(
                cr,
                intent,
                SettingsUtil.LATIN_IME_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS,
                EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS,
                null  /* rely on endpointer default */);

        mSpeechRecognizer.startListening(intent);
    }

    /**
     * Gets the value of the provided Gservices key, attempts to parse it into a long,
     * and if successful, puts the long value as an extra in the provided intent.
     */
    private void putEndpointerExtra(ContentResolver cr, Intent i,
            String gservicesKey, String intentExtraKey, String defaultValue) {
        long l = -1;
        String s = SettingsUtil.getSettingsString(cr, gservicesKey, defaultValue);
        if (s != null) {
            try {
                l = Long.valueOf(s);
            } catch (NumberFormatException e) {
                Log.e(TAG, "could not parse value for " + gservicesKey + ": " + s);
            }
        }

        if (l != -1) i.putExtra(intentExtraKey, l);
    }

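    /** Releases the underlying SpeechRecognizer and its service connection. */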
    public void destroy() {
        mSpeechRecognizer.destroy();
    }

    /**
     * Creates a new instance of the view that is returned by {@link #getView()}.
     * Clients should use this when a previously returned view is stuck in a
     * layout that is being thrown away and a new one is needed to show to the
     * user.
     */
    public void newView() {
        mRecognitionView = new RecognitionView(mContext, this);
    }

    /**
     * @return a view that shows the recognition flow--e.g., "Speak now" and
     * "working" dialogs.
     */
    public View getView() {
        return mRecognitionView.getView();
    }

    /**
     * Handle the cancel button.
     */
    public void onClick(View view) {
        if (view.getId() == R.id.button) {
            cancel();
        }
    }

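    // The log* methods below are mostly thin pass-throughs to VoiceInputLogger, exposed so
    // the client IME can record usage metrics without holding the logger directly.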
    public void logTextModifiedByTypingInsertion(int length) {
        mLogger.textModifiedByTypingInsertion(length);
    }

    public void logTextModifiedByTypingInsertionPunctuation(int length) {
        mLogger.textModifiedByTypingInsertionPunctuation(length);
    }

    public void logTextModifiedByTypingDeletion(int length) {
        mLogger.textModifiedByTypingDeletion(length);
    }

    public void logTextModifiedByChooseSuggestion(String suggestion, int index,
                                                  String wordSeparators, InputConnection ic) {
        EditingUtil.Range range = new EditingUtil.Range();
        String wordToBeReplaced = EditingUtil.getWordAtCursor(ic, wordSeparators, range);
        // If we enable phrase-based alternatives, only send up the first word
        // in suggestion and wordToBeReplaced.
        mLogger.textModifiedByChooseSuggestion(suggestion.length(), wordToBeReplaced.length(),
                                               index, wordToBeReplaced, suggestion);
    }

    public void logKeyboardWarningDialogShown() {
        mLogger.keyboardWarningDialogShown();
    }

    public void logKeyboardWarningDialogDismissed() {
        mLogger.keyboardWarningDialogDismissed();
    }

    public void logKeyboardWarningDialogOk() {
        mLogger.keyboardWarningDialogOk();
    }

    public void logKeyboardWarningDialogCancel() {
        mLogger.keyboardWarningDialogCancel();
    }

    public void logSwipeHintDisplayed() {
        mLogger.swipeHintDisplayed();
    }

    public void logPunctuationHintDisplayed() {
        mLogger.punctuationHintDisplayed();
    }

    public void logVoiceInputDelivered(int length) {
        mLogger.voiceInputDelivered(length);
    }

    public void logInputEnded() {
        mLogger.inputEnded();
    }

    public void flushLogs() {
        mLogger.flush();
    }

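    /**
     * Builds the recognition intent, binding explicitly to the recognition service
     * appropriate for the platform version.
     */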
    private static Intent makeIntent() {
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);

        // On Cupcake, use VoiceIMEHelper since VoiceSearch doesn't support it there.
        // On Donut, always use VoiceSearch, since VoiceIMEHelper and
        // VoiceSearch may conflict.
        if (Build.VERSION.RELEASE.equals("1.5")) {
            intent = intent.setClassName(
              "com.google.android.voiceservice",
              "com.google.android.voiceservice.IMERecognitionService");
        } else {
            intent = intent.setClassName(
              "com.google.android.voicesearch",
              "com.google.android.voicesearch.RecognitionService");
        }

        return intent;
    }

    /**
     * Cancel in-progress speech recognition.
     */
    public void cancel() {
        switch (mState) {
        case LISTENING:
            mLogger.cancelDuringListening();
            break;
        case WORKING:
            mLogger.cancelDuringWorking();
            break;
        case ERROR:
            mLogger.cancelDuringError();
            break;
        }
        mState = DEFAULT;

        // Remove all pending tasks (e.g., timers to cancel voice input)
        mHandler.removeMessages(MSG_CLOSE_ERROR_DIALOG);

        mSpeechRecognizer.cancel();
        mUiListener.onCancelVoice();
        mRecognitionView.finish();
    }

    private int getErrorStringId(int errorType, boolean endpointed) {
        switch (errorType) {
            // We use CLIENT_ERROR to signify that voice search is not available on the device.
            case SpeechRecognizer.ERROR_CLIENT:
                return R.string.voice_not_installed;
            case SpeechRecognizer.ERROR_NETWORK:
                return R.string.voice_network_error;
            case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
                return endpointed ?
                        R.string.voice_network_error : R.string.voice_too_much_speech;
            case SpeechRecognizer.ERROR_AUDIO:
                return R.string.voice_audio_error;
            case SpeechRecognizer.ERROR_SERVER:
                return R.string.voice_server_error;
            case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
                return R.string.voice_speech_timeout;
            case SpeechRecognizer.ERROR_NO_MATCH:
                return R.string.voice_no_match;
            default: return R.string.voice_error;
        }
    }

    private void onError(int errorType, boolean endpointed) {
        Log.i(TAG, "error " + errorType);
        mLogger.error(errorType);
        onError(mContext.getString(getErrorStringId(errorType, endpointed)));
    }

    private void onError(String error) {
        mState = ERROR;
        mRecognitionView.showError(error);
        // Wait a couple of seconds and then automatically dismiss the message.
        mHandler.sendMessageDelayed(Message.obtain(mHandler, MSG_CLOSE_ERROR_DIALOG), 2000);
    }

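    /**
     * RecognitionListener implementation that drives the RecognitionView through the
     * listening/working/error states and forwards final results to the UiListener.
     */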
    private class ImeRecognitionListener implements RecognitionListener {
        // Waveform data
        final ByteArrayOutputStream mWaveBuffer = new ByteArrayOutputStream();
        int mSpeechStart;
        private boolean mEndpointed = false;

        public void onReadyForSpeech(Bundle noiseParams) {
            mRecognitionView.showListening();
        }

        public void onBeginningOfSpeech() {
            mEndpointed = false;
            mSpeechStart = mWaveBuffer.size();
        }

        public void onRmsChanged(float rmsdB) {
            mRecognitionView.updateVoiceMeter(rmsdB);
        }

        public void onBufferReceived(byte[] buf) {
            try {
                mWaveBuffer.write(buf);
            } catch (IOException e) {}
        }

        public void onEndOfSpeech() {
            mEndpointed = true;
            mState = WORKING;
            mRecognitionView.showWorking(mWaveBuffer, mSpeechStart, mWaveBuffer.size());
        }

        public void onError(int errorType) {
            mState = ERROR;
            VoiceInput.this.onError(errorType, mEndpointed);
        }

        public void onResults(Bundle resultsBundle) {
            List<String> results = resultsBundle
                    .getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
            // A VoiceSearch Market update is needed for Froyo IME clients to access the
            // alternates bundle. TODO: verify this.
            Bundle alternatesBundle = resultsBundle.getBundle(ALTERNATES_BUNDLE);
            mState = DEFAULT;

            final Map<String, List<CharSequence>> alternatives =
                new HashMap<String, List<CharSequence>>();

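            // Expected layout of the alternates bundle (inferred from the parsing below; the
            // exact structure is defined by the VoiceSearch app, so treat this as an
            // assumption): a SPANS sub-bundle whose entries each carry START, LENGTH, and an
            // ALTERNATES Parcelable array of bundles, each with a TEXT string.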
            if (ENABLE_WORD_CORRECTIONS && alternatesBundle != null && results.size() > 0) {
                // Use the top recognition result to map each alternative's start:length to a word.
                String[] words = results.get(0).split(" ");
                Bundle spansBundle = alternatesBundle.getBundle(AlternatesBundleKeys.SPANS);
                for (String key : spansBundle.keySet()) {
                    // Get the word to which these alternates correspond.
                    Bundle spanBundle = spansBundle.getBundle(key);
                    int start = spanBundle.getInt(AlternatesBundleKeys.START);
                    int length = spanBundle.getInt(AlternatesBundleKeys.LENGTH);
                    // Only keep single-word alternatives.
                    if (length == 1 && start < words.length) {
                        // Get the alternatives associated with the span.
                        // If a word appears twice in a recognition result,
                        // concatenate the alternatives for the word.
                        List<CharSequence> altList = alternatives.get(words[start]);
                        if (altList == null) {
                            altList = new ArrayList<CharSequence>();
                            alternatives.put(words[start], altList);
                        }
                        Parcelable[] alternatesArr = spanBundle
                            .getParcelableArray(AlternatesBundleKeys.ALTERNATES);
                        for (int j = 0; j < alternatesArr.length &&
                                 altList.size() < MAX_ALT_LIST_LENGTH; j++) {
                            Bundle alternateBundle = (Bundle) alternatesArr[j];
                            String alternate = alternateBundle.getString(AlternatesBundleKeys.TEXT);
                            // Don't allow duplicates in the alternates list.
                            if (!altList.contains(alternate)) {
                                altList.add(alternate);
                            }
                        }
                    }
                }
            }

            if (results.size() > 5) {
                results = results.subList(0, 5);
            }
            mUiListener.onVoiceResults(results, alternatives);
            mRecognitionView.finish();
        }

        public void onPartialResults(final Bundle partialResults) {
            // currently - do nothing
        }

        public void onEvent(int eventType, Bundle params) {
            // do nothing - reserved for events that might be added in the future
        }
    }
}