
/ocr/ocrservice/src/com/googlecode/eyesfree/ocr/intent/VoiceGestureView.java

http://eyes-free.googlecode.com/
/*
 * Copyright (C) 2011 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package com.googlecode.eyesfree.ocr.intent;

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.speech.tts.TextToSpeech;
import android.util.AttributeSet;
import android.view.GestureDetector;
import android.view.GestureDetector.SimpleOnGestureListener;
import android.view.MotionEvent;
import android.view.View;

import com.googlecode.eyesfree.ocr.R;

import java.util.HashMap;
import java.util.LinkedList;
import java.util.Vector;

/**
 * A view that speaks a queue of utterances and lets the user step through them
 * by touch: single taps toggle manual stepping, and vertical swipes move to the
 * next or previous utterance.
 *
 * @author alanv@google.com (Alan Viverette)
 */
public class VoiceGestureView extends View {
    private static final String TAG = "VoiceGestureView";
    private static final String PACKAGE = "com.googlecode.eyesfree.ocr";

    // String keys under which the earcon sound resources are registered with
    // the TTS engine in ttsInitListener.
    private static final String EARCON_CLICK = "[click]";
    private static final String EARCON_LOUD_BEEP = "[long_beep]";
    private static final String EARCON_DOUBLE_BEEP = "[double_beep]";

    private GestureDetector mDetector;
    private Paint mPaint;
    private HashMap<String, String> mParams;
    private TextToSpeech mTts;

    /** Utterances that have already been spoken, most recently spoken first. */
    private LinkedList<String> mOldUtterances;

    /** Utterances waiting to be spoken, next to be spoken first. */
    private LinkedList<String> mNewUtterances;

    /** The utterance currently being spoken, or null if none. */
    private String mCurrentUtterance;

    /** Whether the TTS engine has finished initializing. */
    private boolean mTtsReady;

    /** Whether the user is stepping through utterances manually. */
    private boolean mManualMode;

    public VoiceGestureView(Context context) {
        super(context);

        init();
    }

    public VoiceGestureView(Context context, AttributeSet attrs) {
        super(context, attrs);

        init();
    }

    public VoiceGestureView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);

        init();
    }

    private void init() {
        Context context = getContext();

        mDetector = new GestureDetector(gestureListener);

        mPaint = new Paint();

        mParams = new HashMap<String, String>();
        mParams.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, TAG);

        mTts = new TextToSpeech(context, ttsInitListener);

        mOldUtterances = new LinkedList<String>();
        mNewUtterances = new LinkedList<String>();
        mCurrentUtterance = null;

        mManualMode = false;
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();

        mTts.shutdown();
    }

    /** Draws a translucent debug overlay showing the state of the utterance queues. */
    @Override
    protected void onDraw(Canvas canvas) {
        Vector<String> lines = new Vector<String>();
        lines.add("Queued: " + mNewUtterances.size());
        lines.add("Speaking: " + (mCurrentUtterance != null));
        lines.add("Spoken: " + mOldUtterances.size());

        final Paint p = new Paint();
        final int kLargeTextSize = 20;
        final int kSmallTextSize = 16;
        final int kTextBufferSize = 4;

        // TODO(andrewharp): Don't hardcode this, figure out the text
        // length.
        final int shadedWidth = 200;

        // Each block has one large header line followed by a buffer, then N
        // smaller lines each followed by a buffer, and then an additional
        // buffer.
        final int shadedHeight = kLargeTextSize + kTextBufferSize
                + (kSmallTextSize + kTextBufferSize) * lines.size() + kTextBufferSize;

        int startingYPos = 0;

        p.setColor(Color.BLACK);
        p.setAlpha(100);

        int yPos = startingYPos;
        int xPos = 0;

        canvas.drawRect(new Rect(xPos, yPos, xPos + shadedWidth, yPos + shadedHeight), p);

        // Header line.
        p.setAlpha(255);

        p.setAntiAlias(true);
        p.setColor(Color.CYAN);
        p.setTextSize(kLargeTextSize);
        yPos += kLargeTextSize + kTextBufferSize;
        canvas.drawText(TAG, xPos, yPos, p);

        mPaint.setColor(Color.WHITE);
        mPaint.setTextSize(kSmallTextSize);
        for (final String line : lines) {
            yPos += kSmallTextSize + kTextBufferSize;
            canvas.drawText(line, xPos, yPos, mPaint);
        }
    }

    /** Shuts down the TTS engine used by this view. */
    public void shutdown() {
        mTts.shutdown();
    }

    /**
     * Adds an utterance to the queue. If no utterance is currently being
     * spoken and the view is not in manual mode, the new utterance is spoken
     * immediately.
     *
     * @param utterance the utterance to add to the queue
     */
    public void addUtterance(String utterance) {
        synchronized (this) {
            mNewUtterances.addLast(utterance);

            // If the current utterance is null, advance to the one we added.
            if (mCurrentUtterance == null && !mManualMode) {
                changeUtterance(1);
            }
        }
    }

    @Override
    public boolean onTouchEvent(MotionEvent e) {
        // Let the gesture detector inspect the event, but always consume it so
        // this view keeps receiving the rest of the gesture.
        mDetector.onTouchEvent(e);
        return true;
    }

    /**
     * Handles a single tap: advances to the next utterance if the user was
     * stepping manually, otherwise stops the current utterance.
     */
    private boolean onSingleTap() {
        if (mManualMode) {
            mManualMode = false;
            return changeUtterance(1);
        } else {
            mManualMode = true;
            return changeUtterance(0);
        }
    }

    /**
     * Handles a vertical swipe. A downward swipe (positive delta) moves to the
     * next utterance; an upward swipe moves back to the previous one.
     */
    private boolean onVerticalSwipe(float delta) {
        mManualMode = true;

        if (delta > 0) {
            return changeUtterance(1);
        } else {
            return changeUtterance(-1);
        }
    }

    /**
     * Moves through the utterance queues and speaks the result. A positive
     * direction advances to the next queued utterance, a negative direction
     * backs up to the most recently spoken one, and zero stops the current
     * utterance and moves it to the spoken list.
     *
     * @return true if a new utterance is now being spoken, false if there was
     *         nothing to speak in that direction
     */
    private boolean changeUtterance(int direction) {
        boolean changed;

        synchronized (this) {
            if (!mTtsReady) {
                return false;
            }

            LinkedList<String> src;
            LinkedList<String> dst;

            if (direction < 0) {
                src = mOldUtterances;
                dst = mNewUtterances;
            } else if (direction > 0) {
                src = mNewUtterances;
                dst = mOldUtterances;
            } else {
                src = null;
                dst = mOldUtterances;
            }

            if (mCurrentUtterance != null) {
                dst.addFirst(mCurrentUtterance);
            }

            if (src != null && !src.isEmpty()) {
                mCurrentUtterance = src.removeFirst();
                mTts.speak(mCurrentUtterance, TextToSpeech.QUEUE_FLUSH, null);
                // The click earcon carries the utterance ID, so the completion
                // listener fires once the utterance and click have played.
                mTts.speak(EARCON_CLICK, TextToSpeech.QUEUE_ADD, mParams);

                changed = true;
            } else {
                // Nothing left in that direction; play an error beep and drop
                // back to automatic mode.
                mCurrentUtterance = null;
                mTts.speak(EARCON_LOUD_BEEP, TextToSpeech.QUEUE_FLUSH, null);

                changed = false;
                mManualMode = false;
            }
        }

        postInvalidate();

        return changed;
    }

    /**
     * Maximum horizontal travel, as a fraction of the view width, for a fling
     * to still count as a vertical swipe.
     */
    private static final float X_TOLERANCE = 0.25f;

    private final SimpleOnGestureListener gestureListener = new SimpleOnGestureListener() {
        @Override
        public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
            int width = getWidth();
            int height = getHeight();

            float dX = (e2.getX() - e1.getX()) / width;
            float dY = (e2.getY() - e1.getY()) / height;

            // Treat the fling as a vertical swipe only if it stayed mostly
            // vertical, then use the vertical delta to pick a direction.
            if (Math.abs(dX) < X_TOLERANCE) {
                return onVerticalSwipe(dY);
            }

            return false;
        }

        @Override
        public boolean onSingleTapUp(MotionEvent e) {
            return onSingleTap();
        }
    };

    private final TextToSpeech.OnInitListener ttsInitListener = new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(int status) {
            synchronized (VoiceGestureView.this) {
                // Only hook up the earcons and mark the engine ready if
                // initialization actually succeeded.
                if (status != TextToSpeech.SUCCESS) {
                    return;
                }

                mTtsReady = true;

                mTts.setOnUtteranceCompletedListener(utteranceListener);
                mTts.addSpeech(EARCON_CLICK, PACKAGE, R.raw.click);
                mTts.addSpeech(EARCON_LOUD_BEEP, PACKAGE, R.raw.loud_beep);
                mTts.addSpeech(EARCON_DOUBLE_BEEP, PACKAGE, R.raw.double_beep);
            }
        }
    };

    private final TextToSpeech.OnUtteranceCompletedListener utteranceListener =
            new TextToSpeech.OnUtteranceCompletedListener() {
                @Override
                public void onUtteranceCompleted(String utteranceId) {
                    // When an utterance finishes, immediately move to the next
                    // one unless the user has taken manual control.
                    if (!mManualMode) {
                        changeUtterance(1);
                    }
                }
            };
}
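
For orientation, below is a minimal usage sketch of how this view might be driven from an Activity. It is not part of the file above; the layout resource (R.layout.results) and view id (R.id.voice_gesture_view) are assumptions, and only the public constructors, addUtterance(String), and shutdown() that it calls appear in the class.

// Hypothetical usage sketch: queues two recognized lines of text and releases
// the view's TTS engine when the Activity is destroyed.
public class ResultsActivity extends android.app.Activity {
    private VoiceGestureView mGestureView;

    @Override
    protected void onCreate(android.os.Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.results);                                        // assumed layout
        mGestureView = (VoiceGestureView) findViewById(R.id.voice_gesture_view); // assumed id

        // The first call starts speaking immediately; later utterances queue up
        // and play automatically as each one completes.
        mGestureView.addUtterance("First recognized line");
        mGestureView.addUtterance("Second recognized line");
    }

    @Override
    protected void onDestroy() {
        mGestureView.shutdown();
        super.onDestroy();
    }
}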