1 /* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); you may not 5 * use this file except in compliance with the License. You may obtain a copy of 6 * the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 12 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 13 * License for the specific language governing permissions and limitations under 14 * the License. 15 */ 16 17 package android.speech; 18 19 import android.Manifest; 20 import android.annotation.NonNull; 21 import android.annotation.Nullable; 22 import android.annotation.SdkConstant; 23 import android.annotation.SdkConstant.SdkConstantType; 24 import android.annotation.SuppressLint; 25 import android.app.AppOpsManager; 26 import android.app.Service; 27 import android.content.AttributionSource; 28 import android.content.Context; 29 import android.content.ContextParams; 30 import android.content.Intent; 31 import android.content.PermissionChecker; 32 import android.os.Binder; 33 import android.os.Bundle; 34 import android.os.Handler; 35 import android.os.IBinder; 36 import android.os.Looper; 37 import android.os.Message; 38 import android.os.RemoteException; 39 import android.util.Log; 40 41 import com.android.internal.annotations.GuardedBy; 42 import com.android.internal.util.function.pooled.PooledLambda; 43 44 import java.lang.ref.WeakReference; 45 import java.util.HashMap; 46 import java.util.Map; 47 import java.util.Objects; 48 49 /** 50 * This class provides a base class for recognition service implementations. This class should be 51 * extended only in case you wish to implement a new speech recognizer. Please note that the 52 * implementation of this service is stateless. 53 */ 54 public abstract class RecognitionService extends Service { 55 /** 56 * The {@link Intent} that must be declared as handled by the service. 57 */ 58 @SdkConstant(SdkConstantType.SERVICE_ACTION) 59 public static final String SERVICE_INTERFACE = "android.speech.RecognitionService"; 60 61 /** 62 * Name under which a RecognitionService component publishes information about itself. 63 * This meta-data should reference an XML resource containing a 64 * <code><{@link android.R.styleable#RecognitionService recognition-service}></code> or 65 * <code><{@link android.R.styleable#RecognitionService on-device-recognition-service} 66 * ></code> tag. 67 */ 68 public static final String SERVICE_META_DATA = "android.speech"; 69 70 /** Log messages identifier */ 71 private static final String TAG = "RecognitionService"; 72 73 /** Debugging flag */ 74 private static final boolean DBG = false; 75 76 private static final int DEFAULT_MAX_CONCURRENT_SESSIONS_COUNT = 1; 77 78 private final Map<IBinder, SessionState> mSessions = new HashMap<>(); 79 80 /** Binder of the recognition service */ 81 private final RecognitionServiceBinder mBinder = new RecognitionServiceBinder(this); 82 83 private static final int MSG_START_LISTENING = 1; 84 85 private static final int MSG_STOP_LISTENING = 2; 86 87 private static final int MSG_CANCEL = 3; 88 89 private static final int MSG_RESET = 4; 90 91 private static final int MSG_CHECK_RECOGNITION_SUPPORT = 5; 92 93 private static final int MSG_TRIGGER_MODEL_DOWNLOAD = 6; 94 95 private final Handler mHandler = new Handler() { 96 @Override 97 public void handleMessage(Message msg) { 98 switch (msg.what) { 99 case MSG_START_LISTENING: 100 StartListeningArgs args = (StartListeningArgs) msg.obj; 101 dispatchStartListening(args.mIntent, args.mListener, args.mAttributionSource); 102 break; 103 case MSG_STOP_LISTENING: 104 dispatchStopListening((IRecognitionListener) msg.obj); 105 break; 106 case MSG_CANCEL: 107 dispatchCancel((IRecognitionListener) msg.obj); 108 break; 109 case MSG_RESET: 110 dispatchClearCallback((IRecognitionListener) msg.obj); 111 break; 112 case MSG_CHECK_RECOGNITION_SUPPORT: 113 CheckRecognitionSupportArgs checkArgs = (CheckRecognitionSupportArgs) msg.obj; 114 dispatchCheckRecognitionSupport( 115 checkArgs.mIntent, checkArgs.callback, checkArgs.mAttributionSource); 116 break; 117 case MSG_TRIGGER_MODEL_DOWNLOAD: 118 ModelDownloadArgs modelDownloadArgs = (ModelDownloadArgs) msg.obj; 119 dispatchTriggerModelDownload( 120 modelDownloadArgs.mIntent, 121 modelDownloadArgs.mAttributionSource, 122 modelDownloadArgs.mListener); 123 break; 124 } 125 } 126 }; 127 dispatchStartListening(Intent intent, final IRecognitionListener listener, @NonNull AttributionSource attributionSource)128 private void dispatchStartListening(Intent intent, final IRecognitionListener listener, 129 @NonNull AttributionSource attributionSource) { 130 Callback currentCallback = null; 131 SessionState sessionState = mSessions.get(listener.asBinder()); 132 133 try { 134 if (sessionState == null) { 135 if (mSessions.size() >= getMaxConcurrentSessionsCount()) { 136 listener.onError(SpeechRecognizer.ERROR_RECOGNIZER_BUSY); 137 Log.i(TAG, "#startListening received " 138 + "when the service's capacity is full - ignoring this call."); 139 return; 140 } 141 142 boolean preflightPermissionCheckPassed = 143 intent.hasExtra(RecognizerIntent.EXTRA_AUDIO_SOURCE) 144 || checkPermissionForPreflightNotHardDenied(attributionSource); 145 if (preflightPermissionCheckPassed) { 146 currentCallback = new Callback(listener, attributionSource); 147 sessionState = new SessionState(currentCallback); 148 mSessions.put(listener.asBinder(), sessionState); 149 if (DBG) { 150 Log.d(TAG, "Added a new session to the map, pending permission checks"); 151 } 152 RecognitionService.this.onStartListening(intent, currentCallback); 153 } 154 155 if (!preflightPermissionCheckPassed 156 || !checkPermissionAndStartDataDelivery(sessionState)) { 157 listener.onError(SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS); 158 if (preflightPermissionCheckPassed) { 159 // If start listening was attempted, cancel the callback. 160 RecognitionService.this.onCancel(currentCallback); 161 mSessions.remove(listener.asBinder()); 162 finishDataDelivery(sessionState); 163 sessionState.reset(); 164 } 165 Log.i(TAG, "#startListening received from a caller " 166 + "without permission " + Manifest.permission.RECORD_AUDIO + "."); 167 } 168 } else { 169 listener.onError(SpeechRecognizer.ERROR_CLIENT); 170 Log.i(TAG, "#startListening received " 171 + "for a listener which is already in session - ignoring this call."); 172 } 173 } catch (RemoteException e) { 174 Log.d(TAG, "#onError call from #startListening failed."); 175 } 176 } 177 dispatchStopListening(IRecognitionListener listener)178 private void dispatchStopListening(IRecognitionListener listener) { 179 SessionState sessionState = mSessions.get(listener.asBinder()); 180 if (sessionState == null) { 181 try { 182 listener.onError(SpeechRecognizer.ERROR_CLIENT); 183 } catch (RemoteException e) { 184 Log.d(TAG, "#onError call from #stopListening failed."); 185 } 186 Log.w(TAG, "#stopListening received for a listener " 187 + "which has not started a session - ignoring this call."); 188 } else { 189 RecognitionService.this.onStopListening(sessionState.mCallback); 190 } 191 } 192 dispatchCancel(IRecognitionListener listener)193 private void dispatchCancel(IRecognitionListener listener) { 194 SessionState sessionState = mSessions.get(listener.asBinder()); 195 if (sessionState == null) { 196 Log.w(TAG, "#cancel received for a listener which has not started a session " 197 + "- ignoring this call."); 198 } else { 199 RecognitionService.this.onCancel(sessionState.mCallback); 200 dispatchClearCallback(listener); 201 } 202 } 203 dispatchClearCallback(IRecognitionListener listener)204 private void dispatchClearCallback(IRecognitionListener listener) { 205 SessionState sessionState = mSessions.remove(listener.asBinder()); 206 if (sessionState != null) { 207 if (DBG) { 208 Log.d(TAG, "Removed session from the map for listener = " 209 + listener.asBinder() + "."); 210 } 211 finishDataDelivery(sessionState); 212 sessionState.reset(); 213 } 214 } 215 dispatchCheckRecognitionSupport( Intent intent, IRecognitionSupportCallback callback, AttributionSource attributionSource)216 private void dispatchCheckRecognitionSupport( 217 Intent intent, IRecognitionSupportCallback callback, 218 AttributionSource attributionSource) { 219 RecognitionService.this.onCheckRecognitionSupport( 220 intent, 221 attributionSource, 222 new SupportCallback(callback)); 223 } 224 dispatchTriggerModelDownload( Intent intent, AttributionSource attributionSource, IModelDownloadListener listener)225 private void dispatchTriggerModelDownload( 226 Intent intent, 227 AttributionSource attributionSource, 228 IModelDownloadListener listener) { 229 if (listener == null) { 230 RecognitionService.this.onTriggerModelDownload(intent, attributionSource); 231 } else { 232 RecognitionService.this.onTriggerModelDownload( 233 intent, 234 attributionSource, 235 new ModelDownloadListener() { 236 237 private final Object mLock = new Object(); 238 239 @GuardedBy("mLock") 240 private boolean mIsTerminated = false; 241 242 @Override 243 public void onProgress(int completedPercent) { 244 synchronized (mLock) { 245 if (mIsTerminated) { 246 return; 247 } 248 try { 249 listener.onProgress(completedPercent); 250 } catch (RemoteException e) { 251 throw e.rethrowFromSystemServer(); 252 } 253 } 254 } 255 256 @Override 257 public void onSuccess() { 258 synchronized (mLock) { 259 if (mIsTerminated) { 260 return; 261 } 262 mIsTerminated = true; 263 try { 264 listener.onSuccess(); 265 } catch (RemoteException e) { 266 throw e.rethrowFromSystemServer(); 267 } 268 } 269 } 270 271 @Override 272 public void onScheduled() { 273 synchronized (mLock) { 274 if (mIsTerminated) { 275 return; 276 } 277 mIsTerminated = true; 278 try { 279 listener.onScheduled(); 280 } catch (RemoteException e) { 281 throw e.rethrowFromSystemServer(); 282 } 283 } 284 } 285 286 @Override 287 public void onError(int error) { 288 synchronized (mLock) { 289 if (mIsTerminated) { 290 return; 291 } 292 mIsTerminated = true; 293 try { 294 listener.onError(error); 295 } catch (RemoteException e) { 296 throw e.rethrowFromSystemServer(); 297 } 298 } 299 } 300 }); 301 } 302 } 303 304 private static class StartListeningArgs { 305 public final Intent mIntent; 306 307 public final IRecognitionListener mListener; 308 @NonNull public final AttributionSource mAttributionSource; 309 StartListeningArgs(Intent intent, IRecognitionListener listener, @NonNull AttributionSource attributionSource)310 public StartListeningArgs(Intent intent, IRecognitionListener listener, 311 @NonNull AttributionSource attributionSource) { 312 this.mIntent = intent; 313 this.mListener = listener; 314 this.mAttributionSource = attributionSource; 315 } 316 } 317 318 private static class CheckRecognitionSupportArgs { 319 public final Intent mIntent; 320 public final IRecognitionSupportCallback callback; 321 public final AttributionSource mAttributionSource; 322 CheckRecognitionSupportArgs( Intent intent, IRecognitionSupportCallback callback, AttributionSource attributionSource)323 private CheckRecognitionSupportArgs( 324 Intent intent, 325 IRecognitionSupportCallback callback, 326 AttributionSource attributionSource) { 327 this.mIntent = intent; 328 this.callback = callback; 329 this.mAttributionSource = attributionSource; 330 } 331 } 332 333 private static class ModelDownloadArgs { 334 final Intent mIntent; 335 final AttributionSource mAttributionSource; 336 @Nullable final IModelDownloadListener mListener; 337 ModelDownloadArgs( Intent intent, AttributionSource attributionSource, @Nullable IModelDownloadListener listener)338 private ModelDownloadArgs( 339 Intent intent, 340 AttributionSource attributionSource, 341 @Nullable IModelDownloadListener listener) { 342 this.mIntent = intent; 343 this.mAttributionSource = attributionSource; 344 this.mListener = listener; 345 } 346 } 347 348 /** 349 * Notifies the service that it should start listening for speech. 350 * 351 * <p> If you are recognizing speech from the microphone, in this callback you 352 * should create an attribution context for the caller such that when you access 353 * the mic the caller would be properly blamed (and their permission checked in 354 * the process) for accessing the microphone and that you served as a proxy for 355 * this sensitive data (and your permissions would be checked in the process). 356 * You should also open the mic in this callback via the attribution context 357 * and close the mic before returning the recognized result. If you don't do 358 * that then the caller would be blamed and you as being a proxy as well as you 359 * would get one more blame on yourself when you open the microphone. 360 * 361 * <pre> 362 * Context attributionContext = context.createContext(new ContextParams.Builder() 363 * .setNextAttributionSource(callback.getCallingAttributionSource()) 364 * .build()); 365 * 366 * AudioRecord recorder = AudioRecord.Builder() 367 * .setContext(attributionContext); 368 * . . . 369 * .build(); 370 * 371 * recorder.startRecording() 372 * </pre> 373 * 374 * @param recognizerIntent contains parameters for the recognition to be performed. The intent 375 * may also contain optional extras, see {@link RecognizerIntent}. If these values are 376 * not set explicitly, default values should be used by the recognizer. 377 * @param listener that will receive the service's callbacks 378 */ onStartListening(Intent recognizerIntent, Callback listener)379 protected abstract void onStartListening(Intent recognizerIntent, Callback listener); 380 381 /** 382 * Notifies the service that it should cancel the speech recognition. 383 */ onCancel(Callback listener)384 protected abstract void onCancel(Callback listener); 385 386 /** 387 * Notifies the service that it should stop listening for speech. Speech captured so far should 388 * be recognized as if the user had stopped speaking at this point. This method is only called 389 * if the application calls it explicitly. 390 */ onStopListening(Callback listener)391 protected abstract void onStopListening(Callback listener); 392 393 /** 394 * Queries the service on whether it would support a {@link #onStartListening(Intent, Callback)} 395 * for the same {@code recognizerIntent}. 396 * 397 * <p>The service will notify the caller about the level of support or error via 398 * {@link SupportCallback}. 399 * 400 * <p>If the service does not offer the support check it will notify the caller with 401 * {@link SpeechRecognizer#ERROR_CANNOT_CHECK_SUPPORT}. 402 */ onCheckRecognitionSupport( @onNull Intent recognizerIntent, @NonNull SupportCallback supportCallback)403 public void onCheckRecognitionSupport( 404 @NonNull Intent recognizerIntent, 405 @NonNull SupportCallback supportCallback) { 406 if (DBG) { 407 Log.i(TAG, String.format("#onSupports [%s]", recognizerIntent)); 408 } 409 supportCallback.onError(SpeechRecognizer.ERROR_CANNOT_CHECK_SUPPORT); 410 } 411 412 /** 413 * Queries the service on whether it would support a {@link #onStartListening(Intent, Callback)} 414 * for the same {@code recognizerIntent}. 415 * 416 * <p>The service will notify the caller about the level of support or error via 417 * {@link SupportCallback}. 418 * 419 * <p>If the service does not offer the support check it will notify the caller with 420 * {@link SpeechRecognizer#ERROR_CANNOT_CHECK_SUPPORT}. 421 * 422 * <p>Provides the calling AttributionSource to the service implementation so that permissions 423 * and bandwidth could be correctly blamed.</p> 424 */ onCheckRecognitionSupport( @onNull Intent recognizerIntent, @NonNull AttributionSource attributionSource, @NonNull SupportCallback supportCallback)425 public void onCheckRecognitionSupport( 426 @NonNull Intent recognizerIntent, 427 @NonNull AttributionSource attributionSource, 428 @NonNull SupportCallback supportCallback) { 429 onCheckRecognitionSupport(recognizerIntent, supportCallback); 430 } 431 432 /** 433 * Requests the download of the recognizer support for {@code recognizerIntent}. 434 */ onTriggerModelDownload(@onNull Intent recognizerIntent)435 public void onTriggerModelDownload(@NonNull Intent recognizerIntent) { 436 if (DBG) { 437 Log.i(TAG, String.format("#downloadModel [%s]", recognizerIntent)); 438 } 439 } 440 441 /** 442 * Requests the download of the recognizer support for {@code recognizerIntent}. 443 * 444 * <p>Provides the calling AttributionSource to the service implementation so that permissions 445 * and bandwidth could be correctly blamed.</p> 446 */ onTriggerModelDownload( @onNull Intent recognizerIntent, @NonNull AttributionSource attributionSource)447 public void onTriggerModelDownload( 448 @NonNull Intent recognizerIntent, 449 @NonNull AttributionSource attributionSource) { 450 onTriggerModelDownload(recognizerIntent); 451 } 452 453 /** 454 * Requests the download of the recognizer support for {@code recognizerIntent}. 455 * 456 * <p> Provides the calling {@link AttributionSource} to the service implementation so that 457 * permissions and bandwidth could be correctly blamed. 458 * 459 * <p> Client will receive the progress updates via the given {@link ModelDownloadListener}: 460 * 461 * <li> If the model is already available, {@link ModelDownloadListener#onSuccess()} will be 462 * called directly. The model can be safely used afterwards. 463 * 464 * <li> If the {@link RecognitionService} has started the download, 465 * {@link ModelDownloadListener#onProgress(int)} will be called an unspecified (zero or more) 466 * number of times until the download is complete. 467 * When the download finishes, {@link ModelDownloadListener#onSuccess()} will be called. 468 * The model can be safely used afterwards. 469 * 470 * <li> If the {@link RecognitionService} has only scheduled the download, but won't satisfy it 471 * immediately, {@link ModelDownloadListener#onScheduled()} will be called. 472 * There will be no further updates on this listener. 473 * 474 * <li> If the request fails at any time due to a network or scheduling error, 475 * {@link ModelDownloadListener#onError(int)} will be called. 476 * 477 * @param recognizerIntent contains parameters for the recognition to be performed. The intent 478 * may also contain optional extras, see {@link RecognizerIntent}. 479 * @param attributionSource the attribution source of the caller. 480 * @param listener on which to receive updates about the model download request. 481 */ onTriggerModelDownload( @onNull Intent recognizerIntent, @NonNull AttributionSource attributionSource, @NonNull ModelDownloadListener listener)482 public void onTriggerModelDownload( 483 @NonNull Intent recognizerIntent, 484 @NonNull AttributionSource attributionSource, 485 @NonNull ModelDownloadListener listener) { 486 listener.onError(SpeechRecognizer.ERROR_CANNOT_LISTEN_TO_DOWNLOAD_EVENTS); 487 } 488 489 @Override 490 @SuppressLint("MissingNullability") createContext(@onNull ContextParams contextParams)491 public Context createContext(@NonNull ContextParams contextParams) { 492 if (contextParams.getNextAttributionSource() != null) { 493 if (mHandler.getLooper().equals(Looper.myLooper())) { 494 handleAttributionContextCreation(contextParams.getNextAttributionSource()); 495 } else { 496 mHandler.sendMessage( 497 PooledLambda.obtainMessage(this::handleAttributionContextCreation, 498 contextParams.getNextAttributionSource())); 499 } 500 } 501 return super.createContext(contextParams); 502 } 503 handleAttributionContextCreation(@onNull AttributionSource attributionSource)504 private void handleAttributionContextCreation(@NonNull AttributionSource attributionSource) { 505 for (SessionState sessionState : mSessions.values()) { 506 Callback currentCallback = sessionState.mCallback; 507 if (currentCallback != null 508 && currentCallback.mCallingAttributionSource.equals(attributionSource)) { 509 currentCallback.mAttributionContextCreated = true; 510 } 511 } 512 } 513 514 @Override onBind(final Intent intent)515 public final IBinder onBind(final Intent intent) { 516 if (DBG) Log.d(TAG, "#onBind, intent=" + intent); 517 return mBinder; 518 } 519 520 @Override onDestroy()521 public void onDestroy() { 522 if (DBG) Log.d(TAG, "#onDestroy"); 523 for (SessionState sessionState : mSessions.values()) { 524 finishDataDelivery(sessionState); 525 sessionState.reset(); 526 } 527 mSessions.clear(); 528 mBinder.clearReference(); 529 super.onDestroy(); 530 } 531 532 /** 533 * Returns the maximal number of recognition sessions ongoing at the same time. 534 * <p> 535 * The default value is 1, meaning concurrency should be enabled by overriding this method. 536 */ getMaxConcurrentSessionsCount()537 public int getMaxConcurrentSessionsCount() { 538 return DEFAULT_MAX_CONCURRENT_SESSIONS_COUNT; 539 } 540 541 /** 542 * This class receives callbacks from the speech recognition service and forwards them to the 543 * user. An instance of this class is passed to the 544 * {@link RecognitionService#onStartListening(Intent, Callback)} method. Recognizers may call 545 * these methods on any thread. 546 */ 547 public class Callback { 548 private final IRecognitionListener mListener; 549 @NonNull private final AttributionSource mCallingAttributionSource; 550 @Nullable private Context mAttributionContext; 551 private boolean mAttributionContextCreated; 552 Callback(IRecognitionListener listener, @NonNull AttributionSource attributionSource)553 private Callback(IRecognitionListener listener, 554 @NonNull AttributionSource attributionSource) { 555 mListener = listener; 556 mCallingAttributionSource = attributionSource; 557 } 558 559 /** 560 * The service should call this method when the user has started to speak. 561 */ beginningOfSpeech()562 public void beginningOfSpeech() throws RemoteException { 563 mListener.onBeginningOfSpeech(); 564 } 565 566 /** 567 * The service should call this method when sound has been received. The purpose of this 568 * function is to allow giving feedback to the user regarding the captured audio. 569 * 570 * @param buffer a buffer containing a sequence of big-endian 16-bit integers representing a 571 * single channel audio stream. The sample rate is implementation dependent. 572 */ bufferReceived(byte[] buffer)573 public void bufferReceived(byte[] buffer) throws RemoteException { 574 mListener.onBufferReceived(buffer); 575 } 576 577 /** 578 * The service should call this method after the user stops speaking. 579 */ endOfSpeech()580 public void endOfSpeech() throws RemoteException { 581 mListener.onEndOfSpeech(); 582 } 583 584 /** 585 * The service should call this method when a network or recognition error occurred. 586 * 587 * @param error code is defined in {@link SpeechRecognizer} 588 */ error(@peechRecognizer.RecognitionError int error)589 public void error(@SpeechRecognizer.RecognitionError int error) throws RemoteException { 590 Message.obtain(mHandler, MSG_RESET, mListener).sendToTarget(); 591 mListener.onError(error); 592 } 593 594 /** 595 * The service should call this method when partial recognition results are available. This 596 * method can be called at any time between {@link #beginningOfSpeech()} and 597 * {@link #results(Bundle)} when partial results are ready. This method may be called zero, 598 * one or multiple times for each call to {@link SpeechRecognizer#startListening(Intent)}, 599 * depending on the speech recognition service implementation. 600 * 601 * @param partialResults the returned results. To retrieve the results in 602 * ArrayList<String> format use {@link Bundle#getStringArrayList(String)} with 603 * {@link SpeechRecognizer#RESULTS_RECOGNITION} as a parameter 604 */ partialResults(Bundle partialResults)605 public void partialResults(Bundle partialResults) throws RemoteException { 606 mListener.onPartialResults(partialResults); 607 } 608 609 /** 610 * The service should call this method when the endpointer is ready for the user to start 611 * speaking. 612 * 613 * @param params parameters set by the recognition service. Reserved for future use. 614 */ readyForSpeech(Bundle params)615 public void readyForSpeech(Bundle params) throws RemoteException { 616 mListener.onReadyForSpeech(params); 617 } 618 619 /** 620 * The service should call this method when recognition results are ready. 621 * 622 * @param results the recognition results. To retrieve the results in {@code 623 * ArrayList<String>} format use {@link Bundle#getStringArrayList(String)} with 624 * {@link SpeechRecognizer#RESULTS_RECOGNITION} as a parameter 625 */ results(Bundle results)626 public void results(Bundle results) throws RemoteException { 627 Message.obtain(mHandler, MSG_RESET, mListener).sendToTarget(); 628 mListener.onResults(results); 629 } 630 631 /** 632 * The service should call this method when the sound level in the audio stream has changed. 633 * There is no guarantee that this method will be called. 634 * 635 * @param rmsdB the new RMS dB value 636 */ rmsChanged(float rmsdB)637 public void rmsChanged(float rmsdB) throws RemoteException { 638 mListener.onRmsChanged(rmsdB); 639 } 640 641 /** 642 * The service should call this method for each ready segment of a long recognition session. 643 * 644 * @param results the recognition results. To retrieve the results in {@code 645 * ArrayList<String>} format use {@link Bundle#getStringArrayList(String)} with 646 * {@link SpeechRecognizer#RESULTS_RECOGNITION} as a parameter 647 */ 648 @SuppressLint({"CallbackMethodName", "RethrowRemoteException"}) segmentResults(@onNull Bundle results)649 public void segmentResults(@NonNull Bundle results) throws RemoteException { 650 mListener.onSegmentResults(results); 651 } 652 653 /** 654 * The service should call this method to end a segmented session. 655 */ 656 @SuppressLint({"CallbackMethodName", "RethrowRemoteException"}) endOfSegmentedSession()657 public void endOfSegmentedSession() throws RemoteException { 658 Message.obtain(mHandler, MSG_RESET, mListener).sendToTarget(); 659 mListener.onEndOfSegmentedSession(); 660 } 661 662 /** 663 * The service should call this method when the language detection (and switching) 664 * results are available. This method can be called on any number of occasions 665 * at any time between {@link #beginningOfSpeech()} and {@link #endOfSpeech()}, 666 * depending on the speech recognition service implementation. 667 * 668 * @param results the returned language detection (and switching) results. 669 * <p> To retrieve the most confidently detected language IETF tag 670 * (as defined by BCP 47, e.g., "en-US", "de-DE"), 671 * use {@link Bundle#getString(String)} 672 * with {@link SpeechRecognizer#DETECTED_LANGUAGE} as the parameter. 673 * <p> To retrieve the language detection confidence level represented by a value 674 * prefixed by {@code LANGUAGE_DETECTION_CONFIDENCE_LEVEL_} defined in 675 * {@link SpeechRecognizer}, use {@link Bundle#getInt(String)} with 676 * {@link SpeechRecognizer#LANGUAGE_DETECTION_CONFIDENCE_LEVEL} as the parameter. 677 * <p> To retrieve the alternative locales for the same language 678 * retrieved by the key {@link SpeechRecognizer#DETECTED_LANGUAGE}, 679 * use {@link Bundle#getStringArrayList(String)} 680 * with {@link SpeechRecognizer#TOP_LOCALE_ALTERNATIVES} as the parameter. 681 * <p> To retrieve the language switching results represented by a value 682 * prefixed by {@code LANGUAGE_SWITCH_RESULT_} 683 * and defined in {@link SpeechRecognizer}, use {@link Bundle#getInt(String)} 684 * with {@link SpeechRecognizer#LANGUAGE_SWITCH_RESULT} as the parameter. 685 */ 686 @SuppressLint("CallbackMethodName") // For consistency with existing methods. languageDetection(@onNull Bundle results)687 public void languageDetection(@NonNull Bundle results) { 688 try { 689 mListener.onLanguageDetection(results); 690 } catch (RemoteException e) { 691 throw e.rethrowFromSystemServer(); 692 } 693 } 694 695 /** 696 * Return the Linux uid assigned to the process that sent you the current transaction that 697 * is being processed. This is obtained from {@link Binder#getCallingUid()}. 698 */ getCallingUid()699 public int getCallingUid() { 700 return mCallingAttributionSource.getUid(); 701 } 702 703 /** 704 * Gets the permission identity of the calling app. If you want to attribute 705 * the mic access to the calling app you can create an attribution context 706 * via {@link android.content.Context#createContext(android.content.ContextParams)} 707 * and passing this identity to {@link 708 * android.content.ContextParams.Builder#setNextAttributionSource(AttributionSource)}. 709 * 710 * @return The permission identity of the calling app. 711 * 712 * @see android.content.ContextParams.Builder#setNextAttributionSource( 713 * AttributionSource) 714 */ 715 @SuppressLint("CallbackMethodName") 716 @NonNull getCallingAttributionSource()717 public AttributionSource getCallingAttributionSource() { 718 return mCallingAttributionSource; 719 } 720 getAttributionContextForCaller()721 @NonNull Context getAttributionContextForCaller() { 722 if (mAttributionContext == null) { 723 mAttributionContext = createContext(new ContextParams.Builder() 724 .setNextAttributionSource(mCallingAttributionSource) 725 .build()); 726 } 727 return mAttributionContext; 728 } 729 } 730 731 /** 732 * This class receives callbacks from the speech recognition service and forwards them to the 733 * user. An instance of this class is passed to the 734 * {@link RecognitionService#onCheckRecognitionSupport(Intent, SupportCallback)} method. Recognizers may call 735 * these methods on any thread. 736 */ 737 public static class SupportCallback { 738 private final IRecognitionSupportCallback mCallback; 739 SupportCallback( IRecognitionSupportCallback callback)740 private SupportCallback( 741 IRecognitionSupportCallback callback) { 742 this.mCallback = callback; 743 } 744 745 /** The service should call this method to notify the caller about the level of support. */ onSupportResult(@onNull RecognitionSupport recognitionSupport)746 public void onSupportResult(@NonNull RecognitionSupport recognitionSupport) { 747 try { 748 mCallback.onSupportResult(recognitionSupport); 749 } catch (RemoteException e) { 750 throw e.rethrowFromSystemServer(); 751 } 752 } 753 754 /** 755 * The service should call this method when an error occurred and can't satisfy the support 756 * request. 757 * 758 * @param errorCode code is defined in {@link SpeechRecognizer} 759 */ onError(@peechRecognizer.RecognitionError int errorCode)760 public void onError(@SpeechRecognizer.RecognitionError int errorCode) { 761 try { 762 mCallback.onError(errorCode); 763 } catch (RemoteException e) { 764 throw e.rethrowFromSystemServer(); 765 } 766 } 767 } 768 769 /** Binder of the recognition service. */ 770 private static final class RecognitionServiceBinder extends IRecognitionService.Stub { 771 private final WeakReference<RecognitionService> mServiceRef; 772 RecognitionServiceBinder(RecognitionService service)773 public RecognitionServiceBinder(RecognitionService service) { 774 mServiceRef = new WeakReference<>(service); 775 } 776 777 @Override startListening(Intent recognizerIntent, IRecognitionListener listener, @NonNull AttributionSource attributionSource)778 public void startListening(Intent recognizerIntent, IRecognitionListener listener, 779 @NonNull AttributionSource attributionSource) { 780 Objects.requireNonNull(attributionSource); 781 attributionSource.enforceCallingUid(); 782 if (DBG) Log.d(TAG, "startListening called by:" + listener.asBinder()); 783 final RecognitionService service = mServiceRef.get(); 784 if (service != null) { 785 service.mHandler.sendMessage(Message.obtain(service.mHandler, 786 MSG_START_LISTENING, new StartListeningArgs( 787 recognizerIntent, listener, attributionSource))); 788 } 789 } 790 791 @Override stopListening(IRecognitionListener listener)792 public void stopListening(IRecognitionListener listener) { 793 if (DBG) Log.d(TAG, "stopListening called by:" + listener.asBinder()); 794 final RecognitionService service = mServiceRef.get(); 795 if (service != null) { 796 service.mHandler.sendMessage( 797 Message.obtain(service.mHandler, MSG_STOP_LISTENING, listener)); 798 } 799 } 800 801 @Override cancel(IRecognitionListener listener, boolean isShutdown)802 public void cancel(IRecognitionListener listener, boolean isShutdown) { 803 if (DBG) Log.d(TAG, "cancel called by:" + listener.asBinder()); 804 final RecognitionService service = mServiceRef.get(); 805 if (service != null) { 806 service.mHandler.sendMessage( 807 Message.obtain(service.mHandler, MSG_CANCEL, listener)); 808 } 809 } 810 811 @Override checkRecognitionSupport( Intent recognizerIntent, @NonNull AttributionSource attributionSource, IRecognitionSupportCallback callback)812 public void checkRecognitionSupport( 813 Intent recognizerIntent, 814 @NonNull AttributionSource attributionSource, 815 IRecognitionSupportCallback callback) { 816 final RecognitionService service = mServiceRef.get(); 817 if (service != null) { 818 service.mHandler.sendMessage( 819 Message.obtain(service.mHandler, MSG_CHECK_RECOGNITION_SUPPORT, 820 new CheckRecognitionSupportArgs( 821 recognizerIntent, callback, attributionSource))); 822 } 823 } 824 825 @Override triggerModelDownload( Intent recognizerIntent, @NonNull AttributionSource attributionSource, IModelDownloadListener listener)826 public void triggerModelDownload( 827 Intent recognizerIntent, 828 @NonNull AttributionSource attributionSource, 829 IModelDownloadListener listener) { 830 final RecognitionService service = mServiceRef.get(); 831 if (service != null) { 832 service.mHandler.sendMessage( 833 Message.obtain( 834 service.mHandler, MSG_TRIGGER_MODEL_DOWNLOAD, 835 new ModelDownloadArgs( 836 recognizerIntent, 837 attributionSource, 838 listener))); 839 } 840 } 841 clearReference()842 public void clearReference() { 843 mServiceRef.clear(); 844 } 845 } 846 checkPermissionAndStartDataDelivery(SessionState sessionState)847 private boolean checkPermissionAndStartDataDelivery(SessionState sessionState) { 848 if (sessionState.mCallback.mAttributionContextCreated) { 849 return true; 850 } 851 852 if (PermissionChecker.checkPermissionAndStartDataDelivery( 853 RecognitionService.this, 854 Manifest.permission.RECORD_AUDIO, 855 sessionState.mCallback.getAttributionContextForCaller().getAttributionSource(), 856 /* message */ null) 857 == PermissionChecker.PERMISSION_GRANTED) { 858 sessionState.mStartedDataDelivery = true; 859 } 860 861 return sessionState.mStartedDataDelivery; 862 } 863 checkPermissionForPreflightNotHardDenied(AttributionSource attributionSource)864 private boolean checkPermissionForPreflightNotHardDenied(AttributionSource attributionSource) { 865 int result = PermissionChecker.checkPermissionForPreflight(RecognitionService.this, 866 Manifest.permission.RECORD_AUDIO, attributionSource); 867 return result == PermissionChecker.PERMISSION_GRANTED 868 || result == PermissionChecker.PERMISSION_SOFT_DENIED; 869 } 870 finishDataDelivery(SessionState sessionState)871 void finishDataDelivery(SessionState sessionState) { 872 if (sessionState.mStartedDataDelivery) { 873 sessionState.mStartedDataDelivery = false; 874 final String op = AppOpsManager.permissionToOp(Manifest.permission.RECORD_AUDIO); 875 PermissionChecker.finishDataDelivery(RecognitionService.this, op, 876 sessionState.mCallback.getAttributionContextForCaller().getAttributionSource()); 877 } 878 } 879 880 /** 881 * Data class containing information about an ongoing session: 882 * <ul> 883 * <li> {@link SessionState#mCallback} - callback of the client that invoked the 884 * {@link RecognitionService#onStartListening(Intent, Callback)} method; 885 * <li> {@link SessionState#mStartedDataDelivery} - flag denoting if data 886 * is being delivered to the client. 887 */ 888 private static class SessionState { 889 private Callback mCallback; 890 private boolean mStartedDataDelivery; 891 SessionState(Callback callback, boolean startedDataDelivery)892 SessionState(Callback callback, boolean startedDataDelivery) { 893 mCallback = callback; 894 mStartedDataDelivery = startedDataDelivery; 895 } 896 SessionState(Callback currentCallback)897 SessionState(Callback currentCallback) { 898 this(currentCallback, false); 899 } 900 reset()901 void reset() { 902 mCallback = null; 903 mStartedDataDelivery = false; 904 } 905 } 906 } 907