1 /*
2  * Copyright (C) 2010 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License"); you may not
5  * use this file except in compliance with the License. You may obtain a copy of
6  * the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13  * License for the specific language governing permissions and limitations under
14  * the License.
15  */
16 
17 package android.speech;
18 
19 import android.Manifest;
20 import android.annotation.NonNull;
21 import android.annotation.Nullable;
22 import android.annotation.SdkConstant;
23 import android.annotation.SdkConstant.SdkConstantType;
24 import android.annotation.SuppressLint;
25 import android.app.AppOpsManager;
26 import android.app.Service;
27 import android.content.AttributionSource;
28 import android.content.Context;
29 import android.content.ContextParams;
30 import android.content.Intent;
31 import android.content.PermissionChecker;
32 import android.os.Binder;
33 import android.os.Bundle;
34 import android.os.Handler;
35 import android.os.IBinder;
36 import android.os.Looper;
37 import android.os.Message;
38 import android.os.RemoteException;
39 import android.util.Log;
40 import android.util.Pair;
41 
42 import com.android.internal.util.function.pooled.PooledLambda;
43 
44 import java.lang.ref.WeakReference;
45 import java.util.Objects;
46 
47 /**
48  * Base class for speech recognition service implementations. This class should be
49  * extended only if you wish to implement a new speech recognizer. Please note that the
50  * implementation of this service is stateless.
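 *
 * <p>A minimal sketch of a subclass is shown below; the class name and the comments are
 * illustrative only and not part of the platform API:
 *
 * <pre>
 * public class MyRecognitionService extends RecognitionService {
 *     protected void onStartListening(Intent recognizerIntent, Callback listener) {
 *         // Open the microphone via an attribution context (see onStartListening below),
 *         // run the recognition engine, then report listener.results(...) or listener.error(...).
 *     }
 *
 *     protected void onStopListening(Callback listener) {
 *         // Stop capturing audio and finish recognizing what has been heard so far.
 *     }
 *
 *     protected void onCancel(Callback listener) {
 *         // Abort the current recognition session and release audio resources.
 *     }
 * }
 * </pre>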
51  */
52 public abstract class RecognitionService extends Service {
53     /**
54      * The {@link Intent} that must be declared as handled by the service.
55      */
56     @SdkConstant(SdkConstantType.SERVICE_ACTION)
57     public static final String SERVICE_INTERFACE = "android.speech.RecognitionService";
58 
59     /**
60      * Name under which a RecognitionService component publishes information about itself.
61      * This meta-data should reference an XML resource containing a
62      * <code>&lt;{@link android.R.styleable#RecognitionService recognition-service}&gt;</code> or
63      * <code>&lt;{@link android.R.styleable#RecognitionService on-device-recognition-service}
64      * &gt;</code> tag.
65      */
66     public static final String SERVICE_META_DATA = "android.speech";
67 
68     /** Log messages identifier */
69     private static final String TAG = "RecognitionService";
70 
71     /** Debugging flag */
72     private static final boolean DBG = false;
73 
74     /** Binder of the recognition service */
75     private RecognitionServiceBinder mBinder = new RecognitionServiceBinder(this);
76 
77     /**
78      * The current callback of an application that invoked the
79      * {@link RecognitionService#onStartListening(Intent, Callback)}
80      * method.
81      */
82     private Callback mCurrentCallback = null;
83 
84     private boolean mStartedDataDelivery;
85 
86     private static final int MSG_START_LISTENING = 1;
87 
88     private static final int MSG_STOP_LISTENING = 2;
89 
90     private static final int MSG_CANCEL = 3;
91 
92     private static final int MSG_RESET = 4;
93 
94     private static final int MSG_CHECK_RECOGNITION_SUPPORT = 5;
95 
96     private static final int MSG_TRIGGER_MODEL_DOWNLOAD = 6;
97 
98     private final Handler mHandler = new Handler() {
99         @Override
100         public void handleMessage(Message msg) {
101             switch (msg.what) {
102                 case MSG_START_LISTENING:
103                     StartListeningArgs args = (StartListeningArgs) msg.obj;
104                     dispatchStartListening(args.mIntent, args.mListener, args.mAttributionSource);
105                     break;
106                 case MSG_STOP_LISTENING:
107                     dispatchStopListening((IRecognitionListener) msg.obj);
108                     break;
109                 case MSG_CANCEL:
110                     dispatchCancel((IRecognitionListener) msg.obj);
111                     break;
112                 case MSG_RESET:
113                     dispatchClearCallback();
114                     break;
115                 case MSG_CHECK_RECOGNITION_SUPPORT:
116                     Pair<Intent, IRecognitionSupportCallback> intentAndListener =
117                             (Pair<Intent, IRecognitionSupportCallback>) msg.obj;
118                     dispatchCheckRecognitionSupport(
119                             intentAndListener.first, intentAndListener.second);
120                     break;
121                 case MSG_TRIGGER_MODEL_DOWNLOAD:
122                     dispatchTriggerModelDownload((Intent) msg.obj);
123                     break;
124             }
125         }
126     };
127 
128     private void dispatchStartListening(Intent intent, final IRecognitionListener listener,
129             @NonNull AttributionSource attributionSource) {
130         try {
131             if (mCurrentCallback == null) {
132                 boolean preflightPermissionCheckPassed =
133                         intent.hasExtra(RecognizerIntent.EXTRA_AUDIO_SOURCE)
134                         || checkPermissionForPreflightNotHardDenied(attributionSource);
135                 if (preflightPermissionCheckPassed) {
136                     if (DBG) {
137                         Log.d(TAG, "created new mCurrentCallback, listener = "
138                                 + listener.asBinder());
139                     }
140                     mCurrentCallback = new Callback(listener, attributionSource);
141                     RecognitionService.this.onStartListening(intent, mCurrentCallback);
142                 }
143 
144                 if (!preflightPermissionCheckPassed || !checkPermissionAndStartDataDelivery()) {
145                     listener.onError(SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS);
146                     if (preflightPermissionCheckPassed) {
147                         // If we attempted to start listening, cancel the callback
148                         RecognitionService.this.onCancel(mCurrentCallback);
149                         dispatchClearCallback();
150                     }
151                     Log.i(TAG, "caller doesn't have permission:"
152                             + Manifest.permission.RECORD_AUDIO);
153                 }
154             } else {
155                 listener.onError(SpeechRecognizer.ERROR_RECOGNIZER_BUSY);
156                 Log.i(TAG, "concurrent startListening received - ignoring this call");
157             }
158         } catch (RemoteException e) {
159             Log.d(TAG, "onError call from startListening failed");
160         }
161     }
162 
163     private void dispatchStopListening(IRecognitionListener listener) {
164         try {
165             if (mCurrentCallback == null) {
166                 listener.onError(SpeechRecognizer.ERROR_CLIENT);
167                 Log.w(TAG, "stopListening called with no preceding startListening - ignoring");
168             } else if (mCurrentCallback.mListener.asBinder() != listener.asBinder()) {
169                 listener.onError(SpeechRecognizer.ERROR_RECOGNIZER_BUSY);
170                 Log.w(TAG, "stopListening called by other caller than startListening - ignoring");
171             } else { // the correct state
172                 RecognitionService.this.onStopListening(mCurrentCallback);
173             }
174         } catch (RemoteException e) { // occurs if onError fails
175             Log.d(TAG, "onError call from stopListening failed");
176         }
177     }
178 
179     private void dispatchCancel(IRecognitionListener listener) {
180         if (mCurrentCallback == null) {
181             if (DBG) Log.d(TAG, "cancel called with no preceding startListening - ignoring");
182         } else if (mCurrentCallback.mListener.asBinder() != listener.asBinder()) {
183             Log.w(TAG, "cancel called by client who did not call startListening - ignoring");
184         } else { // the correct state
185             RecognitionService.this.onCancel(mCurrentCallback);
186             dispatchClearCallback();
187             if (DBG) Log.d(TAG, "canceling - setting mCurrentCallback to null");
188         }
189     }
190 
191     private void dispatchClearCallback() {
192         finishDataDelivery();
193         mCurrentCallback = null;
194         mStartedDataDelivery = false;
195     }
196 
197     private void dispatchCheckRecognitionSupport(
198             Intent intent, IRecognitionSupportCallback callback) {
199         RecognitionService.this.onCheckRecognitionSupport(intent, new SupportCallback(callback));
200     }
201 
202     private void dispatchTriggerModelDownload(Intent intent) {
203         RecognitionService.this.onTriggerModelDownload(intent);
204     }
205 
206     private class StartListeningArgs {
207         public final Intent mIntent;
208 
209         public final IRecognitionListener mListener;
210         public final @NonNull AttributionSource mAttributionSource;
211 
212         public StartListeningArgs(Intent intent, IRecognitionListener listener,
213                 @NonNull AttributionSource attributionSource) {
214             this.mIntent = intent;
215             this.mListener = listener;
216             this.mAttributionSource = attributionSource;
217         }
218     }
219 
220     /**
221      * Notifies the service that it should start listening for speech.
222      *
223      * <p> If you are recognizing speech from the microphone, you should create an
224      * attribution context for the caller in this callback, so that when you access
225      * the mic the caller is properly blamed (and its permission checked in the
226      * process) for accessing the microphone, and you are recorded as a proxy for
227      * this sensitive data (and your permissions are checked in the process).
228      * You should also open the mic in this callback via the attribution context
229      * and close the mic before returning the recognized result. If you don't do
230      * that, the caller is still blamed with you as the proxy, but you also receive
231      * an additional blame of your own when you open the microphone.
232      *
233      * <pre>
234      * Context attributionContext = context.createContext(new ContextParams.Builder()
235      *     .setNextAttributionSource(callback.getCallingAttributionSource())
236      *     .build());
237      *
238      * AudioRecord recorder = new AudioRecord.Builder()
239      *     .setContext(attributionContext)
240      *     . . .
241      *     .build();
242      *
243      * recorder.startRecording();
244      * </pre>
245      *
246      * @param recognizerIntent contains parameters for the recognition to be performed. The intent
247      *        may also contain optional extras, see {@link RecognizerIntent}. If these values are
248      *        not set explicitly, default values should be used by the recognizer.
249      * @param listener that will receive the service's callbacks
250      */
251     protected abstract void onStartListening(Intent recognizerIntent, Callback listener);
252 
253     /**
254      * Notifies the service that it should cancel the speech recognition.
255      */
256     protected abstract void onCancel(Callback listener);
257 
258     /**
259      * Notifies the service that it should stop listening for speech. Speech captured so far should
260      * be recognized as if the user had stopped speaking at this point. This method is called
261      * only when the client application explicitly requests that listening stop.
262      */
263     protected abstract void onStopListening(Callback listener);
264 
265     /**
266      * Queries the service on whether it would support a {@link #onStartListening(Intent, Callback)}
267      * for the same {@code recognizerIntent}.
268      *
269      * <p>The service will notify the caller about the level of support or error via
270      * {@link SupportCallback}.
271      *
272      * <p>If the service does not offer the support check it will notify the caller with
273      * {@link SpeechRecognizer#ERROR_CANNOT_CHECK_SUPPORT}.
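     *
     * <p>A minimal sketch of an override reporting on-device support is shown below; the
     * {@link RecognitionSupport.Builder} calls used are assumptions for illustration and should
     * be adjusted to the builder methods your platform version actually provides:
     *
     * <pre>
     * public void onCheckRecognitionSupport(Intent recognizerIntent, SupportCallback callback) {
     *     // Describe which languages this recognizer supports on device (illustrative values).
     *     RecognitionSupport support = new RecognitionSupport.Builder()
     *             .setInstalledOnDeviceLanguages(List.of("en-US"))
     *             .build();
     *     callback.onSupportResult(support);
     * }
     * </pre>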
274      */
275     public void onCheckRecognitionSupport(
276             @NonNull Intent recognizerIntent,
277             @NonNull SupportCallback supportCallback) {
278         if (DBG) {
279             Log.i(TAG, String.format("#onSupports [%s]", recognizerIntent));
280         }
281         supportCallback.onError(SpeechRecognizer.ERROR_CANNOT_CHECK_SUPPORT);
282     }
283 
284     /**
285      * Requests the download of the recognizer support for {@code recognizerIntent}.
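     *
     * <p>A minimal sketch of an override is shown below; {@code myModelDownloader} is a
     * placeholder for engine-specific download logic, not a platform API:
     *
     * <pre>
     * public void onTriggerModelDownload(Intent recognizerIntent) {
     *     // Kick off an asynchronous download of whatever on-device model is needed to
     *     // satisfy recognizerIntent (e.g. keyed by RecognizerIntent.EXTRA_LANGUAGE).
     *     myModelDownloader.download(recognizerIntent.getStringExtra(RecognizerIntent.EXTRA_LANGUAGE));
     * }
     * </pre>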
286      */
287     public void onTriggerModelDownload(@NonNull Intent recognizerIntent) {
288         if (DBG) {
289             Log.i(TAG, String.format("#downloadModel [%s]", recognizerIntent));
290         }
291     }
292 
293     @Override
294     @SuppressLint("MissingNullability")
295     public Context createContext(@NonNull ContextParams contextParams) {
296         if (contextParams.getNextAttributionSource() != null) {
297             if (mHandler.getLooper().equals(Looper.myLooper())) {
298                 handleAttributionContextCreation(contextParams.getNextAttributionSource());
299             } else {
300                 mHandler.sendMessage(
301                         PooledLambda.obtainMessage(this::handleAttributionContextCreation,
302                                 contextParams.getNextAttributionSource()));
303             }
304         }
305         return super.createContext(contextParams);
306     }
307 
308     private void handleAttributionContextCreation(@NonNull AttributionSource attributionSource) {
309         if (mCurrentCallback != null
310                 && mCurrentCallback.mCallingAttributionSource.equals(attributionSource)) {
311             mCurrentCallback.mAttributionContextCreated = true;
312         }
313     }
314 
315     @Override
316     public final IBinder onBind(final Intent intent) {
317         if (DBG) Log.d(TAG, "onBind, intent=" + intent);
318         return mBinder;
319     }
320 
321     @Override
322     public void onDestroy() {
323         if (DBG) Log.d(TAG, "onDestroy");
324         finishDataDelivery();
325         mCurrentCallback = null;
326         mBinder.clearReference();
327         super.onDestroy();
328     }
329 
330     /**
331      * This class receives callbacks from the speech recognition service and forwards them to the
332      * user. An instance of this class is passed to the
333      * {@link RecognitionService#onStartListening(Intent, Callback)} method. Recognizers may call
334      * these methods on any thread.
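     *
     * <p>A typical sequence of calls from a recognizer implementation might look like the
     * sketch below (error handling omitted; {@code resultsBundle} is a placeholder for a
     * Bundle the recognizer fills in under {@link SpeechRecognizer#RESULTS_RECOGNITION}):
     *
     * <pre>
     * callback.readyForSpeech(new Bundle());
     * callback.beginningOfSpeech();
     * // ... capture audio, optionally calling rmsChanged(...) and partialResults(...) ...
     * callback.endOfSpeech();
     * callback.results(resultsBundle);
     * </pre>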
335      */
336     public class Callback {
337         private final IRecognitionListener mListener;
338         private final @NonNull AttributionSource mCallingAttributionSource;
339         private @Nullable Context mAttributionContext;
340         private boolean mAttributionContextCreated;
341 
342         private Callback(IRecognitionListener listener,
343                 @NonNull AttributionSource attributionSource) {
344             mListener = listener;
345             mCallingAttributionSource = attributionSource;
346         }
347 
348         /**
349          * The service should call this method when the user has started to speak.
350          */
351         public void beginningOfSpeech() throws RemoteException {
352             mListener.onBeginningOfSpeech();
353         }
354 
355         /**
356          * The service should call this method when sound has been received. The purpose of this
357          * function is to allow giving feedback to the user regarding the captured audio.
358          *
359          * @param buffer a buffer containing a sequence of big-endian 16-bit integers representing a
360          *        single channel audio stream. The sample rate is implementation dependent.
361          */
362         public void bufferReceived(byte[] buffer) throws RemoteException {
363             mListener.onBufferReceived(buffer);
364         }
365 
366         /**
367          * The service should call this method after the user stops speaking.
368          */
369         public void endOfSpeech() throws RemoteException {
370             mListener.onEndOfSpeech();
371         }
372 
373         /**
374          * The service should call this method when a network or recognition error has occurred.
375          *
376          * @param error error code, as defined in {@link SpeechRecognizer}
377          */
378         public void error(@SpeechRecognizer.RecognitionError int error) throws RemoteException {
379             Message.obtain(mHandler, MSG_RESET).sendToTarget();
380             mListener.onError(error);
381         }
382 
383         /**
384          * The service should call this method when partial recognition results are available. This
385          * method can be called at any time between {@link #beginningOfSpeech()} and
386          * {@link #results(Bundle)} when partial results are ready. This method may be called zero,
387          * one or multiple times for each call to {@link SpeechRecognizer#startListening(Intent)},
388          * depending on the speech recognition service implementation.
389          *
390          * @param partialResults the returned results. To retrieve the results in
391          *        ArrayList&lt;String&gt; format use {@link Bundle#getStringArrayList(String)} with
392          *        {@link SpeechRecognizer#RESULTS_RECOGNITION} as a parameter
393          */
394         public void partialResults(Bundle partialResults) throws RemoteException {
395             mListener.onPartialResults(partialResults);
396         }
397 
398         /**
399          * The service should call this method when the endpointer is ready for the user to start
400          * speaking.
401          *
402          * @param params parameters set by the recognition service. Reserved for future use.
403          */
404         public void readyForSpeech(Bundle params) throws RemoteException {
405             mListener.onReadyForSpeech(params);
406         }
407 
408         /**
409          * The service should call this method when recognition results are ready.
410          *
411          * @param results the recognition results. To retrieve the results in {@code
412          *        ArrayList<String>} format use {@link Bundle#getStringArrayList(String)} with
413          *        {@link SpeechRecognizer#RESULTS_RECOGNITION} as a parameter
414          */
415         public void results(Bundle results) throws RemoteException {
416             Message.obtain(mHandler, MSG_RESET).sendToTarget();
417             mListener.onResults(results);
418         }
419 
420         /**
421          * The service should call this method when the sound level in the audio stream has changed.
422          * There is no guarantee that this method will be called.
423          *
424          * @param rmsdB the new RMS dB value
425          */
426         public void rmsChanged(float rmsdB) throws RemoteException {
427             mListener.onRmsChanged(rmsdB);
428         }
429 
430         /**
431          * The service should call this method for each ready segment of a long recognition session.
432          *
433          * @param results the recognition results. To retrieve the results in {@code
434          *        ArrayList<String>} format use {@link Bundle#getStringArrayList(String)} with
435          *        {@link SpeechRecognizer#RESULTS_RECOGNITION} as a parameter
436          */
437         @SuppressLint({"CallbackMethodName", "RethrowRemoteException"})
438         public void segmentResults(@NonNull Bundle results) throws RemoteException {
439             mListener.onSegmentResults(results);
440         }
441 
442         /**
443          * The service should call this method to end a segmented session.
444          */
445         @SuppressLint({"CallbackMethodName", "RethrowRemoteException"})
446         public void endOfSegmentedSession() throws RemoteException {
447             Message.obtain(mHandler, MSG_RESET).sendToTarget();
448             mListener.onEndOfSegmentedSession();
449         }
450 
451         /**
452          * Return the Linux uid assigned to the process that sent you the current transaction that
453          * is being processed. This is obtained from {@link Binder#getCallingUid()}.
454          */
455         public int getCallingUid() {
456             return mCallingAttributionSource.getUid();
457         }
458 
459         /**
460          * Gets the permission identity of the calling app. If you want to attribute
461          * the mic access to the calling app, you can create an attribution context
462          * via {@link android.content.Context#createContext(android.content.ContextParams)},
463          * passing this identity to {@link
464          * android.content.ContextParams.Builder#setNextAttributionSource(AttributionSource)}.
465          *
466          * @return The permission identity of the calling app.
467          *
468          * @see android.content.ContextParams.Builder#setNextAttributionSource(
469          * AttributionSource)
470          */
471         @SuppressLint("CallbackMethodName")
472         public @NonNull AttributionSource getCallingAttributionSource() {
473             return mCallingAttributionSource;
474         }
475 
476         @NonNull Context getAttributionContextForCaller() {
477             if (mAttributionContext == null) {
478                 mAttributionContext = createContext(new ContextParams.Builder()
479                         .setNextAttributionSource(mCallingAttributionSource)
480                         .build());
481             }
482             return mAttributionContext;
483         }
484     }
485 
486     /**
487      * This class receives callbacks from the speech recognition service and forwards them to the
488      * user. An instance of this class is passed to the
489      * {@link RecognitionService#onCheckRecognitionSupport(Intent, SupportCallback)} method. Recognizers may call
490      * these methods on any thread.
491      */
492     public static class SupportCallback {
493 
494         private final IRecognitionSupportCallback mCallback;
495 
496         private SupportCallback(IRecognitionSupportCallback callback) {
497             this.mCallback = callback;
498         }
499 
500         /** The service should call this method to notify the caller about the level of support. */
501         public void onSupportResult(@NonNull RecognitionSupport recognitionSupport) {
502             try {
503                 mCallback.onSupportResult(recognitionSupport);
504             } catch (RemoteException e) {
505                 throw e.rethrowFromSystemServer();
506             }
507         }
508 
509         /**
510          * The service should call this method when an error occurs and it cannot satisfy the
511          * support request.
512          *
513          * @param errorCode error code, as defined in {@link SpeechRecognizer}
514          */
515         public void onError(@SpeechRecognizer.RecognitionError int errorCode) {
516             try {
517                 mCallback.onError(errorCode);
518             } catch (RemoteException e) {
519                 throw e.rethrowFromSystemServer();
520             }
521         }
522     }
523 
524     /** Binder of the recognition service */
525     private static final class RecognitionServiceBinder extends IRecognitionService.Stub {
526         private final WeakReference<RecognitionService> mServiceRef;
527 
528         public RecognitionServiceBinder(RecognitionService service) {
529             mServiceRef = new WeakReference<>(service);
530         }
531 
532         @Override
533         public void startListening(Intent recognizerIntent, IRecognitionListener listener,
534                 @NonNull AttributionSource attributionSource) {
535             Objects.requireNonNull(attributionSource);
536             attributionSource.enforceCallingUid();
537             if (DBG) Log.d(TAG, "startListening called by:" + listener.asBinder());
538             final RecognitionService service = mServiceRef.get();
539             if (service != null) {
540                 service.mHandler.sendMessage(Message.obtain(service.mHandler,
541                         MSG_START_LISTENING, service.new StartListeningArgs(
542                                 recognizerIntent, listener, attributionSource)));
543             }
544         }
545 
546         @Override
547         public void stopListening(IRecognitionListener listener) {
548             if (DBG) Log.d(TAG, "stopListening called by:" + listener.asBinder());
549             final RecognitionService service = mServiceRef.get();
550             if (service != null) {
551                 service.mHandler.sendMessage(
552                         Message.obtain(service.mHandler, MSG_STOP_LISTENING, listener));
553             }
554         }
555 
556         @Override
557         public void cancel(IRecognitionListener listener, boolean isShutdown) {
558             if (DBG) Log.d(TAG, "cancel called by:" + listener.asBinder());
559             final RecognitionService service = mServiceRef.get();
560             if (service != null) {
561                 service.mHandler.sendMessage(
562                         Message.obtain(service.mHandler, MSG_CANCEL, listener));
563             }
564         }
565 
566         @Override
567         public void checkRecognitionSupport(
568                 Intent recognizerIntent, IRecognitionSupportCallback callback) {
569             final RecognitionService service = mServiceRef.get();
570             if (service != null) {
571                 service.mHandler.sendMessage(
572                         Message.obtain(service.mHandler, MSG_CHECK_RECOGNITION_SUPPORT,
573                                 Pair.create(recognizerIntent, callback)));
574             }
575         }
576 
577         @Override
578         public void triggerModelDownload(Intent recognizerIntent) {
579             final RecognitionService service = mServiceRef.get();
580             if (service != null) {
581                 service.mHandler.sendMessage(
582                         Message.obtain(
583                                 service.mHandler, MSG_TRIGGER_MODEL_DOWNLOAD, recognizerIntent));
584             }
585         }
586 
587         public void clearReference() {
588             mServiceRef.clear();
589         }
590     }
591 
592     private boolean checkPermissionAndStartDataDelivery() {
593         if (mCurrentCallback.mAttributionContextCreated) {
594             return true;
595         }
596         if (PermissionChecker.checkPermissionAndStartDataDelivery(
597                 RecognitionService.this, Manifest.permission.RECORD_AUDIO,
598                 mCurrentCallback.getAttributionContextForCaller().getAttributionSource(),
599                 /*message*/ null) == PermissionChecker.PERMISSION_GRANTED) {
600             mStartedDataDelivery = true;
601         }
602         return mStartedDataDelivery;
603     }
604 
605     private boolean checkPermissionForPreflightNotHardDenied(AttributionSource attributionSource) {
606         int result = PermissionChecker.checkPermissionForPreflight(RecognitionService.this,
607                 Manifest.permission.RECORD_AUDIO, attributionSource);
608         return result == PermissionChecker.PERMISSION_GRANTED
609                 || result == PermissionChecker.PERMISSION_SOFT_DENIED;
610     }
611 
612     void finishDataDelivery() {
613         if (mStartedDataDelivery) {
614             mStartedDataDelivery = false;
615             final String op = AppOpsManager.permissionToOp(Manifest.permission.RECORD_AUDIO);
616             PermissionChecker.finishDataDelivery(RecognitionService.this, op,
617                     mCurrentCallback.getAttributionContextForCaller().getAttributionSource());
618         }
619     }
620 }
621