* <code>&lt;{@link android.R.styleable#RecognitionService recognition-service}&gt;</code> or
* <code>&lt;{@link android.R.styleable#RecognitionService on-device-recognition-service}&gt;</code> tag.
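*
* <p>As a rough illustration only (the component name below is hypothetical, not part of this
* API), a client could locate and parse this meta-data via the package manager:
*
* <pre>{@code
* PackageManager pm = context.getPackageManager();
* ServiceInfo info = pm.getServiceInfo(
*         new ComponentName("com.example.asr", "com.example.asr.MyRecognitionService"),
*         PackageManager.GET_META_DATA); // throws NameNotFoundException if not installed
* XmlResourceParser parser = info.loadXmlMetaData(pm, RecognitionService.SERVICE_META_DATA);
* // walk the parser to read the recognition-service attributes, then release it
* parser.close();
* }</pre>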
*/
public static final String SERVICE_META_DATA = "android.speech";
/** Log messages identifier */
private static final String TAG = "RecognitionService";
/** Debugging flag */
private static final boolean DBG = false;
/** Binder of the recognition service */
private RecognitionServiceBinder mBinder = new RecognitionServiceBinder(this);
/**
* The current callback of an application that invoked the
* {@link RecognitionService#onStartListening(Intent, Callback)} method
*/
private Callback mCurrentCallback = null;
private boolean mStartedDataDelivery;
private static final int MSG_START_LISTENING = 1;
private static final int MSG_STOP_LISTENING = 2;
private static final int MSG_CANCEL = 3;
private static final int MSG_RESET = 4;
private static final int MSG_CHECK_RECOGNITION_SUPPORT = 5;
private static final int MSG_TRIGGER_MODEL_DOWNLOAD = 6;
private final Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case MSG_START_LISTENING:
StartListeningArgs args = (StartListeningArgs) msg.obj;
dispatchStartListening(args.mIntent, args.mListener, args.mAttributionSource);
break;
case MSG_STOP_LISTENING:
dispatchStopListening((IRecognitionListener) msg.obj);
break;
case MSG_CANCEL:
dispatchCancel((IRecognitionListener) msg.obj);
break;
case MSG_RESET:
dispatchClearCallback();
break;
case MSG_CHECK_RECOGNITION_SUPPORT:
Pair<Intent, IRecognitionSupportCallback> intentAndListener =
(Pair<Intent, IRecognitionSupportCallback>) msg.obj;
dispatchCheckRecognitionSupport(intentAndListener.first, intentAndListener.second);
break;
case MSG_TRIGGER_MODEL_DOWNLOAD:
dispatchTriggerModelDownload((Intent) msg.obj);
break;
}
}
};
/**
* Notifies the service that it should start listening for speech.
*
* <p>If you are recognizing speech from the microphone, in this callback you should create an
* attribution context for the caller, so that when you access the mic the caller is properly
* blamed (and their permission checked in the process) for accessing the microphone, and so
* that you are recorded as a proxy for this sensitive data (and your permissions are checked
* in the process). You should also open the mic in this callback via the attribution context
* and close the mic before returning the recognized result. If you don't do that, the caller
* would still be blamed, and you as a proxy, but you would also get one more blame on yourself
* when you open the microphone.
*
* <pre>
* Context attributionContext = context.createContext(new ContextParams.Builder()
*     .setNextAttributionSource(callback.getCallingAttributionSource())
*     .build());
*
* AudioRecord recorder = new AudioRecord.Builder()
*     .setContext(attributionContext)
*     . . .
*     .build();
*
* recorder.startRecording();
* </pre>
*
* @param recognizerIntent contains parameters for the recognition to be performed. The intent
*     may also contain optional extras, see {@link RecognizerIntent}. If these values are
*     not set explicitly, default values should be used by the recognizer.
* @param listener that will receive the service's callbacks
*/
protected abstract void onStartListening(Intent recognizerIntent, Callback listener);
/**
* Notifies the service that it should cancel the speech recognition.
*/
protected abstract void onCancel(Callback listener);
/**
* Notifies the service that it should stop listening for speech. Speech captured so far should
* be recognized as if the user had stopped speaking at this point. This method is only called
* if the application calls it explicitly.
*/
protected abstract void onStopListening(Callback listener);
/**
* Queries the service on whether it would support a {@link #onStartListening(Intent, Callback)}
* for the same {@code recognizerIntent}.
*
* <p>The service will notify the caller about the level of support or error via
* {@link SupportCallback}.
*
* <p>If the service does not offer the support check it will notify the caller with
* {@link SpeechRecognizer#ERROR_CANNOT_CHECK_SUPPORT}.
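*
* <p>A minimal sketch of an override reporting support (the language list here is purely
* illustrative):
*
* <pre>{@code
* public void onCheckRecognitionSupport(
*         Intent recognizerIntent, SupportCallback supportCallback) {
*     RecognitionSupport support = new RecognitionSupport.Builder()
*             .setInstalledOnDeviceLanguages(List.of("en-US"))
*             .build();
*     supportCallback.onSupportResult(support);
* }
* }</pre>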
*/
public void onCheckRecognitionSupport(
@NonNull Intent recognizerIntent,
@NonNull SupportCallback supportCallback) {
if (DBG) {
Log.i(TAG, String.format("#onSupports [%s]", recognizerIntent));
}
supportCallback.onError(SpeechRecognizer.ERROR_CANNOT_CHECK_SUPPORT);
}
/**
* Requests the download of the recognizer support for {@code recognizerIntent}.
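*
* <p>Callers typically reach this through {@link SpeechRecognizer#triggerModelDownload(Intent)};
* a minimal sketch of such a request (the language value is illustrative):
*
* <pre>{@code
* Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH)
*         .putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en-US");
* speechRecognizer.triggerModelDownload(intent);
* }</pre>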
*/
public void onTriggerModelDownload(@NonNull Intent recognizerIntent) {
if (DBG) {
Log.i(TAG, String.format("#downloadModel [%s]", recognizerIntent));
}
}
@Override
@SuppressLint("MissingNullability")
public Context createContext(@NonNull ContextParams contextParams) {
if (contextParams.getNextAttributionSource() != null) {
if (mHandler.getLooper().equals(Looper.myLooper())) {
handleAttributionContextCreation(contextParams.getNextAttributionSource());
} else {
mHandler.sendMessage(
PooledLambda.obtainMessage(this::handleAttributionContextCreation,
contextParams.getNextAttributionSource()));
}
}
return super.createContext(contextParams);
}
private void handleAttributionContextCreation(@NonNull AttributionSource attributionSource) {
if (mCurrentCallback != null
&& mCurrentCallback.mCallingAttributionSource.equals(attributionSource)) {
mCurrentCallback.mAttributionContextCreated = true;
}
}
@Override
public final IBinder onBind(final Intent intent) {
if (DBG) Log.d(TAG, "onBind, intent=" + intent);
return mBinder;
}
@Override
public void onDestroy() {
if (DBG) Log.d(TAG, "onDestroy");
finishDataDelivery();
mCurrentCallback = null;
mBinder.clearReference();
super.onDestroy();
}
/**
* This class receives callbacks from the speech recognition service and forwards them to the
* user. An instance of this class is passed to the
* {@link RecognitionService#onStartListening(Intent, Callback)} method. Recognizers may call
* these methods on any thread.
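*
* <p>A typical sequence of calls from a recognizer looks roughly like the sketch below
* ({@code audioChunk} and {@code hypotheses} stand in for data produced by the recognition
* engine; {@link RemoteException} handling is omitted):
*
* <pre>{@code
* callback.readyForSpeech(new Bundle());
* callback.beginningOfSpeech();
* callback.bufferReceived(audioChunk);
* callback.endOfSpeech();
* Bundle results = new Bundle();
* results.putStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION, hypotheses);
* callback.results(results);
* }</pre>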
*/
public class Callback {
private final IRecognitionListener mListener;
private final @NonNull AttributionSource mCallingAttributionSource;
private @Nullable Context mAttributionContext;
private boolean mAttributionContextCreated;
private Callback(IRecognitionListener listener,
@NonNull AttributionSource attributionSource) {
mListener = listener;
mCallingAttributionSource = attributionSource;
}
/**
* The service should call this method when the user has started to speak.
*/
public void beginningOfSpeech() throws RemoteException {
mListener.onBeginningOfSpeech();
}
/**
* The service should call this method when sound has been received. The purpose of this
* function is to allow the application to give the user feedback regarding the captured audio.
*
* @param buffer a buffer containing a sequence of big-endian 16-bit integers representing a
* single channel audio stream. The sample rate is implementation dependent.
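*
* <p>A minimal sketch of how a receiver might view such a buffer as 16-bit samples:
*
* <pre>{@code
* ShortBuffer samples = ByteBuffer.wrap(buffer)
*         .order(ByteOrder.BIG_ENDIAN)
*         .asShortBuffer();
* }</pre>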
*/
public void bufferReceived(byte[] buffer) throws RemoteException {
mListener.onBufferReceived(buffer);
}
/**
* The service should call this method after the user stops speaking.
*/
public void endOfSpeech() throws RemoteException {
mListener.onEndOfSpeech();
}
/**
* The service should call this method when a network or recognition error has occurred.
*
* @param error an error code defined in {@link SpeechRecognizer}
*/
public void error(@SpeechRecognizer.RecognitionError int error) throws RemoteException {
Message.obtain(mHandler, MSG_RESET).sendToTarget();
mListener.onError(error);
}
/**
* The service should call this method when partial recognition results are available. This
* method can be called at any time between {@link #beginningOfSpeech()} and
* {@link #results(Bundle)} when partial results are ready. This method may be called zero,
* one or multiple times for each call to {@link SpeechRecognizer#startListening(Intent)},
* depending on the speech recognition service implementation.
*
* @param partialResults the returned results. To retrieve the results in
* ArrayList&lt;String&gt; format use {@link Bundle#getStringArrayList(String)} with
* {@link SpeechRecognizer#RESULTS_RECOGNITION} as a parameter
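*
* <p>For example, on the application side:
*
* <pre>{@code
* ArrayList<String> hypotheses =
*         partialResults.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
* }</pre>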
*/
public void partialResults(Bundle partialResults) throws RemoteException {
mListener.onPartialResults(partialResults);
}
/**
* The service should call this method when the endpointer is ready for the user to start
* speaking.
*
* @param params parameters set by the recognition service. Reserved for future use.
*/
public void readyForSpeech(Bundle params) throws RemoteException {
mListener.onReadyForSpeech(params);
}
/**
* The service should call this method when recognition results are ready.
*
* @param results the recognition results. To retrieve the results in {@code
* ArrayList