Java Code Examples for android.speech.SpeechRecognizer

The following examples show how to use android.speech.SpeechRecognizer. These examples are extracted from open source projects; where known, the source project, file, and license are noted above the example.
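Before the individual examples, here is a minimal, self-contained sketch of the typical SpeechRecognizer lifecycle that the snippets below assume: create the recognizer, attach a RecognitionListener, start listening with a RecognizerIntent, and destroy the recognizer when done. The class and field names are illustrative and not taken from any of the projects below; imports are omitted to match the style of the examples.

// Illustrative lifecycle sketch; not from any of the projects below.
public class MyActivity extends Activity {

    private SpeechRecognizer recognizer;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        if (!SpeechRecognizer.isRecognitionAvailable(this)) {
            return; // No recognition service is installed on this device.
        }

        recognizer = SpeechRecognizer.createSpeechRecognizer(this);
        recognizer.setRecognitionListener(new RecognitionListener() {
            @Override public void onResults(Bundle results) {
                ArrayList<String> matches =
                        results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
                // The best match, if any, is at index 0.
            }
            @Override public void onError(int error) { /* see the error-code examples below */ }
            @Override public void onReadyForSpeech(Bundle params) {}
            @Override public void onBeginningOfSpeech() {}
            @Override public void onRmsChanged(float rmsdB) {}
            @Override public void onBufferReceived(byte[] buffer) {}
            @Override public void onEndOfSpeech() {}
            @Override public void onPartialResults(Bundle partialResults) {}
            @Override public void onEvent(int eventType, Bundle params) {}
        });

        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        recognizer.startListening(intent);
    }

    @Override
    protected void onDestroy() {
        if (recognizer != null) {
            recognizer.destroy(); // Releases the connection to the recognition service.
        }
        super.onDestroy();
    }
}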
Example 1
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    boolean isSpeechEnabled = SpeechRecognizer.isRecognitionAvailable(this);

    // Determine fragment to use
    Fragment searchFragment = isSpeechEnabled
            ? new LeanbackSearchFragment()
            : new TextSearchFragment();

    // Add fragment
    getSupportFragmentManager()
            .beginTransaction()
            .replace(android.R.id.content, searchFragment)
            .commit();
}
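Note that isRecognitionAvailable() only reports whether a recognition service is installed; it does not check the RECORD_AUDIO runtime permission, which must be requested separately on Android 6.0 and later. A minimal sketch (the request-code constant and helper names are illustrative, not from the project above):

// Illustrative permission helpers to pair with isRecognitionAvailable().
private static final int REQUEST_RECORD_AUDIO = 1; // arbitrary request code

private boolean hasAudioPermission() {
    return ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
            == PackageManager.PERMISSION_GRANTED;
}

private void requestAudioPermission() {
    ActivityCompat.requestPermissions(this,
            new String[]{Manifest.permission.RECORD_AUDIO}, REQUEST_RECORD_AUDIO);
}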
 
Example 2
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);

    if (requestCode == REQUEST_CODE && resultCode == RESULT_OK) {

        // The match with the highest confidence score is at index 0.
        ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);

        if (matches != null && !matches.isEmpty()) {
            String sentence = matches.get(0);
            speechRecognitionListener.getOnSpeechRecognitionListener()
                    .OnSpeechRecognitionFinalResult(sentence);

            return;
        }
    }

    speechRecognitionListener.onError(SpeechRecognizer.ERROR_NO_MATCH);
}
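For context, results like these arrive only after the system recognition activity has been launched; a minimal sketch of the matching call, assuming REQUEST_CODE is the same constant checked above:

// Launch the platform speech-recognition UI; results are delivered to
// onActivityResult() above under RecognizerIntent.EXTRA_RESULTS.
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
        RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Speak now");
startActivityForResult(intent, REQUEST_CODE);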
 
Example 3
private void showPlayServicesError(final int errorCode) {
    if (DEBUG) {
        MyLog.i(CLS_NAME, "showPlayServicesError");
    }

    onError(SpeechRecognizer.ERROR_CLIENT);

    switch (errorCode) {

        case UNRECOVERABLE:
            // TODO
            break;
        default:
            final GoogleApiAvailability apiAvailability = GoogleApiAvailability.getInstance();
            apiAvailability.showErrorNotification(mContext, errorCode);
            break;
    }
}
 
Example 4
/**
 * Receives a terminating error from the stream.
 *
 * <p>May only be called once, and if called it must be the last method called. In particular,
 * if an exception is thrown by an implementation of {@code onError}, no further calls to any
 * method are allowed.
 *
 * <p>{@code throwable} should be a {@link StatusException} or {@link StatusRuntimeException},
 * but other {@code Throwable} types are possible. Callers should generally convert from a
 * {@link Status} via {@link Status#asException()} or {@link Status#asRuntimeException()}.
 * Implementations should generally convert to a {@code Status} via
 * {@link Status#fromThrowable(Throwable)}.
 *
 * @param throwable the error that occurred on the stream
 */
@Override
public void onError(final Throwable throwable) {
    if (DEBUG) {
        MyLog.w(CLS_NAME, "onError");
        throwable.printStackTrace();
        final Status status = Status.fromThrowable(throwable);
        MyLog.w(CLS_NAME, "onError: " + status.toString());
    }

    if (doError.get()) {
        doError.set(false);
        stopListening();
        listener.onError(SpeechRecognizer.ERROR_NETWORK);
    }
}
 
Example 5
/**
 * A network or recognition error occurred.
 *
 * @param error the error code, as defined in {@link SpeechRecognizer}
 */
@Override
public void onError(final int error) {
    if (DEBUG) {
        MyLog.w(CLS_NAME, "onError: " + error);
        MyLog.w(CLS_NAME, "onError: doEndOfSpeech: " + doEndOfSpeech);
        MyLog.w(CLS_NAME, "onError: doError: " + doError);
        MyLog.i(CLS_NAME, "onError: doBeginningOfSpeech: " + doBeginningOfSpeech);
    }

    if (error != SpeechRecognizer.ERROR_NO_MATCH) {
        doError = true;
    }

    if (doError) {
        onRecognitionError(error);
    }
}
 
Example 6
/**
 * Process the extracted text that has been identified as a command
 *
 * @param text the command to process
 */
private void process(@NonNull final String text) {
    if (DEBUG) {
        MyLog.i(CLS_NAME, "process");
    }

    final Bundle bundle = new Bundle();

    final ArrayList<String> voiceResults = new ArrayList<>(1);
    voiceResults.add(text);

    final float[] confidence = new float[1];
    confidence[0] = 1f;

    bundle.putStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION, voiceResults);
    bundle.putFloatArray(SpeechRecognizer.CONFIDENCE_SCORES, confidence);
    bundle.putInt(LocalRequest.EXTRA_CONDITION, Condition.CONDITION_GOOGLE_NOW);

    AsyncTask.execute(new Runnable() {
        @Override
        public void run() {
            new RecognitionAction(SaiyAccessibilityService.this.getApplicationContext(), SPH.getVRLocale(SaiyAccessibilityService.this.getApplicationContext()),
                    SPH.getTTSLocale(SaiyAccessibilityService.this.getApplicationContext()), sl, bundle);
        }
    });
}
 
Example 7
Source Project: android-speech   Source File: Speech.java    License: Apache License 2.0
@Override
public void onPartialResults(final Bundle bundle) {
    mDelayedStopListening.resetTimer();

    final List<String> partialResults = bundle.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    final List<String> unstableData = bundle.getStringArrayList("android.speech.extra.UNSTABLE_TEXT");

    if (partialResults != null && !partialResults.isEmpty()) {
        mPartialData.clear();
        mPartialData.addAll(partialResults);
        mUnstableData = unstableData != null && !unstableData.isEmpty()
                ? unstableData.get(0) : null;
        try {
            if (mLastPartialResults == null || !mLastPartialResults.equals(partialResults)) {
                if (mDelegate != null)
                    mDelegate.onSpeechPartialResults(partialResults);
                mLastPartialResults = partialResults;
            }
        } catch (final Throwable exc) {
            Logger.error(Speech.class.getSimpleName(),
                    "Unhandled exception in delegate onSpeechPartialResults", exc);
        }
    }
}
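Partial results are only delivered when the session is started with RecognizerIntent.EXTRA_PARTIAL_RESULTS; the "android.speech.extra.UNSTABLE_TEXT" key read above is an undocumented extra of Google's recognizer and should be treated as best-effort. A minimal sketch (speechRecognizer is assumed to be an existing SpeechRecognizer instance):

// Partial results must be requested explicitly when starting the session.
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
speechRecognizer.startListening(intent);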
 
Example 8
@Override
public void initialize(CordovaInterface cordova, CordovaWebView webView) {
  super.initialize(cordova, webView);

  activity = cordova.getActivity();
  context = webView.getContext();
  view = webView.getView();

  view.post(new Runnable() {
    @Override
    public void run() {
      recognizer = SpeechRecognizer.createSpeechRecognizer(activity);
      SpeechRecognitionListener listener = new SpeechRecognitionListener();
      recognizer.setRecognitionListener(listener);
    }
  });
}
 
Example 9
@Override
public void onPartialResults(Bundle bundle) {
  ArrayList<String> matches = bundle.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
  Log.d(LOG_TAG, "SpeechRecognitionListener partialResults: " + matches);
  try {
    if (matches != null && matches.size() > 0) {
      // Build the JSON payload only when there are matches to report.
      JSONArray matchesJSON = new JSONArray(matches);
      if (!mLastPartialResults.equals(matchesJSON)) {
        mLastPartialResults = matchesJSON;
        PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, matchesJSON);
        pluginResult.setKeepCallback(true);
        callbackContext.sendPluginResult(pluginResult);
      }
    }
  } catch (Exception e) {
    e.printStackTrace();
    callbackContext.error(e.getMessage());
  }
}
 
Example 10
Source Project: iqra-android   Source File: MainActivity.java    License: MIT License
@Override
public void onResults(Bundle results) {
    mIsListening = false;
    micText.setText(getString(R.string.tap_on_mic));
    // Reset the record circle to its inactive 80dp size.
    int sizePx = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 80, getResources().getDisplayMetrics());
    recordCircle.getLayoutParams().width = sizePx;
    recordCircle.getLayoutParams().height = sizePx;
    recordCircle.requestLayout();
    recordCircle.setImageResource(R.drawable.record_circle_inactive);
    partialResult.setText("");
    // Log.d(TAG, "onResults"); //$NON-NLS-1$
    ArrayList<String> matches = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    // matches are the return values of speech recognition engine
    if (matches != null) {
        // Log.d(TAG, matches.toString()); //$NON-NLS-1$
        callApi(matches.get(0));
    } else {
        Toast.makeText(getApplicationContext(), getResources().getString(R.string.cannot_understand), Toast.LENGTH_SHORT).show();
    }
}
 
Example 11
Source Project: 365browser   Source File: SpeechRecognition.java    License: Apache License 2.0
private void handleResults(Bundle bundle, boolean provisional) {
    if (mContinuous && provisional) {
        // In continuous mode, Android's recognizer sends final results as provisional.
        provisional = false;
    }

    ArrayList<String> list = bundle.getStringArrayList(
            SpeechRecognizer.RESULTS_RECOGNITION);
    String[] results = list.toArray(new String[list.size()]);

    float[] scores = bundle.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);

    nativeOnRecognitionResults(mNativeSpeechRecognizerImplAndroid,
                               results,
                               scores,
                               provisional);
}
 
Example 12
Source Project: 365browser   Source File: SpeechRecognition.java    License: Apache License 2.0
private SpeechRecognition(long nativeSpeechRecognizerImplAndroid) {
    mContinuous = false;
    mNativeSpeechRecognizerImplAndroid = nativeSpeechRecognizerImplAndroid;
    mListener = new Listener();
    mIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);

    if (sRecognitionProvider != null) {
        mRecognizer = SpeechRecognizer.createSpeechRecognizer(
                ContextUtils.getApplicationContext(), sRecognitionProvider);
    } else {
        // It is possible to force-enable the speech recognition web platform feature (using a
        // command-line flag) even if initialize() failed to find the PROVIDER_PACKAGE_NAME
        // provider, in which case the first available speech recognition provider is used.
        // Caveat: Continuous mode may not work as expected with a different provider.
        mRecognizer =
                SpeechRecognizer.createSpeechRecognizer(ContextUtils.getApplicationContext());
    }

    mRecognizer.setRecognitionListener(mListener);
}
 
Example 13
protected void initializeRecognizer() {
    if (speechRecognizer != null) {
        return;
    }

    synchronized (speechRecognizerLock) {
        if (speechRecognizer != null) {
            speechRecognizer.destroy();
            speechRecognizer = null;
        }

        final ComponentName component = RecognizerChecker.findGoogleRecognizer(context);
        speechRecognizer = SpeechRecognizer.createSpeechRecognizer(context, component);
        speechRecognizer.setRecognitionListener(new InternalRecognitionListener());
    }
}
 
Example 14
private SpeechRecognition(final Context context, int nativeSpeechRecognizerImplAndroid) {
    mContext = context;
    mContinuous = false;
    mNativeSpeechRecognizerImplAndroid = nativeSpeechRecognizerImplAndroid;
    mListener = new Listener();
    mIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);

    if (mRecognitionProvider != null) {
        mRecognizer = SpeechRecognizer.createSpeechRecognizer(mContext, mRecognitionProvider);
    } else {
        // It is possible to force-enable the speech recognition web platform feature (using a
        // command-line flag) even if initialize() failed to find the PROVIDER_PACKAGE_NAME
        // provider, in which case the first available speech recognition provider is used.
        // Caveat: Continuous mode may not work as expected with a different provider.
        mRecognizer = SpeechRecognizer.createSpeechRecognizer(mContext);
    }

    mRecognizer.setRecognitionListener(mListener);
}
 
Example 15
Source Project: adt-leanback-support   Source File: SearchBar.java    License: Apache License 2.0
/**
 * Set the speech recognizer to be used when doing voice search. The Activity/Fragment is in
 * charge of creating and destroying the recognizer with its own lifecycle.
 *
 * @param recognizer a SpeechRecognizer
 */
public void setSpeechRecognizer(SpeechRecognizer recognizer) {
    if (null != mSpeechRecognizer) {
        mSpeechRecognizer.setRecognitionListener(null);
        if (mListening) {
            mSpeechRecognizer.cancel();
            mListening = false;
        }
    }
    mSpeechRecognizer = recognizer;
    if (mSpeechRecognizer != null) {
        enforceAudioRecordPermission();
    }
    if (mSpeechRecognitionCallback != null && mSpeechRecognizer != null) {
        throw new IllegalStateException("Can't have speech recognizer and request");
    }
}
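As the javadoc notes, the hosting Activity or Fragment owns the recognizer's lifecycle; a sketch of what that typically looks like, assuming mSearchBar is the SearchBar instance:

// The Activity creates and destroys the recognizer; SearchBar only borrows it.
@Override
protected void onResume() {
    super.onResume();
    mSpeechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
    mSearchBar.setSpeechRecognizer(mSpeechRecognizer);
}

@Override
protected void onPause() {
    mSearchBar.setSpeechRecognizer(null);
    mSpeechRecognizer.destroy();
    mSpeechRecognizer = null;
    super.onPause();
}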
 
Example 16
@Override
public void onResults(Bundle results) {
    ArrayList<String> data = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    // Guard against a null list as well as an empty one to avoid a NullPointerException.
    if (data != null && data.size() > 0) {
        String query = data.get(0);
        if (listener != null) {
            listener.onVoiceRecognitionComplete(query);
        }
    } else {
        if (listener != null) {
            listener.onError(0);
        }
    }
}
 
Example 17
Source Project: Android-Speech-Recognition   Source File: SpeechRecognition.java    License: MIT License
private void initializeSpeechRecognitionParameters() {

    if (!isSpeechRecognitionAvailable())
        throw new IllegalStateException(context.getString(R.string.speech_not_enabled_exception_text));

    /*
     * Initialize the SpeechRecognitionPermissions and Google IME here
     * for lazy loading of the fragments.
     */
    initializeGoogleVoiceImeParameters();
    speechRecognitionPermissions = new SpeechRecognitionPermissions();
    ((Activity) context).getFragmentManager()
            .beginTransaction()
            .add(speechRecognitionPermissions, SpeechRecognition.class.getSimpleName())
            .commit();

    /*
     * Initialize the SpeechRecognizer; the listener implementing
     * OnSpeechRecognitionListener is set by the client.
     */
    speechRecognizer = SpeechRecognizer.createSpeechRecognizer(context);

    /*
     * Initialize the speech recognition intent with the default language.
     */
    recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, context.getPackageName());
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, MAX_RESULT_COUNT);
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);

    /*
     * The offline-only preference is only supported from API level 23 (M).
     */
    if (enableOnlyOfflineRecognition && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
        recognizerIntent.putExtra(RecognizerIntent.EXTRA_PREFER_OFFLINE, true);
    }

    //TODO: Set preferred Speech recognition Language
}
 
Example 18
@Override
public void onResults(Bundle bundle) {

    // The sentence with the highest confidence score is at index 0.
    ArrayList<String> matches = bundle.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);

    if (matches != null && !matches.isEmpty()) {
        String sentence = matches.get(0);

        Log.i(SpeechRecognitionListener.class.getSimpleName(), sentence);
        onSpeechRecognitionListener.OnSpeechRecognitionFinalResult(sentence);

    } else {
        onError(SpeechRecognizer.ERROR_NO_MATCH);
    }
}
 
Example 19
@Override
public void onPartialResults(Bundle bundle) {
    // The sentence with the highest confidence score is at index 0.
    ArrayList<String> matches = bundle.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);

    if (matches != null && !matches.isEmpty()) {
        String word = matches.get(0);

        Log.i(SpeechRecognitionListener.class.getSimpleName(), word);
        onSpeechRecognitionListener.OnSpeechRecognitionCurrentResult(word);

    } else {
        onError(SpeechRecognizer.ERROR_NO_MATCH);
    }
}
 
Example 20
Source Project: Amadeus   Source File: MainActivity.java    License: GNU General Public License v3.0
public void onResults(Bundle results) {
    String input = "";
    String debug = "";
    Log.d(TAG, "Received results");
    ArrayList<String> data = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);

    for (String word : data) {
        debug += word + "\n";
    }
    Log.d(TAG, debug);

    input += data.get(0);
    /* TODO: Japanese doesn't split the words. Sigh. */
    String[] splitInput = input.split(" ");

    /* Really, google? */
    if (splitInput[0].equalsIgnoreCase("Асистент")) {
        splitInput[0] = "Ассистент";
    }

    /* Switch language within current context for voice recognition */
    Context context = LangContext.load(getApplicationContext(), contextLang[0]);

    if (splitInput.length > 2 && splitInput[0].equalsIgnoreCase(context.getString(R.string.assistant))) {
        String cmd = splitInput[1].toLowerCase();
        String[] args = new String[splitInput.length - 2];
        System.arraycopy(splitInput, 2, args, 0, splitInput.length - 2);

        if (cmd.contains(context.getString(R.string.open))) {
            Amadeus.openApp(args, MainActivity.this);
        }

    } else {
        Amadeus.responseToInput(input, context, MainActivity.this);
    }
}
 
Example 21
Source Project: o2oa   Source File: ErrorTranslation.java    License: GNU Affero General Public License v3.0
public static String recogError(int errorCode) {
    String message;
    switch (errorCode) {
        case SpeechRecognizer.ERROR_AUDIO:
            message = "Audio error";
            break;
        case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
            message = "No speech input";
            break;
        case SpeechRecognizer.ERROR_CLIENT:
            message = "Other client-side error";
            break;
        case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
            message = "Insufficient permissions";
            break;
        case SpeechRecognizer.ERROR_NETWORK:
            message = "Network error";
            break;
        case SpeechRecognizer.ERROR_NO_MATCH:
            message = "No matching recognition result";
            break;
        case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
            message = "Recognition engine busy";
            break;
        case SpeechRecognizer.ERROR_SERVER:
            message = "Server error";
            break;
        case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
            message = "Connection timed out";
            break;
        default:
            message = "Unknown error: " + errorCode;
            break;
    }
    return message;
}
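A typical call site for this helper, sketched from a RecognitionListener callback (the TAG constant is illustrative):

@Override
public void onError(int error) {
    // Map the numeric SpeechRecognizer error code to a readable message.
    String message = ErrorTranslation.recogError(error);
    Log.w(TAG, "Recognition failed: " + message);
}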
 
Example 22
Source Project: DroidSpeech   Source File: DroidSpeech.java    License: Apache License 2.0
/**
 * Initializes the droid speech properties
 */
private void initDroidSpeechProperties()
{
    // Initializing the droid speech recognizer
    droidSpeechRecognizer = SpeechRecognizer.createSpeechRecognizer(context);

    // Initializing the speech intent
    speechIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    speechIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    speechIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, context.getPackageName());
    speechIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
    speechIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, Extensions.MAX_VOICE_RESULTS);
    if(dsProperties.currentSpeechLanguage != null)
    {
        // Setting the speech language
        speechIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, dsProperties.currentSpeechLanguage);
        speechIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, dsProperties.currentSpeechLanguage);
    }

    if(dsProperties.offlineSpeechRecognition && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M)
    {
        // Setting offline speech recognition to true
        speechIntent.putExtra(RecognizerIntent.EXTRA_PREFER_OFFLINE, true);
    }

    // Initializing the audio Manager
    audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
}
 
Example 23
@Override
public void onPartialResults(final Bundle partialResults) {
    if (recognitionActive) {
        updateStopRunnable(1);
        final ArrayList<String> partialRecognitionResults = partialResults.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
        if (partialRecognitionResults != null && !partialRecognitionResults.isEmpty()) {
            GoogleRecognitionServiceImpl.this.onPartialResults(partialRecognitionResults);
        }
    }
}
 
Example 24
@Override
public void onPartialResults(Bundle bundle) {
  ArrayList<String> results = bundle.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
  // Guard against a missing or empty result list before reading index 0.
  if (results == null || results.isEmpty()) {
    result = "";
  } else {
    result = results.get(0);
  }
  speechListener.onPartialResult(result);
}
 
Example 25
private int getErrorMessage(int errorCode) {
  int errCode = ErrorMessages.ERROR_DEFAULT;
  switch (errorCode) {
    case SpeechRecognizer.ERROR_AUDIO:
      errCode = ErrorMessages.ERROR_AUDIO;
      break;
    case SpeechRecognizer.ERROR_CLIENT:
      errCode = ErrorMessages.ERROR_CLIENT;
      break;
    case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
      errCode = ErrorMessages.ERROR_INSUFFICIENT_PERMISSIONS;
      break;
    case SpeechRecognizer.ERROR_NETWORK:
      errCode = ErrorMessages.ERROR_NETWORK;
      break;
    case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
      errCode = ErrorMessages.ERROR_NETWORK_TIMEOUT;
      break;
    case SpeechRecognizer.ERROR_NO_MATCH:
      errCode = ErrorMessages.ERROR_NO_MATCH;
      break;
    case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
      errCode = ErrorMessages.ERROR_RECOGNIZER_BUSY;
      break;
    case SpeechRecognizer.ERROR_SERVER:
      errCode = ErrorMessages.ERROR_SERVER;
      break;
    case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
      errCode = ErrorMessages.ERROR_SPEECH_TIMEOUT;
      break;
  }
  return errCode;
}
 
Example 26
public static boolean initialize(Context context) {
    if (!SpeechRecognizer.isRecognitionAvailable(context))
        return false;

    PackageManager pm = context.getPackageManager();
    Intent intent = new Intent(RecognitionService.SERVICE_INTERFACE);
    final List<ResolveInfo> list = pm.queryIntentServices(intent, PackageManager.GET_SERVICES);

    for (ResolveInfo resolve : list) {
        ServiceInfo service = resolve.serviceInfo;

        if (!service.packageName.equals(PROVIDER_PACKAGE_NAME))
            continue;

        int versionCode;
        try {
            versionCode = pm.getPackageInfo(service.packageName, 0).versionCode;
        } catch (NameNotFoundException e) {
            continue;
        }

        if (versionCode < PROVIDER_MIN_VERSION)
            continue;

        mRecognitionProvider = new ComponentName(service.packageName, service.name);

        return true;
    }

    // If we reach this point, we failed to find a suitable recognition provider.
    return false;
}
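The same queryIntentServices() pattern can enumerate every installed recognition service, which is useful when picking an explicit component for createSpeechRecognizer(Context, ComponentName); an illustrative helper:

// List every installed RecognitionService implementation (illustrative helper).
public static List<ComponentName> listRecognitionServices(Context context) {
    List<ComponentName> components = new ArrayList<>();
    PackageManager pm = context.getPackageManager();
    Intent intent = new Intent(RecognitionService.SERVICE_INTERFACE);
    for (ResolveInfo resolve : pm.queryIntentServices(intent, PackageManager.GET_SERVICES)) {
        ServiceInfo service = resolve.serviceInfo;
        components.add(new ComponentName(service.packageName, service.name));
    }
    return components;
}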
 
Example 27
@Override
public void onPartialResponseReceived(final String partial) {
    if (DEBUG) {
        MyLog.i(CLS_NAME, "onPartialResponseReceived: " + partial);
    }

    partialArray.clear();
    partialBundle.clear();

    partialArray.add(partial);
    partialBundle.putStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION, partialArray);
    listener.onPartialResults(partialBundle);
}
 
Example 28
@TargetApi(14)
@Override
public void onResults(final Bundle results) {
    if (recognitionActive) {
        final ArrayList<String> recognitionResults = results
                .getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);

        float[] rates = null;

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
            rates = results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);
        }

        if (recognitionResults == null || recognitionResults.isEmpty()) {
            // empty response
            GoogleRecognitionServiceImpl.this.onResult(new AIResponse());
        } else {
            final AIRequest aiRequest = new AIRequest();
            if (rates != null) {
                aiRequest.setQuery(recognitionResults.toArray(new String[recognitionResults.size()]), rates);
            } else {
                aiRequest.setQuery(recognitionResults.get(0));
            }

            // notify listeners about the last recognition result for more accurate user feedback
            GoogleRecognitionServiceImpl.this.onPartialResults(recognitionResults);
            GoogleRecognitionServiceImpl.this.sendRequest(aiRequest, requestExtras);
        }
    }
    stopInternal();
}