android.speech.SpeechRecognizer Java Examples

The following examples show how to use android.speech.SpeechRecognizer. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: SearchBar.java    From adt-leanback-support with Apache License 2.0 6 votes vote down vote up
/**
 * Set the speech recognizer to be used when doing voice search. The Activity/Fragment is in
 * charge of creating and destroying the recognizer with its own lifecycle.
 *
 * @param recognizer a SpeechRecognizer, or null to detach the current one
 * @throws IllegalStateException if a speech recognition callback is already installed;
 *         the two mechanisms are mutually exclusive
 */
public void setSpeechRecognizer(SpeechRecognizer recognizer) {
    // Fail fast before mutating any state, so the search bar is not left
    // half-configured when the caller has already set a recognition callback.
    // (The original threw only after detaching the old recognizer and
    // enforcing the audio permission.)
    if (mSpeechRecognitionCallback != null && recognizer != null) {
        throw new IllegalStateException("Can't have speech recognizer and request");
    }
    if (null != mSpeechRecognizer) {
        // Detach from the previous recognizer and stop any in-flight recognition.
        mSpeechRecognizer.setRecognitionListener(null);
        if (mListening) {
            mSpeechRecognizer.cancel();
            mListening = false;
        }
    }
    mSpeechRecognizer = recognizer;
    if (mSpeechRecognizer != null) {
        enforceAudioRecordPermission();
    }
}
 
Example #2
Source File: GoogleImeSpeechRecognition.java    From Android-Speech-Recognition with MIT License 6 votes vote down vote up
/**
 * Handles the result returned by the Google voice IME activity. On success the
 * top-confidence match is delivered to the client listener; in every other case
 * (cancelled, wrong request code, missing/empty matches) ERROR_NO_MATCH is reported.
 */
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);

    // Guard data against null: some recognizers deliver RESULT_OK with no payload.
    if (requestCode == REQUEST_CODE && resultCode == RESULT_OK && data != null) {

        // The matched text with the highest confidence score is in position 0.
        // (Was a misplaced /** Javadoc */ comment inside the method body.)
        ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);

        if (matches != null && !matches.isEmpty()) {
            String sentence = matches.get(0);
            speechRecognitionListener.getOnSpeechRecognitionListener()
                    .OnSpeechRecognitionFinalResult(sentence);

            return;
        }
    }

    speechRecognitionListener.onError(SpeechRecognizer.ERROR_NO_MATCH);
}
 
Example #3
Source File: RecognitionGoogleCloud.java    From Saiy-PS with GNU Affero General Public License v3.0 6 votes vote down vote up
/**
 * Reports a Play Services availability problem. A generic client error is always
 * surfaced to the recognition pipeline; recoverable codes additionally trigger the
 * standard Play Services error notification.
 *
 * @param errorCode the Play Services availability result code
 */
private void showPlayServicesError(final int errorCode) {
    if (DEBUG) {
        MyLog.i(CLS_NAME, "showPlayServicesError");
    }

    onError(SpeechRecognizer.ERROR_CLIENT);

    if (errorCode == UNRECOVERABLE) {
        // TODO
    } else {
        GoogleApiAvailability.getInstance().showErrorNotification(mContext, errorCode);
    }
}
 
Example #4
Source File: RecognitionGoogleCloud.java    From Saiy-PS with GNU Affero General Public License v3.0 6 votes vote down vote up
/**
 * Receives a terminating error from the stream.
 * <p>
 * May only be called once, and if called it must be the last method invoked on this
 * observer. {@code throwable} should be a {@link StatusException} or
 * {@link StatusRuntimeException}, but other {@code Throwable} types are possible;
 * convert via {@link Status#fromThrowable(Throwable)} when a {@code Status} is needed.
 *
 * @param throwable the error that occurred on the stream
 */
@Override
public void onError(final Throwable throwable) {
    if (DEBUG) {
        MyLog.w(CLS_NAME, "onError");
        throwable.printStackTrace();
        MyLog.w(CLS_NAME, "onError: " + Status.fromThrowable(throwable).toString());
    }

    // The atomic flag ensures at most one error is propagated per session.
    if (doError.get()) {
        doError.set(false);
        stopListening();
        listener.onError(SpeechRecognizer.ERROR_NETWORK);
    }
}
 
Example #5
Source File: SaiyRecognitionListener.java    From Saiy-PS with GNU Affero General Public License v3.0 6 votes vote down vote up
/**
 * A network or recognition error occurred.
 *
 * @param error code is defined in {@link SpeechRecognizer}
 */
@Override
public void onError(final int error) {
    if (DEBUG) {
        MyLog.w(CLS_NAME, "onError: " + error);
        MyLog.w(CLS_NAME, "onError: doEndOfSpeech: " + doEndOfSpeech);
        MyLog.w(CLS_NAME, "onError: doError: " + doError);
        MyLog.i(CLS_NAME, "onError: doBeginningOfSpeech: " + doBeginningOfSpeech);
    }

    // ERROR_NO_MATCH deliberately does not raise the flag here — but note that a
    // doError already set by an earlier event will still propagate a NO_MATCH
    // error below. Only clear-cut errors latch the flag themselves.
    if (error != SpeechRecognizer.ERROR_NO_MATCH) {
        doError = true;
    }

    if (doError) {
        onRecognitionError(error);
    }
}
 
Example #6
Source File: SaiyAccessibilityService.java    From Saiy-PS with GNU Affero General Public License v3.0 6 votes vote down vote up
/**
 * Process the extracted text as identified as a command
 *
 * @param text the command to process
 */
private void process(@NonNull final String text) {
    if (DEBUG) {
        MyLog.i(CLS_NAME, "process");
    }

    // Package the text as if it were a speech recognition result with full
    // confidence (1.0), so the downstream handler treats it like a voice command.
    final Bundle bundle = new Bundle();

    final ArrayList<String> voiceResults = new ArrayList<>(1);
    voiceResults.add(text);

    final float[] confidence = new float[1];
    confidence[0] = 1f;

    bundle.putStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION, voiceResults);
    bundle.putFloatArray(SpeechRecognizer.CONFIDENCE_SCORES, confidence);
    bundle.putInt(LocalRequest.EXTRA_CONDITION, Condition.CONDITION_GOOGLE_NOW);

    // Run off the main thread. NOTE(review): RecognitionAction appears to do its
    // work from its constructor — the instance is discarded; confirm before refactoring.
    AsyncTask.execute(new Runnable() {
        @Override
        public void run() {
            new RecognitionAction(SaiyAccessibilityService.this.getApplicationContext(), SPH.getVRLocale(SaiyAccessibilityService.this.getApplicationContext()),
                    SPH.getTTSLocale(SaiyAccessibilityService.this.getApplicationContext()), sl, bundle);
        }
    });
}
 
Example #7
Source File: Speech.java    From android-speech with Apache License 2.0 6 votes vote down vote up
/**
 * Receives partial recognition hypotheses, caches them, and forwards them to the
 * delegate only when they differ from the previously delivered set.
 */
@Override
public void onPartialResults(final Bundle bundle) {
    mDelayedStopListening.resetTimer();

    final List<String> partial = bundle.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    final List<String> unstable = bundle.getStringArrayList("android.speech.extra.UNSTABLE_TEXT");

    // Nothing to do for an empty hypothesis set.
    if (partial == null || partial.isEmpty()) {
        return;
    }

    mPartialData.clear();
    mPartialData.addAll(partial);
    if (unstable != null && !unstable.isEmpty()) {
        mUnstableData = unstable.get(0);
    } else {
        mUnstableData = null;
    }

    try {
        final boolean changed =
                mLastPartialResults == null || !mLastPartialResults.equals(partial);
        if (changed) {
            if (mDelegate != null) {
                mDelegate.onSpeechPartialResults(partial);
            }
            mLastPartialResults = partial;
        }
    } catch (final Throwable exc) {
        Logger.error(Speech.class.getSimpleName(),
                "Unhandled exception in delegate onSpeechPartialResults", exc);
    }
}
 
Example #8
Source File: SpeechRecognition.java    From cordova-plugin-speechrecognition with MIT License 6 votes vote down vote up
/**
 * Cordova plugin entry point: caches the activity/context/view and creates the
 * SpeechRecognizer with its listener attached.
 */
@Override
public void initialize(CordovaInterface cordova, CordovaWebView webView) {
  super.initialize(cordova, webView);

  activity = cordova.getActivity();
  context = webView.getContext();
  view = webView.getView();

  // SpeechRecognizer must be created on the UI thread, hence the post.
  view.post(new Runnable() {
    @Override
    public void run() {
      recognizer = SpeechRecognizer.createSpeechRecognizer(activity);
      recognizer.setRecognitionListener(new SpeechRecognitionListener());
    }
  });
}
 
Example #9
Source File: SpeechRecognition.java    From cordova-plugin-speechrecognition with MIT License 6 votes vote down vote up
/**
 * Forwards partial recognition results to the JavaScript side, skipping duplicates.
 * The JSONArray is now built only after a null/empty check and inside the try
 * block — previously {@code new JSONArray(matches)} ran on a possibly-null list
 * before the check, outside the catch.
 */
@Override
public void onPartialResults(Bundle bundle) {
  ArrayList<String> matches = bundle.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
  Log.d(LOG_TAG, "SpeechRecognitionListener partialResults: " + matches);
  try {
    // No results in this bundle: nothing to deliver.
    if (matches == null || matches.isEmpty()) {
      return;
    }
    JSONArray matchesJSON = new JSONArray(matches);
    if (!mLastPartialResults.equals(matchesJSON)) {
      mLastPartialResults = matchesJSON;
      // keepCallback leaves the JS callback alive for further partial results.
      PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, matchesJSON);
      pluginResult.setKeepCallback(true);
      callbackContext.sendPluginResult(pluginResult);
    }
  } catch (Exception e) {
    e.printStackTrace();
    callbackContext.error(e.getMessage());
  }
}
 
Example #10
Source File: SpeechRecognition.java    From android-chromium with BSD 2-Clause "Simplified" License 6 votes vote down vote up
/**
 * Converts a recognizer results bundle into parallel result/score arrays and hands
 * them to the native side.
 *
 * @param bundle      results bundle from the platform SpeechRecognizer
 * @param provisional whether these results are provisional (may be overridden below)
 */
private void handleResults(Bundle bundle, boolean provisional) {
    if (mContinuous && provisional) {
        // In continuous mode, Android's recognizer sends final results as provisional.
        provisional = false;
    }

    ArrayList<String> list = bundle.getStringArrayList(
            SpeechRecognizer.RESULTS_RECOGNITION);
    // Defensive: a bundle without RESULTS_RECOGNITION returns null and would
    // previously have thrown an NPE; pass an empty result set instead.
    String[] results = (list == null) ? new String[0] : list.toArray(new String[list.size()]);

    // May be null when the recognizer supplies no confidence scores.
    float[] scores = bundle.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);

    nativeOnRecognitionResults(mNativeSpeechRecognizerImplAndroid,
                               results,
                               scores,
                               provisional);
}
 
Example #11
Source File: SpeechRecognition.java    From 365browser with Apache License 2.0 6 votes vote down vote up
/**
 * Converts a recognizer results bundle into parallel result/score arrays and hands
 * them to the native side.
 *
 * @param bundle      results bundle from the platform SpeechRecognizer
 * @param provisional whether these results are provisional (may be overridden below)
 */
private void handleResults(Bundle bundle, boolean provisional) {
    if (mContinuous && provisional) {
        // In continuous mode, Android's recognizer sends final results as provisional.
        provisional = false;
    }

    ArrayList<String> list = bundle.getStringArrayList(
            SpeechRecognizer.RESULTS_RECOGNITION);
    // Defensive: a bundle without RESULTS_RECOGNITION returns null and would
    // previously have thrown an NPE; pass an empty result set instead.
    String[] results = (list == null) ? new String[0] : list.toArray(new String[list.size()]);

    // May be null when the recognizer supplies no confidence scores.
    float[] scores = bundle.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);

    nativeOnRecognitionResults(mNativeSpeechRecognizerImplAndroid,
                               results,
                               scores,
                               provisional);
}
 
Example #12
Source File: SpeechRecognition.java    From 365browser with Apache License 2.0 6 votes vote down vote up
/**
 * Creates the Java bridge for the native speech recognizer. Continuous mode starts
 * disabled; the platform SpeechRecognizer is bound either to the configured provider
 * or, as a fallback, to whichever provider the system picks.
 *
 * @param nativeSpeechRecognizerImplAndroid handle to the native peer object
 */
private SpeechRecognition(long nativeSpeechRecognizerImplAndroid) {
    mContinuous = false;
    mNativeSpeechRecognizerImplAndroid = nativeSpeechRecognizerImplAndroid;
    mListener = new Listener();
    mIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);

    if (sRecognitionProvider != null) {
        mRecognizer = SpeechRecognizer.createSpeechRecognizer(
                ContextUtils.getApplicationContext(), sRecognitionProvider);
    } else {
        // It is possible to force-enable the speech recognition web platform feature (using a
        // command-line flag) even if initialize() failed to find the PROVIDER_PACKAGE_NAME
        // provider, in which case the first available speech recognition provider is used.
        // Caveat: Continuous mode may not work as expected with a different provider.
        mRecognizer =
                SpeechRecognizer.createSpeechRecognizer(ContextUtils.getApplicationContext());
    }

    mRecognizer.setRecognitionListener(mListener);
}
 
Example #13
Source File: GoogleRecognitionServiceImpl.java    From dialogflow-android-client with Apache License 2.0 6 votes vote down vote up
/**
 * Lazily creates the Google SpeechRecognizer, bound explicitly to the Google
 * recognition service component, and attaches the internal listener.
 * Thread-safe via the recognizer lock.
 */
protected void initializeRecognizer() {
    // Fast path: already initialized, nothing to do.
    if (speechRecognizer != null) {
        return;
    }

    synchronized (speechRecognizerLock) {
        // Re-check under the lock: another thread may have created a recognizer
        // between the unsynchronized check and acquiring the lock. If so it is
        // destroyed and rebuilt below. NOTE(review): simply returning here might
        // be the intent — confirm whether the rebuild is deliberate.
        if (speechRecognizer != null) {
            speechRecognizer.destroy();
            speechRecognizer = null;
        }

        final ComponentName component = RecognizerChecker.findGoogleRecognizer(context);
        speechRecognizer = SpeechRecognizer.createSpeechRecognizer(context, component);
        speechRecognizer.setRecognitionListener(new InternalRecognitionListener());
    }
}
 
Example #14
Source File: SpeechRecognition.java    From android-chromium with BSD 2-Clause "Simplified" License 6 votes vote down vote up
/**
 * Converts a recognizer results bundle into parallel result/score arrays and hands
 * them to the native side.
 *
 * @param bundle      results bundle from the platform SpeechRecognizer
 * @param provisional whether these results are provisional (may be overridden below)
 */
private void handleResults(Bundle bundle, boolean provisional) {
    if (mContinuous && provisional) {
        // In continuous mode, Android's recognizer sends final results as provisional.
        provisional = false;
    }

    ArrayList<String> list = bundle.getStringArrayList(
            SpeechRecognizer.RESULTS_RECOGNITION);
    // Defensive: a bundle without RESULTS_RECOGNITION returns null and would
    // previously have thrown an NPE; pass an empty result set instead.
    String[] results = (list == null) ? new String[0] : list.toArray(new String[list.size()]);

    // May be null when the recognizer supplies no confidence scores.
    float[] scores = bundle.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);

    nativeOnRecognitionResults(mNativeSpeechRecognizerImplAndroid,
                               results,
                               scores,
                               provisional);
}
 
Example #15
Source File: SpeechRecognition.java    From android-chromium with BSD 2-Clause "Simplified" License 6 votes vote down vote up
/**
 * Creates the Java bridge for the native speech recognizer. Continuous mode starts
 * disabled; the platform SpeechRecognizer is bound either to the configured provider
 * or, as a fallback, to whichever provider the system picks.
 *
 * @param context                           context used to create the recognizer
 * @param nativeSpeechRecognizerImplAndroid handle to the native peer object
 */
private SpeechRecognition(final Context context, int nativeSpeechRecognizerImplAndroid) {
    mContext = context;
    mContinuous = false;
    mNativeSpeechRecognizerImplAndroid = nativeSpeechRecognizerImplAndroid;
    mListener = new Listener();
    mIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);

    if (mRecognitionProvider != null) {
        mRecognizer = SpeechRecognizer.createSpeechRecognizer(mContext, mRecognitionProvider);
    } else {
        // It is possible to force-enable the speech recognition web platform feature (using a
        // command-line flag) even if initialize() failed to find the PROVIDER_PACKAGE_NAME
        // provider, in which case the first available speech recognition provider is used.
        // Caveat: Continuous mode may not work as expected with a different provider.
        mRecognizer = SpeechRecognizer.createSpeechRecognizer(mContext);
    }

    mRecognizer.setRecognitionListener(mListener);
}
 
Example #16
Source File: MainActivity.java    From iqra-android with MIT License 6 votes vote down vote up
/**
 * Final recognition callback: resets the mic UI to its inactive state, then either
 * sends the top match to the API or shows a "cannot understand" toast.
 */
@Override
public void onResults(Bundle results) {
    mIsListening = false;
    micText.setText(getString(R.string.tap_on_mic));
    // Compute the 80dp pixel size once instead of evaluating applyDimension twice.
    final int sizePx = (int) TypedValue.applyDimension(
            TypedValue.COMPLEX_UNIT_DIP, 80, getResources().getDisplayMetrics());
    recordCircle.getLayoutParams().width = sizePx;
    recordCircle.getLayoutParams().height = sizePx;
    recordCircle.requestLayout();
    recordCircle.setImageResource(R.drawable.record_circle_inactive);
    partialResult.setText("");
    // matches are the return values of the speech recognition engine.
    ArrayList<String> matches = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    // Guard against an empty (not just null) list before reading index 0.
    if (matches != null && !matches.isEmpty()) {
        callApi(matches.get(0));
    } else {
        Toast.makeText(getApplicationContext(), getResources().getString(R.string.cannot_understand), Toast.LENGTH_SHORT).show();
    }
}
 
Example #17
Source File: SearchActivity.java    From jellyfin-androidtv with GNU General Public License v2.0 6 votes vote down vote up
/**
 * Chooses between the voice-capable Leanback search UI and a plain text search UI,
 * depending on whether speech recognition is available on this device, and installs
 * the chosen fragment as the activity content.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    // Fall back to text-only search on devices without a recognition service.
    Fragment searchFragment;
    if (SpeechRecognizer.isRecognitionAvailable(this)) {
        searchFragment = new LeanbackSearchFragment();
    } else {
        searchFragment = new TextSearchFragment();
    }

    getSupportFragmentManager()
            .beginTransaction()
            .replace(android.R.id.content, searchFragment)
            .commit();
}
 
Example #18
Source File: SpeechRecognition.java    From Android-Speech-Recognition with MIT License 5 votes vote down vote up
/**
 * Initializes the SpeechRecognizer, the recognition intent, and the supporting
 * permission/Google-IME fragments.
 *
 * @throws IllegalStateException if speech recognition is not available on the device
 */
private void initializeSpeechRecognitionParameters(){

        if(!isSpeechRecognitionAvailable())
            throw new IllegalStateException(context.getString(R.string.speech_not_enabled_exception_text));

         /*
          * Initialize the SpeechRecognitionPermissions and googleIme here
          * so the fragments are lazily loaded.
         */
        initializeGoogleVoiceImeParameters();
        speechRecognitionPermissions = new SpeechRecognitionPermissions();
        ((Activity) context).getFragmentManager()
                .beginTransaction()
                .add(speechRecognitionPermissions, SpeechRecognition.class.getSimpleName())
                .commit();

         /*
         * Initialize the SpeechRecognizer; the client's listener is attached elsewhere.
         */
        speechRecognizer = SpeechRecognizer.createSpeechRecognizer(context);

        /*
         * Initialize the speech recognition intent: free-form model, calling package,
         * bounded result count, and partial results enabled.
         */
        recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        recognizerIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, context.getPackageName());
        recognizerIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, MAX_RESULT_COUNT);
        recognizerIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);

        /*
         * Offline-only recognition requires API level 23+ (EXTRA_PREFER_OFFLINE).
         */
        if(enableOnlyOfflineRecognition){
            if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.M)
                recognizerIntent.putExtra(RecognizerIntent.EXTRA_PREFER_OFFLINE, true);
        }

        //TODO: Set preferred Speech recognition Language
    }
 
Example #19
Source File: SpeechRecognitionManager.java    From talkback with Apache License 2.0 5 votes vote down vote up
/** Gets the results from SpeechRecognizer and converts to a string. */
@Override
public void onResults(Bundle results) {
  LogUtils.v(TAG, "Speech recognizer onResults()");
  // Final (non-partial) delivery of the recognized strings.
  ArrayList<String> recognized =
      results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
  handleResult(recognized, /* isPartialResult= */ false);
}
 
Example #20
Source File: SpeechRecognitionManager.java    From talkback with Apache License 2.0 5 votes vote down vote up
/** Speech recognition did not fully understand. */
@Override
public void onPartialResults(Bundle partialResults) {
  LogUtils.v(TAG, "Speech recognizer onPartialResults()");
  // On watches the recognizer only reports partial results, but the recognized
  // string is accurate enough for TalkBack voice commands, so handle them
  // eagerly to improve responsiveness.
  ArrayList<String> recognized =
      partialResults.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
  handleResult(recognized, /* isPartialResult= */ true);
}
 
Example #21
Source File: AbstractRecognitionService.java    From speechutils with Apache License 2.0 5 votes vote down vote up
/**
 * Calls onError(SpeechRecognizer.ERROR_SPEECH_TIMEOUT) if the server initiates a
 * close without having received EOS; otherwise simply shuts down the recorder and
 * recognizer service via onCancel.
 *
 * @param isEosSent true iff EOS was sent
 */
public void handleFinish(boolean isEosSent) {
    if (!isEosSent) {
        // Server closed before end-of-speech was sent: treat as a speech timeout.
        onError(SpeechRecognizer.ERROR_SPEECH_TIMEOUT);
    } else {
        onCancel(mListener);
    }
}
 
Example #22
Source File: AbstractRecognitionService.java    From speechutils with Apache License 2.0 5 votes vote down vote up
/**
 * Wraps a single hypothesis into a SpeechRecognizer results bundle, stored under
 * {@link SpeechRecognizer#RESULTS_RECOGNITION}.
 *
 * @param hypothesis the recognized text (may be null; stored as-is)
 * @return a bundle containing a one-element hypothesis list
 */
protected static Bundle toResultsBundle(String hypothesis) {
    final ArrayList<String> sentences = new ArrayList<>(1);
    sentences.add(hypothesis);
    final Bundle results = new Bundle();
    results.putStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION, sentences);
    return results;
}
 
Example #23
Source File: IntentUtils.java    From speechutils with Apache License 2.0 5 votes vote down vote up
/**
 * @return table that maps SpeechRecognizer error codes to RecognizerIntent error codes
 */
public static SparseIntArray createErrorCodesServiceToIntent() {
    final SparseIntArray mapping = new SparseIntArray();
    // Several service-level codes collapse onto the same intent-level result.
    mapping.put(SpeechRecognizer.ERROR_AUDIO, RecognizerIntent.RESULT_AUDIO_ERROR);
    mapping.put(SpeechRecognizer.ERROR_CLIENT, RecognizerIntent.RESULT_CLIENT_ERROR);
    mapping.put(SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS, RecognizerIntent.RESULT_CLIENT_ERROR);
    mapping.put(SpeechRecognizer.ERROR_NETWORK, RecognizerIntent.RESULT_NETWORK_ERROR);
    mapping.put(SpeechRecognizer.ERROR_NETWORK_TIMEOUT, RecognizerIntent.RESULT_NETWORK_ERROR);
    mapping.put(SpeechRecognizer.ERROR_NO_MATCH, RecognizerIntent.RESULT_NO_MATCH);
    mapping.put(SpeechRecognizer.ERROR_RECOGNIZER_BUSY, RecognizerIntent.RESULT_SERVER_ERROR);
    mapping.put(SpeechRecognizer.ERROR_SERVER, RecognizerIntent.RESULT_SERVER_ERROR);
    mapping.put(SpeechRecognizer.ERROR_SPEECH_TIMEOUT, RecognizerIntent.RESULT_NO_MATCH);
    return mapping;
}
 
Example #24
Source File: GoogleRecognitionServiceImpl.java    From dialogflow-android-client with Apache License 2.0 5 votes vote down vote up
/**
 * Final recognition callback: forwards the recognized query (with confidence
 * scores when available) to the AI service, or reports an empty response, then
 * always tears down the recognizer via stopInternal().
 *
 * @param results the SpeechRecognizer results bundle
 */
@TargetApi(14)
@Override
public void onResults(final Bundle results) {
    if (recognitionActive) {
        final ArrayList<String> recognitionResults = results
                .getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);

        float[] rates = null;

        // Confidence scores only exist from API 14 (ICE_CREAM_SANDWICH) onward.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
            rates = results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);
        }

        if (recognitionResults == null || recognitionResults.isEmpty()) {
            // empty response
            GoogleRecognitionServiceImpl.this.onResult(new AIResponse());
        } else {
            final AIRequest aiRequest = new AIRequest();
            if (rates != null) {
                aiRequest.setQuery(recognitionResults.toArray(new String[recognitionResults.size()]), rates);
            } else {
                aiRequest.setQuery(recognitionResults.get(0));
            }

            // notify listeners about the last recognition result for more accurate user feedback
            GoogleRecognitionServiceImpl.this.onPartialResults(recognitionResults);
            GoogleRecognitionServiceImpl.this.sendRequest(aiRequest, requestExtras);
        }
    }
    // Runs regardless of recognitionActive: the recognizer is always stopped.
    stopInternal();
}
 
Example #25
Source File: ErrorTranslation.java    From o2oa with GNU Affero General Public License v3.0 5 votes vote down vote up
/**
 * Maps a {@link SpeechRecognizer} error code to a human-readable (Chinese) message.
 *
 * @param errorCode one of the SpeechRecognizer.ERROR_* constants
 * @return the corresponding description, or an "unknown error" message with the code
 */
public static String recogError(int errorCode) {
    switch (errorCode) {
        case SpeechRecognizer.ERROR_AUDIO:
            return "音频问题";
        case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
            return "没有语音输入";
        case SpeechRecognizer.ERROR_CLIENT:
            return "其它客户端错误";
        case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
            return "权限不足";
        case SpeechRecognizer.ERROR_NETWORK:
            return "网络问题";
        case SpeechRecognizer.ERROR_NO_MATCH:
            return "没有匹配的识别结果";
        case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
            return "引擎忙";
        case SpeechRecognizer.ERROR_SERVER:
            return "服务端错误";
        case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
            return "连接超时";
        default:
            return "未知错误:" + errorCode;
    }
}
 
Example #26
Source File: MainActivity.java    From iqra-android with MIT License 5 votes vote down vote up
/**
 * Creates the SpeechRecognizer (with this activity as listener) and configures the
 * recognition intent for free-form "ar-AE" dictation with partial results enabled.
 */
protected void setupSpeechInput() {
    mSpeechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
    mSpeechRecognizer.setRecognitionListener(this);
    mSpeechRecognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    // A freshly created intent never carries this extra; the check is defensive.
    if (!mSpeechRecognizerIntent.hasExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE)) {
        mSpeechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, this.getPackageName());
    }
    mSpeechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    mSpeechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "ar-AE");
    mSpeechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
    mIsListening = false;
}
 
Example #27
Source File: SpeechRecognition.java    From cordova-plugin-speechrecognition with MIT License 5 votes vote down vote up
/**
 * Maps a {@link SpeechRecognizer} error code to a human-readable message.
 *
 * @param errorCode one of the SpeechRecognizer.ERROR_* constants
 * @return the corresponding message, or a generic retry prompt for unknown codes
 */
private String getErrorText(int errorCode) {
  switch (errorCode) {
    case SpeechRecognizer.ERROR_AUDIO:
      return "Audio recording error";
    case SpeechRecognizer.ERROR_CLIENT:
      return "Client side error";
    case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
      return "Insufficient permissions";
    case SpeechRecognizer.ERROR_NETWORK:
      return "Network error";
    case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
      return "Network timeout";
    case SpeechRecognizer.ERROR_NO_MATCH:
      return "No match";
    case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
      return "RecognitionService busy";
    case SpeechRecognizer.ERROR_SERVER:
      return "error from server";
    case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
      return "No speech input";
    default:
      return "Didn't understand, please try again.";
  }
}
 
Example #28
Source File: SpeechRecognition.java    From cordova-plugin-speechrecognition with MIT License 5 votes vote down vote up
/**
 * Final recognition callback: delivers all matches to the JavaScript side as a
 * JSON array, or reports the failure message on error.
 */
@Override
public void onResults(Bundle results) {
  ArrayList<String> matches = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
  Log.d(LOG_TAG, "SpeechRecognitionListener results: " + matches);
  try {
    callbackContext.success(new JSONArray(matches));
  } catch (Exception e) {
    e.printStackTrace();
    callbackContext.error(e.getMessage());
  }
}
 
Example #29
Source File: DroidSpeech.java    From DroidSpeech with Apache License 2.0 5 votes vote down vote up
/**
 * Initializes the droid speech properties: the recognizer itself, the recognition
 * intent (free-form model, partial results, bounded result count, optional language
 * and offline preference), and the audio manager.
 */
private void initDroidSpeechProperties()
{
    // Initializing the droid speech recognizer
    droidSpeechRecognizer = SpeechRecognizer.createSpeechRecognizer(context);

    // Initializing the speech intent
    speechIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    speechIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    speechIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, context.getPackageName());
    speechIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
    speechIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, Extensions.MAX_VOICE_RESULTS);
    if(dsProperties.currentSpeechLanguage != null)
    {
        // Setting the speech language (both the language and its preference extra)
        speechIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, dsProperties.currentSpeechLanguage);
        speechIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, dsProperties.currentSpeechLanguage);
    }

    if(dsProperties.offlineSpeechRecognition && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M)
    {
        // Setting offline speech recognition to true (EXTRA_PREFER_OFFLINE needs API 23+)
        speechIntent.putExtra(RecognizerIntent.EXTRA_PREFER_OFFLINE, true);
    }

    // Initializing the audio Manager
    audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
}
 
Example #30
Source File: SpeechRecognitionListener.java    From Android-Speech-Recognition with MIT License 5 votes vote down vote up
/**
 * Partial recognition callback: forwards the highest-confidence hypothesis
 * (index 0) to the client, or reports ERROR_NO_MATCH when the bundle carries
 * no usable matches.
 */
@Override
public void onPartialResults(Bundle bundle) {
    // The sentence with the highest confidence score is at position 0.
    final ArrayList<String> matches = bundle.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);

    if (matches == null || matches.size() == 0) {
        onError(SpeechRecognizer.ERROR_NO_MATCH);
        return;
    }

    final String word = matches.get(0);
    Log.i(SpeechRecognitionListener.class.getSimpleName(), word);
    onSpeechRecognitionListener.OnSpeechRecognitionCurrentResult(word);
}