android.speech.RecognizerIntent Java Examples

The following examples show how to use android.speech.RecognizerIntent. Each example notes its source file, the project it comes from, and that project's license.
Example #1
Source File: LanguageDetailsChecker.java    From cordova-plugin-speechrecognition with MIT License
@Override
public void onReceive(Context context, Intent intent) {
    Bundle results = getResultExtras(true);

    if (results.containsKey(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE)) {
        languagePreference = results.getString(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE);
    }

    if (results.containsKey(RecognizerIntent.EXTRA_SUPPORTED_LANGUAGES)) {
        supportedLanguages = results.getStringArrayList(RecognizerIntent.EXTRA_SUPPORTED_LANGUAGES);

        JSONArray languages = new JSONArray(supportedLanguages);
        callbackContext.success(languages);
        return;
    }

    callbackContext.error(ERROR);
}
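
For context, a receiver like the one above is normally registered as the result receiver of an ordered broadcast that asks the recognizer for its language details. A minimal sketch, assuming the receiver is constructed with the plugin's callback (the constructor argument is hypothetical):

// Query the recognizer for supported languages; the BroadcastReceiver
// above is invoked with the result extras filled in.
Intent detailsIntent = new Intent(RecognizerIntent.ACTION_GET_LANGUAGE_DETAILS);
context.sendOrderedBroadcast(detailsIntent, null,
        new LanguageDetailsChecker(callbackContext),   // hypothetical wiring
        null, Activity.RESULT_OK, null, null);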
 
Example #2
Source File: SpeechRecognition.java    From 365browser with Apache License 2.0
private SpeechRecognition(long nativeSpeechRecognizerImplAndroid) {
    mContinuous = false;
    mNativeSpeechRecognizerImplAndroid = nativeSpeechRecognizerImplAndroid;
    mListener = new Listener();
    mIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);

    if (sRecognitionProvider != null) {
        mRecognizer = SpeechRecognizer.createSpeechRecognizer(
                ContextUtils.getApplicationContext(), sRecognitionProvider);
    } else {
        // It is possible to force-enable the speech recognition web platform feature (using a
        // command-line flag) even if initialize() failed to find the PROVIDER_PACKAGE_NAME
        // provider, in which case the first available speech recognition provider is used.
        // Caveat: Continuous mode may not work as expected with a different provider.
        mRecognizer =
                SpeechRecognizer.createSpeechRecognizer(ContextUtils.getApplicationContext());
    }

    mRecognizer.setRecognitionListener(mListener);
}
 
Example #3
Source File: HomePageFragment.java    From HeroVideo-master with Apache License 2.0
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data)
{
    if (requestCode == MaterialSearchView.REQUEST_VOICE && resultCode == Activity.RESULT_OK)
    {
        ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
        if (matches != null && matches.size() > 0)
        {
            String searchWrd = matches.get(0);
            if (!TextUtils.isEmpty(searchWrd))
            {
                mSearchView.setQuery(searchWrd, false);
            }
        }
        return;
    }
    super.onActivityResult(requestCode, resultCode, data);
}
 
Example #4
Source File: GoogleImeSpeechRecognition.java    From Android-Speech-Recognition with MIT License
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);

    if (requestCode == REQUEST_CODE && resultCode == RESULT_OK) {

        // The match with the highest confidence score is at position 0.
        ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);

        if (matches != null && matches.size() > 0) {
            String sentence = matches.get(0);
            speechRecognitionListener.getOnSpeechRecognitionListener()
                    .OnSpeechRecognitionFinalResult(sentence);

            return;
        }
    }

    speechRecognitionListener.onError(SpeechRecognizer.ERROR_NO_MATCH);
}
 
Example #5
Source File: ChatActivity.java    From BotLibre with Eclipse Public License 1.0
@Override
	protected void onActivityResult(int requestCode, int resultCode, Intent data) {
		super.onActivityResult(requestCode, resultCode, data);
 
		switch (requestCode) {
			case RESULT_SPEECH: {
				if (resultCode == RESULT_OK && null != data) {
	 
					ArrayList<String> text = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
	 
					textView.setText(text.get(0));
					submitChat();
				}
				break;
			}
		}
//		IntentResult scanResult = IntentIntegrator.parseActivityResult(requestCode, resultCode, data);
//		if (scanResult != null) {
//			textView.setText("lookup " + scanResult.getContents());
//			submitChat();
//			if (scanResult.getContents().startsWith("http")) {
//				Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(scanResult.getContents()));
//				startActivity(intent);
//			}
//		}
	}
 
Example #6
Source File: SpeechRecognizerManager.java    From ContinuesVoiceRecognition with MIT License
public SpeechRecognizerManager(Context context,onResultsReady listener)
{
    try{
        mListener=listener;
    }
    catch(ClassCastException e)
    {
        Log.e(TAG,e.toString());
    }
    mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
    mSpeechRecognizer = SpeechRecognizer.createSpeechRecognizer(context);
    mSpeechRecognizer.setRecognitionListener(new SpeechRecognitionListener());
    mSpeechRecognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    mSpeechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
            RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    mSpeechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
            context.getPackageName());
    startListening();

}
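
With a SpeechRecognizer driven this way, results arrive through the RecognitionListener callbacks rather than onActivityResult. A minimal sketch of the final-result callback inside SpeechRecognitionListener, assuming the onResultsReady interface exposes an onResults(ArrayList<String>) method (an assumption, not shown above):

@Override
public void onResults(Bundle results) {
    // Final hypotheses arrive under RESULTS_RECOGNITION, best match first.
    ArrayList<String> matches =
            results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    if (matches != null && mListener != null) {
        mListener.onResults(matches);  // assumed method on onResultsReady
    }
}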
 
Example #7
Source File: Food_RecyclerView_Main.java    From Stayfit with Apache License 2.0
/**
 * Receiving speech input
 */
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);

    switch (requestCode) {
        case REQ_CODE_SPEECH_INPUT: {
            if (resultCode == Activity.RESULT_OK && null != data) {

                ArrayList<String> result = data
                        .getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
                voice_query = (result.get(0));
                Log.d("voice", voice_query);
            }
            break;
        }

    }
}
 
Example #8
Source File: AppRunnerActivity.java    From PHONK with GNU General Public License v3.0
/**
 * Handle the results from the recognition activity.
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode == VOICE_RECOGNITION_REQUEST_CODE && resultCode == Activity.RESULT_OK) {
        // Fill the list view with the strings the recognizer thought it
        // could have heard
        ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);

        for (String _string : matches) {
            MLog.d(TAG, "" + _string);
        }
        onVoiceRecognitionListener.onNewResult(matches);

        //TODO disabled
    }

    if (onBluetoothListener != null) {
        onBluetoothListener.onActivityResult(requestCode, resultCode, data);
    }

    super.onActivityResult(requestCode, resultCode, data);
}
 
Example #9
Source File: MainActivity.java    From Android-Example with Apache License 2.0
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
	super.onActivityResult(requestCode, resultCode, data);

	switch (requestCode) {
	case RESULT_SPEECH: {
			if (resultCode == RESULT_OK && null != data) {

				ArrayList<String> text = data
						.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);

				txtText.setText(text.get(0));
			}
		break;
	}

	}
}
 
Example #10
Source File: Home.java    From xDrip with GNU General Public License v3.0
public void promptSpeechNoteInput(View abc) {

        if (recognitionRunning) return;
        recognitionRunning = true;

        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
        // intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en-US"); // debug voice
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT,
                getString(R.string.speak_your_note_text));

        try {
            startActivityForResult(intent, REQ_CODE_SPEECH_NOTE_INPUT);
        } catch (ActivityNotFoundException a) {
            Toast.makeText(getApplicationContext(),
                    getString(R.string.speech_recognition_is_not_supported),
                    Toast.LENGTH_LONG).show();
        }

    }
 
Example #11
Source File: VoiceControl.java    From HomeGenie-Android with GNU General Public License v3.0
public void startListen() {
    _recognizer = getSpeechRecognizer();
    _recognizer.setRecognitionListener(this);
    //
    // Build the speech recognition intent, passing the required extras.
    Intent recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    //indicate package
    //recognizerIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getClass().getPackage().getName());
    //message to display while listening
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Your wish is my command!");
    //set speech model
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    //specify number of results to retrieve
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);
    //start listening
    //startActivityForResult(listenIntent, VR_REQUEST);
    //startActivityForResult(recognizerIntent, VR_REQUEST);
    _recognizer.startListening(recognizerIntent);
}
 
Example #12
Source File: SearchView.java    From MeiZiNews with MIT License
protected void onVoiceClicked() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, VOICE_SEARCH_TEXT);
    intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);

    if (mActivity != null) {
        mActivity.startActivityForResult(intent, SPEECH_REQUEST_CODE);
    } else if (mFragment != null) {
        mFragment.startActivityForResult(intent, SPEECH_REQUEST_CODE);
    } else if (mSupportFragment != null) {
        mSupportFragment.startActivityForResult(intent, SPEECH_REQUEST_CODE);
    } else {
        if (mContext instanceof Activity) {
            ((Activity) mContext).startActivityForResult(intent, SPEECH_REQUEST_CODE);
        }
    }
}
 
Example #13
Source File: Home.java    From xDrip-plus with GNU General Public License v3.0
/**
 * Show the Google speech input dialog.
 */
private synchronized void promptSpeechInput() {

    if (JoH.ratelimit("speech-input", 1)) {
        if (recognitionRunning) return;
        recognitionRunning = true;

        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
        // intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en-US"); // debug voice
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT,
                getString(R.string.speak_your_treatment));

        try {
            startActivityForResult(intent, REQ_CODE_SPEECH_INPUT);
        } catch (ActivityNotFoundException a) {
            Toast.makeText(getApplicationContext(),
                    R.string.speech_recognition_is_not_supported,
                    Toast.LENGTH_LONG).show();
        }
    }
}
 
Example #14
Source File: GoogleRecognitionServiceImpl.java    From dialogflow-android-client with Apache License 2.0
private Intent createRecognitionIntent() {
    final Intent sttIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    sttIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
            RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);

    final String language = config.getLanguage().replace('-', '_');

    sttIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    sttIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, language);
    sttIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
    sttIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, context.getPackageName());

    // WORKAROUND for https://code.google.com/p/android/issues/detail?id=75347
    sttIntent.putExtra("android.speech.extra.EXTRA_ADDITIONAL_LANGUAGES", new String[]{language});
    return sttIntent;
}
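
The intent built here is typically handed to an in-process SpeechRecognizer rather than started as an activity. A minimal sketch, assuming a speechRecognizer field (not shown above):

// Start listening with the fully configured recognition intent.
speechRecognizer.startListening(createRecognitionIntent());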
 
Example #15
Source File: MainActivity.java    From IdeaTrackerPlus with MIT License
public void startVoiceRecognitionActivity() {

        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        // identifying your application to the Google service
        intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getClass().getPackage().getName());
        // hint in the dialog
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, getString(R.string.voice_msg));
        // hint to the recognizer about what the user is going to say
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        // number of results
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 5);
        // recognition language
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
        startActivityForResult(intent, VOICE_RECOGNITION_REQUEST_CODE);
    }
 
Example #16
Source File: RemoteFragment.java    From RoMote with Apache License 2.0
private void displaySpeechRecognizer() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
            RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    // Start the activity, the intent will be populated with the speech text
    startActivityForResult(intent, SPEECH_REQUEST_CODE);
}
 
Example #17
Source File: NiboDefaultVoiceRecognizerDelegate.java    From Nibo with MIT License
@Override
public Intent buildVoiceRecognitionIntent() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
            RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT,
            getContext().getString(R.string.speak_now));
    return intent;
}
 
Example #18
Source File: SearchActivity.java    From PersistentSearchView with Apache License 2.0
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
	if (requestCode == VOICE_RECOGNITION_REQUEST_CODE && resultCode == RESULT_OK) {
		ArrayList<String> matches = data
				.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
		mSearchView.populateEditText(matches);
	}
	super.onActivityResult(requestCode, resultCode, data);
}
 
Example #19
Source File: FeatureUtilities.java    From 365browser with Apache License 2.0
/**
 * Determines whether or not the {@link RecognizerIntent#ACTION_WEB_SEARCH} {@link Intent}
 * is handled by any {@link android.app.Activity}s in the system.  The result will be cached for
 * future calls.  Passing {@code false} to {@code useCachedValue} will force it to re-query any
 * {@link android.app.Activity}s that can process the {@link Intent}.
 * @param context        The {@link Context} to use to check to see if the {@link Intent} will
 *                       be handled.
 * @param useCachedValue Whether or not to use the cached value from a previous result.
 * @return {@code true} if recognition is supported.  {@code false} otherwise.
 */
public static boolean isRecognitionIntentPresent(Context context, boolean useCachedValue) {
    ThreadUtils.assertOnUiThread();
    if (sHasRecognitionIntentHandler == null || !useCachedValue) {
        PackageManager pm = context.getPackageManager();
        List<ResolveInfo> activities = pm.queryIntentActivities(
                new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
        sHasRecognitionIntentHandler = activities.size() > 0;
    }

    return sHasRecognitionIntentHandler;
}
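
Callers typically use this check to decide whether to expose a voice-search entry point at all. A minimal sketch, where hideVoiceSearchButton() stands in for whatever UI hook the caller has (hypothetical):

// Hide the microphone button when no activity can handle speech recognition.
if (!FeatureUtilities.isRecognitionIntentPresent(context, true /* useCachedValue */)) {
    hideVoiceSearchButton();  // hypothetical UI hook
}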
 
Example #20
Source File: Audio.java    From PHONK with GNU General Public License v3.0
/**
 * Fire an intent to start the speech recognition activity. onActivityResult
 * is handled in BaseActivity
 */
private void startVoiceRecognitionActivity(Activity a) {

    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Tell me something!");
    a.startActivityForResult(intent, VOICE_RECOGNITION_REQUEST_CODE);
}
 
Example #21
Source File: SpeechRecognition.java    From android-chromium with BSD 2-Clause "Simplified" License
@CalledByNative
private void startRecognition(String language, boolean continuous, boolean interim_results) {
    if (mRecognizer == null)
        return;

    mContinuous = continuous;
    mIntent.putExtra("android.speech.extra.DICTATION_MODE", continuous);
    mIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    mIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, interim_results);
    mRecognizer.startListening(mIntent);
}
 
Example #22
Source File: NiboDefaultVoiceRecognizerDelegate.java    From Nibo with MIT License
@Override
public boolean isVoiceRecognitionAvailable() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    PackageManager mgr = getContext().getPackageManager();
    if (mgr != null) {
        List<ResolveInfo> list = mgr.queryIntentActivities(intent, PackageManager.MATCH_DEFAULT_ONLY);
        return list.size() > 0;
    }
    return false;
}
 
Example #23
Source File: SpeechRecognitionManager.java    From talkback with Apache License 2.0
/** Create and initialize the recognition intent. */
private void createRecogIntent() {
  // Works without wifi, but provides many extra partial results. Respects the system language.
  recognizerIntent =
      new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH)
          .putExtra(
              RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM)
          .putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
}
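
Because EXTRA_PARTIAL_RESULTS is requested, the recognizer also streams interim hypotheses to the listener's onPartialResults callback. A minimal sketch, where updateHypothesis() is a hypothetical UI helper:

@Override
public void onPartialResults(Bundle partialResults) {
    // Partial hypotheses also arrive under RESULTS_RECOGNITION.
    ArrayList<String> partial =
            partialResults.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    if (partial != null && !partial.isEmpty()) {
        updateHypothesis(partial.get(0));  // hypothetical UI update
    }
}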
 
Example #24
Source File: SelfAwareConditions.java    From Saiy-PS with GNU Affero General Public License v3.0
/**
 * Add the intent extras
 *
 * @return the {@link Intent} with the required extras added
 */
public Intent getNativeIntent() {

    final Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, mContext.getPackageName());
    intent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, mContext.getString(R.string.app_name));
    intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, RecognitionNative.MAX_RESULTS);
    intent.putExtra(RecognitionDefaults.PREFER_OFFLINE, SPH.getUseOffline(mContext));
    intent.putExtra(RecognitionDefaults.EXTRA_SECURE,
            (getBundle().getInt(LocalRequest.EXTRA_CONDITION, Condition.CONDITION_NONE) == Condition.CONDITION_SECURE));

    final Long timeout = SPH.getPauseTimeout(mContext);
    intent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS, timeout);
    intent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_MINIMUM_LENGTH_MILLIS, timeout);
    intent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS, timeout);

    if (servingRemote()) {
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, getCallback().getParcel()
                .getVRLanguageGoogle().getLocaleString());
    } else {
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, getVRLocale().toString());
    }

    return intent;
}
 
Example #25
Source File: MicConfiguration.java    From BotLibre with Eclipse Public License 1.0
public void googleListening(View v){
	txt.setText("Status: ON");
	setMicIcon(true, false);
	Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
	// Note: EXTRA_LANGUAGE_MODEL normally expects a LANGUAGE_MODEL_* constant;
	// the recognition language itself is usually supplied via EXTRA_LANGUAGE.
	intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, MainActivity.voice.language);
	try {
		startActivityForResult(intent, 1);
		editTextForGoogle.setText("");
	} catch (ActivityNotFoundException a) {
		Toast t = Toast.makeText(getApplicationContext(),
				"Your device doesn't support Speech to Text",
				Toast.LENGTH_SHORT);
		t.show();
	}
}
 
Example #26
Source File: VoiceSuggestionProvider.java    From delion with Apache License 2.0
/**
 * Takes and processes the results from a recognition action. It parses the confidence and
 * string values and stores the processed results here so they are made available to the
 * {@link AutocompleteController} and show up in the omnibox results. This method does not
 * reorder the voice results that come back from the recognizer.
 * @param extras The {@link Bundle} that contains the recognition results from a
 *               {@link RecognizerIntent#ACTION_RECOGNIZE_SPEECH} action.
 */
public void setVoiceResultsFromIntentBundle(Bundle extras) {
    clearVoiceSearchResults();

    if (extras == null) return;

    ArrayList<String> strings = extras.getStringArrayList(
            RecognizerIntent.EXTRA_RESULTS);
    float[] confidences = extras.getFloatArray(
            RecognizerIntent.EXTRA_CONFIDENCE_SCORES);

    if (strings == null || confidences == null) return;

    assert (strings.size() == confidences.length);
    if (strings.size() != confidences.length) return;

    for (int i = 0; i < strings.size(); ++i) {
        // Remove any spaces in the voice search match when determining whether it
        // appears to be a URL. This prevents results like "tech crunch.com" and
        // "www. engadget .com" from failing to be recognized as URLs and therefore
        // not navigating anywhere.
        // If the string appears to be a URL, use it instead of the raw string
        // returned by the voice engine.
        String culledString = strings.get(i).replaceAll(" ", "");
        String url = AutocompleteController.nativeQualifyPartialURLQuery(culledString);
        mResults.add(new VoiceResult(
                url == null ? strings.get(i) : culledString, confidences[i]));
    }
}
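
The extras Bundle processed above comes straight from the Intent delivered to onActivityResult after an ACTION_RECOGNIZE_SPEECH round trip. A minimal sketch, with the request-code constant and provider field named only for illustration:

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == VOICE_SEARCH_REQUEST_CODE && resultCode == Activity.RESULT_OK
            && data != null) {
        // EXTRA_RESULTS and EXTRA_CONFIDENCE_SCORES travel in these extras.
        mVoiceSuggestionProvider.setVoiceResultsFromIntentBundle(data.getExtras());
    }
}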
 
Example #27
Source File: FeatureUtilities.java    From delion with Apache License 2.0
/**
 * Determines whether or not the {@link RecognizerIntent#ACTION_WEB_SEARCH} {@link Intent}
 * is handled by any {@link android.app.Activity}s in the system.  The result will be cached for
 * future calls.  Passing {@code false} to {@code useCachedValue} will force it to re-query any
 * {@link android.app.Activity}s that can process the {@link Intent}.
 * @param context        The {@link Context} to use to check to see if the {@link Intent} will
 *                       be handled.
 * @param useCachedValue Whether or not to use the cached value from a previous result.
 * @return {@code true} if recognition is supported.  {@code false} otherwise.
 */
public static boolean isRecognitionIntentPresent(Context context, boolean useCachedValue) {
    ThreadUtils.assertOnUiThread();
    if (sHasRecognitionIntentHandler == null || !useCachedValue) {
        PackageManager pm = context.getPackageManager();
        List<ResolveInfo> activities = pm.queryIntentActivities(
                new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
        sHasRecognitionIntentHandler = activities.size() > 0;
    }

    return sHasRecognitionIntentHandler;
}
 
Example #28
Source File: Simulation.java    From xDrip with GNU General Public License v3.0
/**
 * Receiving speech input
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);

    switch (requestCode) {
        case REQ_CODE_SPEECH_INPUT: {
            if (resultCode == RESULT_OK && null != data) {
                ArrayList<String> result = data
                        .getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
                String results = result.get(0);
                if ((results != null) && (results.length() > 1)) {
                    if (mTextView != null) {
                        mTextView.setText(results);
                        mTextView.setVisibility(View.VISIBLE);
                    }
                    //TODO add speech recognition using the initiallexicon.txt used in Home.initializeSearchWords for Home.classifyWord called by naturalLanguageRecognition()
                    SendData(this, WEARABLE_VOICE_PAYLOAD, results.getBytes(StandardCharsets.UTF_8));
                }
                //   last_speech_time = JoH.ts();
                //  naturalLanguageRecognition(result.get(0));
            }
            recognitionRunning = false;
            break;
        }

    }
}
 
Example #29
Source File: PackageUtils.java    From FloatingSearchView with Apache License 2.0
static public void startTextToSpeech(Activity context, String prompt, int requestCode) {
    // Despite its name, this launches the speech recognition (speech-to-text) dialog.
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    try {
        context.startActivityForResult(intent, requestCode);
    } catch (ActivityNotFoundException a) {
        Toast.makeText(context, context.getString(R.string.speech_not_supported),
                Toast.LENGTH_SHORT).show();
    }
}