
Maybe the title isn't very clear. In my project I want a service that runs in the background and, when the user says "hello phone" or some other word/phrase, my app starts recognizing voice input. It sort of "works", but not the way I want. I have a service, and this service detects voice:

public class SpeechActivationService extends Service
{
protected AudioManager mAudioManager; 
protected SpeechRecognizer mSpeechRecognizer;
protected Intent mSpeechRecognizerIntent;
protected final Messenger mServerMessenger = new Messenger(new IncomingHandler(this));
protected boolean mIsListening;
protected volatile boolean mIsCountDownOn;

static String TAG = "Icaro";

static final int MSG_RECOGNIZER_START_LISTENING = 1;
static final int MSG_RECOGNIZER_CANCEL = 2;

private int mBindFlag;
private Messenger mServiceMessenger;

@Override
public void onCreate()
{
    super.onCreate();
    mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE); 
    mSpeechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
    mSpeechRecognizer.setRecognitionListener(new SpeechRecognitionListener());
    mSpeechRecognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    mSpeechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                                     RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    mSpeechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
                                     this.getPackageName());

    //mSpeechRecognizer.startListening(mSpeechRecognizerIntent);
}

protected static class IncomingHandler extends Handler
{
    private WeakReference<SpeechActivationService> mtarget;

    IncomingHandler(SpeechActivationService target)
    {
        mtarget = new WeakReference<SpeechActivationService>(target);
    }


    @Override
    public void handleMessage(Message msg)
    {
        final SpeechActivationService target = mtarget.get();

        switch (msg.what)
        {
            case MSG_RECOGNIZER_START_LISTENING:

                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN)
                {
                    // turn off the recognizer beep sound (Jelly Bean work-around)
                    target.mAudioManager.setStreamMute(AudioManager.STREAM_SYSTEM, true);
                }
                 if (!target.mIsListening)
                 {
                     target.mSpeechRecognizer.startListening(target.mSpeechRecognizerIntent);
                     target.mIsListening = true;
                    Log.d(TAG, "message start listening"); //$NON-NLS-1$
                 }
                 break;

             case MSG_RECOGNIZER_CANCEL:
                  target.mSpeechRecognizer.cancel();
                  target.mIsListening = false;
                  Log.d(TAG, "message canceled recognizer"); //$NON-NLS-1$
                  break;
         }
   } 
} 

// Count down timer for Jelly Bean work around
protected CountDownTimer mNoSpeechCountDown = new CountDownTimer(5000, 5000)
{

    @Override
    public void onTick(long millisUntilFinished)
    {
        // TODO Auto-generated method stub

    }

    @Override
    public void onFinish()
    {
        mIsCountDownOn = false;
        Message message = Message.obtain(null, MSG_RECOGNIZER_CANCEL);
        try
        {
            mServerMessenger.send(message);
            message = Message.obtain(null, MSG_RECOGNIZER_START_LISTENING);
            mServerMessenger.send(message);
        }
        catch (RemoteException e)
        {

        }
    }
};

@Override
public int onStartCommand (Intent intent, int flags, int startId) 
{
    //mSpeechRecognizer.startListening(mSpeechRecognizerIntent);
    try
    {
        Message msg = new Message();
        msg.what = MSG_RECOGNIZER_START_LISTENING; 
        mServerMessenger.send(msg);
    }
    catch (RemoteException e)
    {

    }
    return  START_NOT_STICKY;
}

@Override
public void onDestroy()
{
    super.onDestroy();

    if (mIsCountDownOn)
    {
        mNoSpeechCountDown.cancel();
    }
    if (mSpeechRecognizer != null)
    {
        mSpeechRecognizer.destroy();
    }
}

protected class SpeechRecognitionListener implements RecognitionListener
{

    @Override
    public void onBeginningOfSpeech()
    {
        // speech input will be processed, so there is no need for count down anymore
        if (mIsCountDownOn)
        {
            mIsCountDownOn = false;
            mNoSpeechCountDown.cancel();
        }               
        Log.d(TAG, "onBeginingOfSpeech"); //$NON-NLS-1$
    }

    @Override
    public void onBufferReceived(byte[] buffer)
    {
        String sTest = "";
    }

    @Override
    public void onEndOfSpeech()
    {
        Log.d("TESTING: SPEECH SERVICE", "onEndOfSpeech"); //$NON-NLS-1$
     }

    @Override
    public void onError(int error)
    {
        if (mIsCountDownOn)
        {
            mIsCountDownOn = false;
            mNoSpeechCountDown.cancel();
        }
         Message message = Message.obtain(null, MSG_RECOGNIZER_START_LISTENING);
         try
         {
             mIsListening = false;   
             mServerMessenger.send(message);
         }
         catch (RemoteException e)
         {

         }
         Log.d(TAG, "error = " + error); //$NON-NLS-1$
    }

    @Override
    public void onEvent(int eventType, Bundle params)
    {

    }

    @Override
    public void onPartialResults(Bundle partialResults)
    {

    }

    @Override
    public void onReadyForSpeech(Bundle params)
    {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN)
        {
            // Jelly Bean work-around: restart the no-speech count down and un-mute the beep
            mIsCountDownOn = true;
            mNoSpeechCountDown.start();
            mAudioManager.setStreamMute(AudioManager.STREAM_SYSTEM, false);
        }
        Log.d("TESTING: SPEECH SERVICE", "onReadyForSpeech"); //$NON-NLS-1$
    }

    @Override
    public void onResults(Bundle results)
    {
        ArrayList<String> data = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
        Log.d(TAG, (String) data.get(0));

        //mSpeechRecognizer.startListening(mSpeechRecognizerIntent);

        mIsListening = false;
        Message message = Message.obtain(null, MSG_RECOGNIZER_START_LISTENING);
        try
        {
               mServerMessenger.send(message);
        }
        catch (RemoteException e)
        {

        }

        Log.d(TAG, "onResults"); //$NON-NLS-1$
    }

    @Override
    public void onRmsChanged(float rmsdB)
    {

    }
}

@Override
public IBinder onBind(Intent arg0) {
    // TODO Auto-generated method stub
    return null;
}
}

And I start the service in my MainActivity just to try it:

Intent i = new Intent(context, SpeechActivationService.class);
startService(i);

It detects voice input... and TOO MUCH! Every time it detects something there is a "bip bip". Too many beeps, and it's frustrating. I only want recognition to start when I say "hello phone", "start", or some other specific word. I tried looking at https://github.com/gast-lib/gast-lib/blob/master/library/src/root/gast/speech/activation/WordActivator.java but I really don't know how to use that library. I also looked at the question onCreate of android service not called, but I don't understand exactly what I have to do. Anyway, I have already imported the gast library; I just need to know how to use it. Can anyone help me step by step? Thanks
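For the activation word itself, I thought I could just check the recognized text in onResults and only react when it contains my phrase, something like the sketch below (HOTWORD and onHotwordDetected() are just placeholder names I made up), but I don't know if this is the right approach or if I really need WordActivator for it:

private static final String HOTWORD = "hello phone"; // placeholder activation phrase

@Override
public void onResults(Bundle results)
{
    ArrayList<String> matches = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    if (matches != null)
    {
        for (String match : matches)
        {
            if (match.toLowerCase(Locale.US).contains(HOTWORD))
            {
                onHotwordDetected(); // placeholder: start the "real" recognition here
                break;
            }
        }
    }

    // restart listening, as before
    mIsListening = false;
    try
    {
        mServerMessenger.send(Message.obtain(null, MSG_RECOGNIZER_START_LISTENING));
    }
    catch (RemoteException e)
    {
    }
}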

1 Answer


Use setStreamSolo(AudioManager.STREAM_VOICE_CALL, true) instead of setStreamMute. Remember to call setStreamSolo(AudioManager.STREAM_VOICE_CALL, false) in the MSG_RECOGNIZER_CANCEL case.
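Roughly, keeping your existing IncomingHandler, the change would look like this (untested sketch, only the two cases shown):

case MSG_RECOGNIZER_START_LISTENING:
    // per the suggestion above: solo the voice-call stream so other streams (including the beep) are silenced
    target.mAudioManager.setStreamSolo(AudioManager.STREAM_VOICE_CALL, true);
    if (!target.mIsListening)
    {
        target.mSpeechRecognizer.startListening(target.mSpeechRecognizerIntent);
        target.mIsListening = true;
    }
    break;

case MSG_RECOGNIZER_CANCEL:
    // release the solo flag so other audio is audible again
    target.mAudioManager.setStreamSolo(AudioManager.STREAM_VOICE_CALL, false);
    target.mSpeechRecognizer.cancel();
    target.mIsListening = false;
    break;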