I am trying to do speech recognition programmatically, without the dialog. On the first call to SpeechRecognizer.startListening(recognizerIntent) everything works fine and I get the recognized speech back as a string in the results. When I start a second recognition, however, only the start-of-recognition beep plays, and when I stop speaking I hear the end-of-recognition beep, but none of the callback methods such as onResults(), onRmsChanged or onError are ever called. So the SpeechRecognizer plays its sounds, but it delivers no result when startListening() is called again.
Activity:
package com.example.plucinst.emojat;
import android.app.Activity;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.Switch;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Created by thomasplucinski on 08/04/2017.
*/
public class StartActivity extends AppCompatActivity implements RecognitionListener {
private Button btnGoToChat;
private ImageButton btnSpeechControl;
private LinearLayout speechControlContainer;
private Switch switchMode;
private Activity context;
private String firstMatchText;
private SpeechRecognizer speechRecognizer = null;
private ProgressBar progressBar;
private Intent recognizerIntent;
private String messageText = "";
private GlobalSetting globalSetting;
private ArrayList<String> matchesText;
private List<String> sendStringArray;
private String recognizedContactName = "Gollum"; //For the prototype the contact isn't recognized via speech but set as a static value
private static final int REQUEST_CODE_SPEECH = 1234;
private static final int REQUEST_CODE_DETECTION = 0000;
private static final String LOG_TAG = "START_ACTIVITY";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
context = this;
globalSetting = GlobalSetting.getInstance();
sendStringArray = Arrays.asList(getResources().getStringArray(R.array.send));
setContentView(R.layout.activity_start);
PreferenceManager.setDefaultValues(this, R.xml.preferences_app_settings, false);
SharedPreferences sharedPrefs = PreferenceManager.getDefaultSharedPreferences(this);
//Intent intent = new Intent(this, InboxActivity.class);
//startActivity(intent);
//
initUI();
// Init the speechRecognition
speechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
speechRecognizer.setRecognitionListener(this);
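// Configure the recognizer intent: prefer German (de-DE), use the web-search language model and ask for up to three result candidates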
recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, "de-DE");
recognizerIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, this.getPackageName());
recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);
recognizerIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 3);
}
@Override
protected void onResume(){
super.onResume();
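// Restart listening automatically whenever the activity returns to the foreground while speech mode is enabled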
if (globalSetting.speechRecognitionActive){
startSpeechRecognition();
}
}
/**
* Checks if the device is connected to the internet
* @return true if an active network connection is available
*/
public boolean isConnected()
{
ConnectivityManager cm = (ConnectivityManager) getSystemService(CONNECTIVITY_SERVICE);
NetworkInfo net = cm.getActiveNetworkInfo();
return net != null && net.isAvailable() && net.isConnected();
}
/**
* Initializes the UI elements and the listeners
*/
private void initUI(){
btnGoToChat = (Button) findViewById(R.id.btn_go_to_chat);
btnSpeechControl = (ImageButton) findViewById(R.id.btn_speech_control);
switchMode = (Switch) findViewById(R.id.switch_app_mode);
progressBar = (ProgressBar) findViewById(R.id.speech_progress_bar);
progressBar.setIndeterminate(false);
speechControlContainer = (LinearLayout) findViewById(R.id.speech_control_container);
if (btnGoToChat!=null){
btnGoToChat.setOnClickListener(new Button.OnClickListener(){
@Override
public void onClick(View v) {
goToInbox();
}
});
}
if (btnSpeechControl!=null){
btnSpeechControl.setOnClickListener(new Button.OnClickListener(){
@Override
public void onClick(View v) {
if(isConnected()){
startSpeechRecognition();
globalSetting.speechRecognitionActive = true;
}
else{
Toast.makeText(getApplicationContext(), "Please Connect to Internet", Toast.LENGTH_LONG).show();
globalSetting.speechRecognitionActive = false;
}
//goToInbox();
}
});
}
switchMode.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
// isChecked is true when the switch is in the On position
if (isChecked){
speechControlContainer.setVisibility(View.VISIBLE);
btnGoToChat.setVisibility(View.GONE);
}else{
speechControlContainer.setVisibility(View.GONE);
btnGoToChat.setVisibility(View.VISIBLE);
}
}
});
}
private void startSpeechRecognition(){
/*
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
startActivityForResult(intent, REQUEST_CODE_SPEECH);
*/
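// Stop any recognition that may still be in progress before starting a new session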
speechRecognizer.stopListening();
Log.d(LOG_TAG, " call startSpeechRecognition");
speechRecognizer.startListening(recognizerIntent);
}
@Override
protected void onPause() {
super.onPause();
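// Release the recognizer while the activity is in the background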
if (speechRecognizer != null) {
speechRecognizer.destroy();
Log.i(LOG_TAG, "destroy");
}
}
@Override
public void onBeginningOfSpeech() {
Log.i(LOG_TAG, "onBeginningOfSpeech");
progressBar.setIndeterminate(false);
progressBar.setMax(10);
}
@Override
public void onBufferReceived(byte[] buffer) {
Log.i(LOG_TAG, "onBufferReceived: " + buffer);
}
@Override
public void onEndOfSpeech() {
Log.i(LOG_TAG, "onEndOfSpeech");
progressBar.setIndeterminate(true);
//toggleButton.setChecked(false);
}
@Override
public void onError(int errorCode) {
String errorMessage = getErrorText(errorCode);
Log.d(LOG_TAG, "FAILED " + errorMessage);
//returnedText.setText(errorMessage);
//toggleButton.setChecked(false);
}
@Override
public void onEvent(int arg0, Bundle arg1) {
Log.i(LOG_TAG, "onEvent");
}
@Override
public void onPartialResults(Bundle arg0) {
Log.i(LOG_TAG, "onPartialResults");
}
@Override
public void onReadyForSpeech(Bundle arg0) {
Log.i(LOG_TAG, "onReadyForSpeech");
}
@Override
public void onResults(Bundle results) {
Log.i(LOG_TAG, "onResults");
progressBar.setIndeterminate(true);
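// The results bundle contains an ArrayList of candidate transcriptions for the utterance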
matchesText = results
.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
processSpeechResults();
//returnedText.setText(text);
}
@Override
public void onRmsChanged(float rmsdB) {
Log.i(LOG_TAG, "onRmsChanged: " + rmsdB);
progressBar.setProgress((int) rmsdB);
}
public static String getErrorText(int errorCode) {
String message;
switch (errorCode) {
case SpeechRecognizer.ERROR_AUDIO:
message = "Audio recording error";
break;
case SpeechRecognizer.ERROR_CLIENT:
message = "Client side error";
break;
case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
message = "Insufficient permissions";
break;
case SpeechRecognizer.ERROR_NETWORK:
message = "Network error";
break;
case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
message = "Network timeout";
break;
case SpeechRecognizer.ERROR_NO_MATCH:
message = "No match";
break;
case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
message = "RecognitionService busy";
break;
case SpeechRecognizer.ERROR_SERVER:
message = "error from server";
break;
case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
message = "No speech input";
break;
default:
message = "Didn't understand, please try again.";
break;
}
return message;
}
private void processSpeechResults(){
firstMatchText = matchesText.get(0); //because the recognizer returns a list of proposed matches and we only need the first one
Log.d("STARTACTIVITY", "recognized text: " + firstMatchText);
//Do sth.
}
}
Does anyone have an idea why?
Thanks in advance.