Speech recognition intent not opening on button click - Java

I am working on a simple assistant app, initially hardcoding what it replies. It has a button which, when clicked, should show the voice recognizer intent, but it is not showing up on button click. I have attached my code here; please help me find the mistake causing the error.
Also, please help me figure out how to invoke the speech recognizer without tapping the button, by just saying a specified word, as in "OK Google".
MainActivity.java
package com.example.rv00485448.neha1;
import android.content.Intent;
import android.os.Build;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.speech.tts.TextToSpeech;
import android.speech.tts.UtteranceProgressListener;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import java.util.ArrayList;
import java.util.Locale;
import static android.speech.RecognizerIntent.ACTION_RECOGNIZE_SPEECH;
public class MainActivity extends Activity{
private TextToSpeech tts;
private ArrayList<String> questions;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
findViewById(R.id.microphoneButton).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
listen();
}
});
loadQuestions();
tts = new TextToSpeech(MainActivity.this, new TextToSpeech.OnInitListener() {
@Override
public void onInit(int status) {
if (status == TextToSpeech.SUCCESS) {
int result = tts.setLanguage(Locale.US);
if (result == TextToSpeech.LANG_MISSING_DATA || result == TextToSpeech.LANG_NOT_SUPPORTED) {
Log.e("TTS", "This Language is not supported");
}
speak("Hello");
} else {
Log.e("TTS", "Initilization Failed!");
}
}
});
}
private void loadQuestions(){
questions = new ArrayList<>();
questions.clear();
questions.add("hi how are you");
questions.add("I am good. how are you feeling today?");
questions.add("Do you have vitals readings?");
questions.add("you seem to have fever. Do you want me to book an appointment with doctor nandan ");
questions.add("I have booked an appointment with doctor nandan at 5 PM");
questions.add("Thank you too");
}
private void listen(){
Intent i = new Intent(ACTION_RECOGNIZE_SPEECH);
i.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
i.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
i.putExtra(RecognizerIntent.EXTRA_PROMPT, "Say something");
// speak("I am listening to you");
// try {
// startActivityForResult(i, 100);
// } catch (ActivityNotFoundException a) {
// Toast.makeText(MainActivity.this, "Your device doesn't support Speech Recognition", Toast.LENGTH_SHORT).show();
// }
}
@Override
public void onDestroy() {
if (tts != null) {
tts.stop();
tts.shutdown();
}
super.onDestroy();
}
private void speak(String text){
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
tts.speak(text, TextToSpeech.QUEUE_FLUSH, null, null);
}else{
tts.speak(text, TextToSpeech.QUEUE_FLUSH, null);
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if(requestCode == 100){
if (resultCode == RESULT_OK && null != data) {
ArrayList<String> res = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
String inSpeech = res.get(0);
recognition(inSpeech);
}
}
}
private void recognition(String text){
switch (text)
{
case "hello":
{
speak(questions.get(0));
break;
}
case "fine how about you":
{
speak(questions.get(1));
break;
}
case "feeling dizzy":
{
speak(questions.get(2));
break;
}
case "yeah":
{
speak(questions.get(3));
break;
}
case "yes":
{
speak(questions.get(4));
break;
}
case "thank you":
{
speak(questions.get(5));
break;
}
}
}
}

You are not actually launching that activity: the startActivityForResult() call in listen() is commented out.
Use startActivityForResult(i, 100) inside the listen() method.

You need to add the INTERNET permission to your manifest file.
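A minimal sketch of the corrected listen() method, based on the commented-out lines in the question (the request code 100 matches the check in onActivityResult(); treat the manifest permission as the second answer's suggestion):

private void listen() {
    Intent i = new Intent(ACTION_RECOGNIZE_SPEECH);
    i.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    i.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    i.putExtra(RecognizerIntent.EXTRA_PROMPT, "Say something");
    try {
        // Without this call the recognizer UI is never launched.
        startActivityForResult(i, 100);
    } catch (ActivityNotFoundException e) {
        // Needs: import android.content.ActivityNotFoundException; import android.widget.Toast;
        Toast.makeText(MainActivity.this, "Your device doesn't support Speech Recognition", Toast.LENGTH_SHORT).show();
    }
}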

Related

How to make my virtual assistant app always run in the background? (Android Studio)

I created a virtual assistant app in Android Studio and it works fine until I exit the app window and the process stops. I want the app to always run in the background, so that whenever it hears the wake word it will respond. I tried using a Service but I couldn't make it work.
Can you help me, please?
This is my code:
package com.eylon.jarvis;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.ToggleButton;
import java.util.ArrayList;
import java.util.Locale;
import ai.picovoice.porcupine.Porcupine;
import ai.picovoice.porcupine.PorcupineActivationException;
import ai.picovoice.porcupine.PorcupineActivationLimitException;
import ai.picovoice.porcupine.PorcupineActivationRefusedException;
import ai.picovoice.porcupine.PorcupineActivationThrottledException;
import ai.picovoice.porcupine.PorcupineException;
import ai.picovoice.porcupine.PorcupineInvalidArgumentException;
import ai.picovoice.porcupine.PorcupineManager;
import ai.picovoice.porcupine.PorcupineManagerCallback;
enum AppState {
STOPPED,
WAKEWORD,
STT
}
public class MainActivity extends AppCompatActivity {
private static final String ACCESS_KEY = "Oc8ZOSkVtJHWKhVW3iGMedHDSCSXn6P4vQtrQBl8hNLXwLmxLhs2AA==";
private PorcupineManager porcupineManager = null;
TextView textView;
ToggleButton button;
private SpeechRecognizer speechRecognizer;
private Intent speechRecognizerIntent;
private AppState currentState;
private void displayError(String message) {
Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
}
private final PorcupineManagerCallback porcupineManagerCallback = new PorcupineManagerCallback() {
@Override
public void invoke(int keywordIndex) {
runOnUiThread(() -> {
textView.setText("");
try {
// need to stop porcupine manager before speechRecognizer can start listening.
porcupineManager.stop();
} catch (PorcupineException e) {
displayError("Failed to stop Porcupine.");
return;
}
speechRecognizer.startListening(speechRecognizerIntent);
currentState = AppState.STT;
});
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
textView = findViewById(R.id.text1);
button = findViewById(R.id.button1);
if (!SpeechRecognizer.isRecognitionAvailable(this)) {
displayError("Speech Recognition not available.");
}
// Creating the Intent of the Google speech to text and adding extra variables.
speechRecognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
speechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
speechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
speechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, "en-US");
speechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en-US");
speechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_ONLY_RETURN_LANGUAGE_PREFERENCE, "en-US");
speechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Start speaking");
try {
porcupineManager = new PorcupineManager.Builder()
.setAccessKey(ACCESS_KEY)
.setKeyword(Porcupine.BuiltInKeyword.JARVIS)
.setSensitivity(0.7f)
.build(getApplicationContext(), porcupineManagerCallback);
} catch (PorcupineInvalidArgumentException e) {
onPorcupineInitError(
String.format("%s\nEnsure your accessKey '%s' is a valid access key.", e.getMessage(), ACCESS_KEY)
);
} catch (PorcupineActivationException e) {
onPorcupineInitError("AccessKey activation error");
} catch (PorcupineActivationLimitException e) {
onPorcupineInitError("AccessKey reached its device limit");
} catch (PorcupineActivationRefusedException e) {
onPorcupineInitError("AccessKey refused");
} catch (PorcupineActivationThrottledException e) {
onPorcupineInitError("AccessKey has been throttled");
} catch (PorcupineException e) {
onPorcupineInitError("Failed to initialize Porcupine " + e.getMessage());
}
currentState = AppState.STOPPED;
}
private void onPorcupineInitError(final String errorMessage) {
runOnUiThread(() -> {
TextView errorText = findViewById(R.id.text1);
errorText.setText(errorMessage);
ToggleButton recordButton = findViewById(R.id.button1);
recordButton.setChecked(false);
recordButton.setEnabled(false);
});
}
@Override
protected void onStop() {
if (button.isChecked()) {
stopService();
button.toggle();
speechRecognizer.destroy();
}
super.onStop();
}
private boolean hasRecordPermission() {
return ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
== PackageManager.PERMISSION_GRANTED;
}
private void requestRecordPermission() {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO},
0);
}
@SuppressLint("SetTextI18n")
private void playback(int milliSeconds) {
speechRecognizer.stopListening();
currentState = AppState.WAKEWORD;
new Handler(Looper.getMainLooper()).postDelayed(() -> {
if (currentState == AppState.WAKEWORD) {
porcupineManager.start();
textView.setText("Listening for " + Porcupine.BuiltInKeyword.JARVIS + " ...");
}
}, milliSeconds);
}
private void stopService() {
if (porcupineManager != null) {
try {
porcupineManager.stop();
} catch (PorcupineException e) {
displayError("Failed to stop porcupine.");
}
}
textView.setText("");
speechRecognizer.stopListening();
speechRecognizer.destroy();
currentState = AppState.STOPPED;
}
@Override
public void onRequestPermissionsResult(int requestCode,
@NonNull String[] permissions,
@NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (grantResults.length == 0 || grantResults[0] == PackageManager.PERMISSION_DENIED) {
displayError("Microphone permission is required for this app!");
requestRecordPermission();
} else {
speechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
speechRecognizer.setRecognitionListener(new SpeechListener());
playback(0);
}
}
public void process(View view) {
if (button.isChecked()) {
if (hasRecordPermission()) {
speechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
speechRecognizer.setRecognitionListener(new SpeechListener());
playback(0);
} else {
requestRecordPermission();
}
} else {
stopService();
}
}
private class SpeechListener implements RecognitionListener {
@Override
public void onReadyForSpeech(Bundle params) {
}
@Override
public void onBeginningOfSpeech() {
}
@Override
public void onRmsChanged(float rmsdB) {
}
@Override
public void onBufferReceived(byte[] buffer) {
}
@Override
public void onEndOfSpeech() {
}
@SuppressLint("SwitchIntDef")
@Override
public void onError(int error) {
switch (error) {
case SpeechRecognizer.ERROR_AUDIO:
displayError("Error recording audio.");
break;
case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
displayError("Insufficient permissions.");
break;
case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
case SpeechRecognizer.ERROR_NETWORK:
displayError("Network Error.");
break;
case SpeechRecognizer.ERROR_NO_MATCH:
if (button.isChecked()) {
displayError("No recognition result matched.");
playback(1000);
}
case SpeechRecognizer.ERROR_CLIENT:
return;
case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
displayError("Recognition service is busy.");
break;
case SpeechRecognizer.ERROR_SERVER:
displayError("Server Error.");
break;
case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
displayError("No speech input.");
break;
default:
displayError("Something wrong occurred.");
}
stopService();
button.toggle();
}
@Override
public void onResults(Bundle results) {
ArrayList<String> data = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
audioResponseSelecting(data.get(0).toLowerCase(Locale.ROOT));
textView.setText(data.get(0));
playback(3000);
}
@Override
public void onPartialResults(Bundle partialResults) {
ArrayList<String> data = partialResults.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
audioResponseSelecting(data.get(0).toLowerCase(Locale.ROOT));
textView.setText(data.get(0));
}
@Override
public void onEvent(int eventType, Bundle params) {
}
}
// The response selecting function.
public void audioResponseSelecting(String transcript)
{
if (transcript.equals(("Good morning").toLowerCase(Locale.ROOT)))
{
executeResponse(R.raw.good_morning);
}
else if (transcript.equals(("Who is your creator").toLowerCase(Locale.ROOT)))
{
executeResponse(R.raw.creator);
}
}
// The audio file response execution function.
public void executeResponse(final int audio)
{
MediaPlayer response = MediaPlayer.create(MainActivity.this, audio);
response.start();
}
}
By design, all of SpeechRecognizer's methods "must be invoked only from the main application thread."
The main application thread is also referred to as the "UI thread".
This means that SpeechRecognizer cannot run in a service.
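As an illustration of that quoted constraint (a sketch only, not part of the answer above): any call into SpeechRecognizer that originates on a worker thread has to be posted back to the main looper first, for example:

// Assumes speechRecognizer and speechRecognizerIntent were created as in the question's code.
new Handler(Looper.getMainLooper()).post(new Runnable() {
    @Override
    public void run() {
        // All SpeechRecognizer methods must be invoked here, on the main (UI) thread.
        speechRecognizer.startListening(speechRecognizerIntent);
    }
});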

I am working on an OCR reader application; below is my Java code. My question is: how do I start another method once one has completed?

When the user captures the image, the device automatically reads the text aloud. For that I have implemented a text-to-speech method, but when that finishes I want to start another task. How can that be done?
After reading the captured text, I want to run another text-to-speech step. How can I do this?
package com.example.software2.ocrhy;
import static android.Manifest.permission.CAMERA;
import android.Manifest;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.speech.RecognizerIntent;
import android.speech.tts.TextToSpeech;
import android.util.SparseArray;
import android.view.KeyEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.RequiresApi;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import com.google.android.gms.common.api.Status;
import com.google.android.gms.vision.CameraSource;
import com.google.android.gms.vision.Detector;
import com.google.android.gms.vision.text.TextBlock;
import com.google.android.gms.vision.text.TextRecognizer;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Locale;
public class MainActivity2 extends AppCompatActivity {
private static final int REQUEST_SPEECH = 101;
private static final int REQ_CODE_SPEECH_INPUT = 100;
Button buttonCamera;
private Button button;
private TextView mVoiceInputTv;
private TextView textView;
private SurfaceView surfaceView;
private CameraSource cameraSource;
private TextRecognizer textRecognizer;
private static TextToSpeech textToSpeech;
private String stringResult = null;
@RequiresApi(api = Build.VERSION_CODES.N)
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main2);
mVoiceInputTv = (TextView) findViewById(R.id.textView);
textView = (TextView) findViewById(R.id.textView);
getWindow().getDecorView().setBackgroundColor(Color.WHITE);
ActivityCompat.requestPermissions(this, new String[]{CAMERA}, PackageManager.PERMISSION_GRANTED);
textToSpeech = new TextToSpeech(this, new TextToSpeech.OnInitListener() {
@Override
public void onInit(int status) {
if (status != TextToSpeech.ERROR) {
textToSpeech.setLanguage(Locale.CANADA);
textToSpeech.setSpeechRate(1f);
Toast.makeText(MainActivity2.this, "tap on the screen and say yes for read and no for return to the main menu", Toast.LENGTH_SHORT).show();
textToSpeech.speak("tap on the screen and say yes for read and no for return to the main menu", TextToSpeech.QUEUE_ADD, null);
}
}
});
}
private void textRecognizer() {
Toast.makeText(MainActivity2.this, "Tap on the screen and listen ", Toast.LENGTH_SHORT).show();
textToSpeech.speak(" Tap on the screen take a picture of any text with your device and listen", TextToSpeech.QUEUE_FLUSH, null);
textRecognizer = new TextRecognizer.Builder(getApplicationContext()).build();
cameraSource = new CameraSource.Builder(getApplicationContext(), textRecognizer)
.setRequestedPreviewSize(1280, 1024)
.setAutoFocusEnabled(true)
.build();
surfaceView = findViewById(R.id.surfaceView);
Context context = this;
surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
if (ActivityCompat.checkSelfPermission(context, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
cameraSource.start(surfaceView.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
cameraSource.stop();
}
});
}
private void capture() {
textRecognizer.setProcessor(new Detector.Processor<TextBlock>() {
@Override
public void release() {
}
@Override
public void receiveDetections(Detector.Detections<TextBlock> detections) {
SparseArray<TextBlock> sparseArray = detections.getDetectedItems();
StringBuilder stringBuilder = new StringBuilder();
for (int i = 0; i < sparseArray.size(); ++i) {
TextBlock textBlock = sparseArray.valueAt(i);
if (textBlock != null && textBlock.getValue() != null) {
stringBuilder.append(textBlock.getValue() + " ");
}
}
final String stringText = stringBuilder.toString();
Handler handler = new Handler(Looper.getMainLooper());
handler.post(new Runnable() {
@Override
public void run() {
stringResult = stringText;
resultObtained();
}
});
}
});
}
private void resultObtained() {
setContentView(R.layout.activity_main2);
textView = findViewById(R.id.textView);
textView.setText(stringResult);
textToSpeech.speak(stringResult, TextToSpeech.QUEUE_FLUSH, null, null);
textView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
startVoiceInput();
}
});
}
private void startVoiceInput() {
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Hello, How can I help you?");
try {
startActivityForResult(intent, REQ_CODE_SPEECH_INPUT);
} catch (ActivityNotFoundException a) {
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
switch (requestCode) {
case REQ_CODE_SPEECH_INPUT: {
if (resultCode == RESULT_OK && null != data) {
ArrayList<String> result = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
mVoiceInputTv.setText(result.get(0));
}
if (mVoiceInputTv.getText().toString().contentEquals("time and date")) {
Intent intent = new Intent(getApplicationContext(), MainActivity4.class);
startActivity(intent);
}
if (mVoiceInputTv.getText().toString().contentEquals("battery")) {
Intent intent = new Intent(getApplicationContext(), MainActivity6.class);
startActivity(intent);
mVoiceInputTv.setText(null);
}
if (mVoiceInputTv.getText().toString().contentEquals("location")) {
Intent intent = new Intent(getApplicationContext(), MainActivity8.class);
startActivity(intent);
mVoiceInputTv.setText(null);
}
if (mVoiceInputTv.getText().toString().contentEquals("weather")) {
Intent intent = new Intent(getApplicationContext(), MainActivity5.class);
startActivity(intent);
mVoiceInputTv.setText(null);
} else {
textToSpeech.speak( "Do not understand just tap on the screen Say again", TextToSpeech.QUEUE_FLUSH, null);
}
if (mVoiceInputTv.getText().toString().contentEquals("calculator")) {
Intent intent = new Intent(getApplicationContext(), MainActivity3.class);
startActivity(intent);
mVoiceInputTv.setText(null);
}
else if(mVoiceInputTv.getText().toString().contentEquals("exit")) {
finish();
}
else {
textToSpeech.speak("Do not understand just tap on the screen Say again", TextToSpeech.QUEUE_FLUSH, null);
}
if (mVoiceInputTv.getText().toString().contentEquals("yes")) {
setContentView(R.layout.surface);
surfaceView = findViewById(R.id.surfaceView);
surfaceView.setOnClickListener((View v) -> {
capture();
});
textRecognizer();
mVoiceInputTv.setText(null);
} else if (mVoiceInputTv.getText().toString().contentEquals("no")) {
Intent intent = new Intent(getApplicationContext(), MainActivity.class);
startActivity(intent);
}
break;
}
}
}
public boolean onKeyDown(int keyCode, @Nullable KeyEvent event) {
if(keyCode == KeyEvent.KEYCODE_VOLUME_UP){
textToSpeech.speak("You are in main menu. just swipe right and say what you want", TextToSpeech.QUEUE_FLUSH, null);
Intent intent = new Intent(getApplicationContext(), MainActivity.class);
startActivity(intent);
final Handler handler = new Handler(Looper.getMainLooper());
handler.postDelayed(new Runnable() {
@Override
public void run() {
textToSpeech.speak("you are in main menu. just swipe right and say what you want", TextToSpeech.QUEUE_FLUSH, null);
}
},1000);
}
return true;
}
public void buttonStart(View view) {
startVoiceInput();
}
public void onPause() {
if (textToSpeech != null) {
textToSpeech.stop();
}
super.onPause();
}
}
Just use QUEUE_ADD mode on TextToSpeech and the second utterance will play when the first one is done (QUEUE_ADD appends to the queue, QUEUE_FLUSH resets it):
textToSpeech.speak("this will play when first is done",
TextToSpeech.QUEUE_ADD, null);
You have to call startVoiceInput() again.
You are already wiring that call into an onClick() handler on a TextView.
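For completeness, a sketch combining the two suggestions: queue the second utterance with QUEUE_ADD and, once TTS reports it is done, call startVoiceInput() again. The UtteranceProgressListener and the utterance IDs used here are assumptions, not part of either answer, and the four-argument speak() requires API 21+ (plus import android.speech.tts.UtteranceProgressListener;):

textToSpeech.setOnUtteranceProgressListener(new UtteranceProgressListener() {
    @Override public void onStart(String utteranceId) { }
    @Override public void onError(String utteranceId) { }

    @Override
    public void onDone(String utteranceId) {
        // Called on a worker thread; hop back to the UI thread before starting the recognizer.
        if ("second".equals(utteranceId)) {
            runOnUiThread(() -> startVoiceInput());
        }
    }
});

textToSpeech.speak(stringResult, TextToSpeech.QUEUE_FLUSH, null, "first");
textToSpeech.speak("this will play when the first is done", TextToSpeech.QUEUE_ADD, null, "second");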

Launch another application from my own app

So I'm writing a voice assistant app for my final in Intro to Android, and I got most of it to work; however, I would like the Instagram intent to open the app, not the website. (That would be good enough for now.)
I have tried several solutions from here to no avail.
Ideally, I would like it to get a list of installed apps, put those in an array, and respond to each app's name.
The code is as follows, written in Java.
Any help will be appreciated.
package com.example.kako;
import android.content.Intent;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.speech.tts.TextToSpeech;
import android.text.format.DateUtils;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Toast;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import com.google.android.material.floatingactionbutton.FloatingActionButton;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import static android.text.format.DateUtils.FORMAT_SHOW_TIME;
import static android.text.format.DateUtils.formatDateTime;
public class MainActivity extends AppCompatActivity {
//Requesting run-time permissions
//Create placeholder for user's consent to record_audio permission.
//This will be used in handling callback
private final int MY_PERMISSIONS_RECORD_AUDIO = 1;
private void requestAudioPermissions() {
if (ContextCompat.checkSelfPermission(this,
Manifest.permission.RECORD_AUDIO)
!= PackageManager.PERMISSION_GRANTED) {
//When permission is not granted by user, show them message why this permission is needed.
if (ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.RECORD_AUDIO)) {
Toast.makeText(this, "Please grant permissions to record audio", Toast.LENGTH_LONG).show();
//Give user option to still opt-in the permissions
ActivityCompat.requestPermissions(this,
new String[]{Manifest.permission.RECORD_AUDIO},
MY_PERMISSIONS_RECORD_AUDIO);
} else {
// Show user dialog to grant permission to record audio
ActivityCompat.requestPermissions(this,
new String[]{Manifest.permission.RECORD_AUDIO},
MY_PERMISSIONS_RECORD_AUDIO);
}
}
//If permission is granted, then go ahead recording audio
else if (ContextCompat.checkSelfPermission(this,
Manifest.permission.RECORD_AUDIO)
== PackageManager.PERMISSION_GRANTED) {
//Go ahead with recording audio now
recordAudio();
}
}
private TextToSpeech myTTS;
private SpeechRecognizer mySpeechRecognizer;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Toolbar toolbar = findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
FloatingActionButton fab = findViewById(R.id.fab);
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);
mySpeechRecognizer.startListening(intent);
}
});
initializeTextToSpeech();
initializeSpeechRecognizer();
}
private void initializeSpeechRecognizer() {
if(SpeechRecognizer.isRecognitionAvailable(this)){
mySpeechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
mySpeechRecognizer.setRecognitionListener(new RecognitionListener() {
@Override
public void onReadyForSpeech(Bundle params) {
}
@Override
public void onBeginningOfSpeech() {
}
@Override
public void onRmsChanged(float rmsdB) {
}
@Override
public void onBufferReceived(byte[] buffer) {
}
@Override
public void onEndOfSpeech() {
}
@Override
public void onError(int error) {
}
@Override
public void onResults(Bundle bundle) {
List<String> results = bundle.getStringArrayList(
SpeechRecognizer.RESULTS_RECOGNITION
);
processResult(Objects.requireNonNull(results).get(0));
}
@Override
public void onPartialResults(Bundle partialResults) {
}
@Override
public void onEvent(int eventType, Bundle params) {
}
});
}
}
private void processResult(String command) {
command = command.toLowerCase();
//what is your name?
//what is the time?
//open a browser
//private browser
//youtube
//applications
if(command.indexOf("what") != -1){
if(command.indexOf("your name") != -1) {
speak("Hi! I'm KayKo!");
}
if(command.indexOf("time") != -1) {
Date now = new Date();
String time = formatDateTime(this, now.getTime(),
FORMAT_SHOW_TIME);
speak("the time is " + time);
}
if(command.indexOf("date") != -1) {
Date now = new Date();
String date = formatDateTime(this, now.getTime(),
DateUtils.FORMAT_SHOW_DATE);
speak("the Date is " + date);
}
}
else if ( command.indexOf( "open" ) != -1 ) {
if ( command.indexOf( "browser" ) != -1 ) {
Intent intent;
intent = new Intent( Intent.ACTION_VIEW,
Uri.parse( "https://www.google.com/" ) );
startActivity( intent );
}
}
else if (command.indexOf("private") != -1) {
if (command.indexOf("browser") != -1) {
Intent intent;
intent = new Intent( Intent.ACTION_VIEW,
Uri.parse( "https://duckduckgo.com/" ) );
startActivity( intent );
}
}else if (command.indexOf("youtube") != -1) {
Intent intent;
intent = new Intent( Intent.ACTION_VIEW,
Uri.parse( "https://youtube.com/") );
startActivity( intent );
}else if (command.indexOf("instagram") != -1) {
Intent intent;
intent = new Intent( Intent.ACTION_VIEW,
Uri.parse( "https://www.instagram.com/artsyphotosllc/") );
startActivity( intent );
}
}
private void initializeTextToSpeech() {
myTTS = new TextToSpeech(this, new TextToSpeech.OnInitListener() {
@Override
public void onInit(int i) {
if(myTTS.getEngines().size() ==0){
Toast.makeText(MainActivity.this,"There is no TTS engine installed on this device"
, Toast.LENGTH_LONG).show();
finish();
} else{
myTTS.setLanguage(Locale.getDefault());
speak("Ready");
}
}
});
}
private void speak(String message){
if(Build.VERSION.SDK_INT >= 21){
myTTS.speak(message, TextToSpeech.QUEUE_FLUSH, null, null);
}else {
myTTS.speak(message, TextToSpeech.QUEUE_FLUSH, null);
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
protected void onPause() {
super.onPause();
myTTS.shutdown();
}
}
PackageManager pm = getPackageManager();
//apps package names
String instagram = "com.instagram.android",
youtube = "com.google.android.youtube",
facebook = "com.facebook.katana",
whatsapp = "com.whatsapp";
//other apps package names
// can be found in url of app in play store in the browser
//ex: https://play.google.com/store/apps/details?id=***com.whatsapp***&hl=en
//launch the app
Intent appIntent = pm.getLaunchIntentForPackage(instagram);//change app package name
if(appIntent != null)
startActivity(appIntent);
else {
//App not installed !
}
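On the "list of installed apps" idea from the question, a rough sketch (variable names here are assumptions, not from the answer; on Android 11+ a <queries> declaration or the QUERY_ALL_PACKAGES permission may also be needed to see other packages):

// Build a map from lower-cased app labels to launch intents.
// Assumes java.util.Map/HashMap and android.content.pm.ApplicationInfo are imported.
PackageManager pm = getPackageManager();
Map<String, Intent> launchableApps = new HashMap<>();
for (ApplicationInfo info : pm.getInstalledApplications(0)) {
    Intent launch = pm.getLaunchIntentForPackage(info.packageName);
    if (launch != null) {
        launchableApps.put(pm.getApplicationLabel(info).toString().toLowerCase(Locale.ROOT), launch);
    }
}

// Later, inside processResult(command): launch the first app whose name appears in the command.
for (Map.Entry<String, Intent> entry : launchableApps.entrySet()) {
    if (command.contains(entry.getKey())) {
        startActivity(entry.getValue());
        break;
    }
}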

Bluetooth callback function onCharacteristicRead is not making an Intent call to another activity; works with < API 21 but not in Marshmallow

I am trying to make an Intent call from the Bluetooth Low Energy callback function onCharacteristicRead(). Please suggest where I am going wrong.
I am not sure if it is related to the context we give to the Intent class or something else.
It is showing me the error below:
"Unable to start activity ComponentInfo::java.lang.NullPointerException: Attempt to invoke virtual method 'void android.widget.TextView.setText(java.lang.CharSequence)' on a null object reference"
Below is the code sample:
package com.example.pushkara.msable;
import android.Manifest;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.AlertDialog;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothGatt;
import android.bluetooth.BluetoothGattCallback;
import android.bluetooth.BluetoothGattCharacteristic;
import android.bluetooth.BluetoothGattService;
import android.bluetooth.BluetoothManager;
import android.bluetooth.BluetoothProfile;
import android.bluetooth.le.BluetoothLeScanner;
import android.bluetooth.le.ScanFilter;
import android.bluetooth.le.ScanSettings;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.ParcelUuid;
import android.support.annotation.NonNull;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.logging.ConsoleHandler;
@TargetApi(18)
public class MainActivity extends AppCompatActivity {
private BluetoothAdapter mBluetoothAdapter;
private int REQUEST_ENABLE_BT = 1;
private Handler mHandler;
private static final long SCAN_PERIOD = 5000;
private BluetoothLeScanner mLEScanner;
private ScanSettings settings;
private List<ScanFilter> filters;
private BluetoothGatt mGatt;
private MsaScanCallback mScanCallback;
private Button mCheckInButton;
private Button mCheckOutButton;
private Button mNewButton;
private Boolean buttonvalue = false;
public static TextView mSecurityCode;
public static TextView rssiNdistance;
public final static UUID UUID_MSA_APP = UUID.fromString("13333333-3333-3333-3333-333333333337");
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mCheckInButton = (Button)findViewById(R.id.checkIn);
mCheckOutButton = (Button)findViewById(R.id.checkOut);
mSecurityCode= (TextView)findViewById(R.id.securityCode);
rssiNdistance= (TextView)findViewById(R.id.rssiNdistance);
mCheckOutButton.setOnClickListener(new MsaCheckInCheckout(false));
mCheckInButton.setOnClickListener(new MsaCheckInCheckout(true));
try {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (this.checkSelfPermission(Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
final AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("This app needs location access");
builder.setMessage("Please grant location access so this app can detect msa server.");
builder.setPositiveButton(android.R.string.ok, null);
builder.setOnDismissListener(new DialogInterface.OnDismissListener() {
@TargetApi(Build.VERSION_CODES.M)
@Override
public void onDismiss(DialogInterface dialog) {
requestPermissions(new String[]{Manifest.permission.ACCESS_COARSE_LOCATION}, 1);
}
});
builder.show();
}
}
mHandler = new Handler();
if (!getPackageManager().hasSystemFeature(PackageManager.FEATURE_BLUETOOTH_LE)) {
Toast.makeText(this, "BLE Not Supported",
Toast.LENGTH_SHORT).show();
finish();
}
final BluetoothManager bluetoothManager =
(BluetoothManager) getSystemService(Context.BLUETOOTH_SERVICE);
mBluetoothAdapter = bluetoothManager.getAdapter();
}
catch(Exception ex){
Log.i("msable"," [##MA]::::Exception 1" +ex.toString());
mSecurityCode.setText("[##MA]Exc1" + ex.toString());
}
}
private final ConnectDevice connectToDeviceCallback = new ConnectDevice() {
@Override
public void OnConnectToDevice(BluetoothDevice device){
connectToDevice(device);
}
};
public class MsaCheckInCheckout implements View.OnClickListener {
private boolean mCheckIn;
public MsaCheckInCheckout(boolean isCheckIn){
mCheckIn =isCheckIn;
}
@Override
public void onClick (View v) {
try {
// your code here;
Log.i("CLICK", mCheckIn + "");
if (mBluetoothAdapter == null || !mBluetoothAdapter.isEnabled()) {
Intent enableBtIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
startActivityForResult(enableBtIntent, REQUEST_ENABLE_BT);
} else {
if (Build.VERSION.SDK_INT >= 21) {
if (mGatt != null) {
mGatt.close();
mGatt = null;
}
mScanCallback = new MsaScanCallback(connectToDeviceCallback);
mLEScanner = mBluetoothAdapter.getBluetoothLeScanner();
settings = new ScanSettings.Builder()
.setScanMode(ScanSettings.SCAN_MODE_LOW_LATENCY)
.build();
ScanFilter filter = new ScanFilter.Builder().setServiceUuid(new ParcelUuid(UUID_MSA_APP)).build();
filters = new ArrayList<>();
filters.add(filter);
}
scanLeDevice(true);
}
}
catch(Exception ex){
Log.i("msable","[##MA] ::::Exception 2" +ex.toString());
mSecurityCode.setText("[##MA]Ex2" + ex.toString());
}
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String permissions[], @NonNull int[] grantResults) {
try {
switch (requestCode) {
case 1: {
if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
Log.d("1", "coarse location permission granted");
} else {
final AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Functionality limited");
builder.setMessage("Since location access has not been granted, this app will not be able to discover msa server");
builder.setPositiveButton(android.R.string.ok, null);
builder.setOnDismissListener(new DialogInterface.OnDismissListener() {
@Override
public void onDismiss(DialogInterface dialog) {
}
});
builder.show();
}
}
}
}
catch(Exception ex){
Log.i("msable","[##MA] ::::Exception 3" +ex.toString());
mSecurityCode.setText("[##MA]Ex3" + ex.toString());
}
}
@Override
protected void onResume() {
super.onResume();
}
@Override
protected void onPause() {
super.onPause();
if (mBluetoothAdapter != null && mBluetoothAdapter.isEnabled()) {
scanLeDevice(false);
}
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mGatt == null) {
return;
}
mGatt.close();
mGatt = null;
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
try {
if (requestCode == REQUEST_ENABLE_BT) {
if (resultCode == Activity.RESULT_CANCELED) {
//Bluetooth not enabled.
finish();
return;
}
}
super.onActivityResult(requestCode, resultCode, data);
}
catch(Exception ex){
Log.i("msable","[##MA] ::::Exception 4" +ex.toString());
mSecurityCode.setText("[##MA]Ex4" + ex.toString());
}
}
private void scanLeDevice(final boolean enable) {
try {
Log.i("BLE", "**** scanLeDevice");
if(mBluetoothAdapter != null || mLEScanner !=null) {
Log.i("BLE", "**** Object not null proceed to stop scan");
if (enable) {
mHandler.postDelayed(new Runnable() {
@Override
public void run() {
if (Build.VERSION.SDK_INT < 21) {
mBluetoothAdapter.stopLeScan(mLeScanCallback);
Log.i("BLE", "mBluetoothAdapter STOP SCAN");
} else {
Log.i("BLE", "mLEScanner STOP SCAN");
if(mLEScanner !=null) {
Log.i("BLE", "mLEScanner not null proceed for stop ");
}
else
{
Log.i("BLE", "mLEScanner is null !!!!!");
}
mLEScanner.stopScan(mScanCallback);
}
}
}, SCAN_PERIOD);
if (Build.VERSION.SDK_INT < 21) {
mBluetoothAdapter.startLeScan(mLeScanCallback);
Log.i("BLE", "mBluetoothAdapter START SCAN");
} else {
Log.i("START SCAN", "START SCAN");
mLEScanner.startScan(filters, settings, mScanCallback);
}
} else {
if (Build.VERSION.SDK_INT < 21) {
mBluetoothAdapter.stopLeScan(mLeScanCallback);
} else {
mLEScanner.stopScan(mScanCallback);
}
}
}
else
{
Log.i("BLE","**** Object is null");
}
}
catch(Exception ex){
Log.i("msable","[##MA] ::::Exception 5" +ex.toString());
mSecurityCode.setText("[##MA]Ex5" + ex.toString());
}
}
double getDistance(int rssi) {
return Math.pow(10d, ((double) -52 - rssi) / (10 * 2));
}
private BluetoothAdapter.LeScanCallback mLeScanCallback =
new BluetoothAdapter.LeScanCallback() {
@Override
public void onLeScan(final BluetoothDevice device, final int rssi,
byte[] scanRecord) {
try {
Log.i("BLE","**** onLeScan LeScanCallback");
runOnUiThread(new Runnable() {
@Override
public void run() {
Log.i("onLeScan", device.toString());
double distance = getDistance(rssi);
rssiNdistance.setText("rssi:"+rssi+" distance:"+distance);
if (distance <= 1) {
connectToDevice(device);
}
else{
mSecurityCode.setText("Out of Range!!!");
}
}
});
}
catch(Exception ex){
Log.i("msable","[##MA] ::::Exception 6" +ex.toString());
mSecurityCode.setText("[##MA]Ex6" + ex.toString());
}
}
};
public void connectToDevice(BluetoothDevice device) {
Log.i("BLE","**** connectToDevice");
if (mGatt == null) {
mGatt = device.connectGatt(this, false, gattCallback);
scanLeDevice(false);// will stop after first device detection
}else{
Log.i("gatt status","ATT");
}
}
private final BluetoothGattCallback gattCallback = new BluetoothGattCallback() {
@Override
public void onConnectionStateChange(BluetoothGatt gatt, int status, int newState) {
try {
Log.i("BLE","**** gattCallback onConnectionStateChange Status: "+status);
Log.i("onConnectionStateChange", "Status: " + status);
switch (newState) {
case BluetoothProfile.STATE_CONNECTED:
Log.i("gattCallback", "STATE_CONNECTED");
gatt.discoverServices();
break;
case BluetoothProfile.STATE_DISCONNECTED:
Log.e("gattCallback", "STATE_DISCONNECTED");
break;
default:
Log.e("gattCallback", "STATE_OTHER");
}
}
catch(Exception ex){
Log.i("msable","[##MA] ::::Exception 7" +ex.toString());
mSecurityCode.setText("[##MA]Ex7" + ex.toString());
}
}
@Override
public void onServicesDiscovered(BluetoothGatt gatt, int status) {
try {
Log.i("BLE","**** onServicesDiscovered Status: "+status);
List<BluetoothGattService> services = gatt.getServices();
for (BluetoothGattService bg : services) {
Log.i(bg.toString(), bg.getUuid().toString());
if (UUID_MSA_APP.equals(bg.getUuid())) {
Log.i("MSA APP", "found");
BluetoothGattCharacteristic msacode = bg.getCharacteristics().get(0);
//gatt.setCharacteristicNotification(msacode, true);
gatt.readCharacteristic(msacode);
Log.i("BLE", "**** BluetoothGattCharacteristic msacode: " + msacode.toString());
}
else{
Log.i("MSA APP", "UUID_MSA_APP not Matching found");
}
}
}
catch(Exception ex){
Log.i("msable","[##MA] ::::Exception 8" +ex.toString());
mSecurityCode.setText("[##MA]Ex8" + ex.toString());
}
}
@Override
public void onCharacteristicRead(BluetoothGatt gatt,
BluetoothGattCharacteristic
characteristic, int status) {
try {
Log.i("BLE","**** onCharacteristicRead Status: "+status);
final byte[] data = characteristic.getValue();
if (data != null && data.length > 0) {
final StringBuilder stringBuilder = new StringBuilder(data.length);
for (byte byteChar : data)
stringBuilder.append(String.format("%02X ", byteChar));
Log.i("onCharacteristicRead", new String(data));
runOnUiThread(new Runnable() {
@Override
public void run() {
mSecurityCode.setText(new String(data));
Intent intent = new Intent(MainActivity.this, newActivity.class);
intent.putExtra("UniqueCode", new String(data));
startActivity(intent);
}
});
}
gatt.disconnect();
}
catch(Exception ex){
Log.i("msable","[##MA] ::::Exception 9" +ex.toString());
mSecurityCode.setText("[##MA]Ex9" + ex.toString());
}
}
};
}
In the callback I needed to give the Intent a statically held context and set the FLAG_ACTIVITY_NEW_TASK flag on the intent.
@Override
public void onCharacteristicRead(BluetoothGatt gatt,
BluetoothGattCharacteristic
characteristic, int status) {
try {
//Reads the requested characteristic and get the characteristic value.
final byte[] data = characteristic.getValue();
if (data != null && data.length > 0) {
final StringBuilder stringBuilder = new StringBuilder(data.length);
for (byte byteChar : data)
stringBuilder.append(String.format("%02X ", byteChar));
Intent intent = new Intent(callBackContext, InOutActivity.class);
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
callBackContext.startActivity(intent);
}
}
catch(Exception ex){
dbObj.PutLog("[##MA]EX7" + ex.toString());
}
finally{
if(gatt != null){
dbObj.PutLog("[MA] 5 CLOSE GATT");
gatt.disconnect();
gatt.close();
}
}
}
};
Here callBackContext.startActivity(intent); uses a statically held context for the Intent; this solved my problem.
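A rough sketch of how such a statically held context might be captured (the field name callBackContext is taken from the answer's snippet; where it is assigned is an assumption, since the answer does not show it):

// Held in the Activity so the GATT callback thread can fire the Intent.
private static Context callBackContext;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Using the application context avoids leaking the Activity through the static field.
    callBackContext = getApplicationContext();
    // ... rest of onCreate() as in the question ...
}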
It seems that mSecurityCode is causing the exception. Please post the code where you initialize or write to that variable, and also where you set up the Bluetooth callback.

How to use text-to-speech with a TextView only

I've tried the text-to-speech tutorials online, but they gave me an output where I type the words and click a button to speak the typed words.
What I really want for my output is for the app to just read the text shown in the app; for example, it would read the text "I am Happy", which is already declared as a TextView.
Here is a picture of an example of what I want to do.
Link of picture here
When you press the speaker button it will just read the words above it.
Here is my Java file, which still works by typing the words and then speaking them:
package com.example.chadymaebarinan.emojiexpress;
import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.view.View;
import android.widget.EditText;
import android.speech.tts.TextToSpeech;
import android.speech.tts.TextToSpeech.OnInitListener;
import android.content.Intent;
import android.widget.Toast;
import java.util.Locale;
public class Speech extends AppCompatActivity implements OnClickListener,OnInitListener {
private int MY_DATA_CHECK_CODE = 0;
private TextToSpeech myTTS;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_speech);
Button speakButton = (Button) findViewById(R.id.speak);
speakButton.setOnClickListener(this);
Intent checkTTSIntent = new Intent();
checkTTSIntent.setAction(TextToSpeech.Engine.ACTION_CHECK_TTS_DATA);
startActivityForResult(checkTTSIntent, MY_DATA_CHECK_CODE);
}
public void onClick(View v) {
//handle user clicks here
EditText enteredText = (EditText) findViewById(R.id.enter);
String words = enteredText.getText().toString();
speakWords(words);
}
private void speakWords(String speech) {
myTTS.speak(speech, TextToSpeech.QUEUE_FLUSH, null);
}
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == MY_DATA_CHECK_CODE) {
if (resultCode == TextToSpeech.Engine.CHECK_VOICE_DATA_PASS) {
myTTS = new TextToSpeech(this, this);
} else {
Intent installTTSIntent = new Intent();
installTTSIntent.setAction(TextToSpeech.Engine.ACTION_INSTALL_TTS_DATA);
startActivity(installTTSIntent);
}
}
}
public void onInit(int initStatus) {
if (initStatus == TextToSpeech.SUCCESS) {
if (myTTS.isLanguageAvailable(Locale.US) == TextToSpeech.LANG_AVAILABLE)
myTTS.setLanguage(Locale.US);
myTTS.setLanguage(Locale.US);
} else if (initStatus == TextToSpeech.ERROR) {
Toast.makeText(this, "Sorry! Text To Speech failed...", Toast.LENGTH_LONG).show();
}
}
}
Thank you guys! :)
Create a TextView in your XML with an id, say myword, and remove the EditText from the XML.
Then, instead of
public void onClick(View v) {
//handle user clicks here
EditText enteredText = (EditText) findViewById(R.id.enter);
String words = enteredText.getText().toString();
speakWords(words);
}
do
public void onClick(View v) {
//handle user clicks here
TextView myword = (TextView) findViewById(R.id.myword);
String words = myword.getText().toString();
speakWords(words);
}
Here, this code is working properly for me.
Implement it in the user click handler:
public void onClick(View v) {
//handle user clicks here
EditText enteredText = (EditText) findViewById(R.id.enter);
String words = enteredText.getText().toString();
if (words.length() != 0) {
StartSpeak(words);
}
}
Call a method that initializes TTS before speaking:
private void StartSpeak(final String data) {
TTS=new TextToSpeech(getApplicationContext(), new TextToSpeech.OnInitListener() {
@Override
public void onInit(int initStatus) {
if (initStatus == TextToSpeech.SUCCESS) {
if(TTS.isLanguageAvailable(Locale.US)==TextToSpeech.LANG_AVAILABLE)
TTS.setLanguage(Locale.US);
TTS.setPitch(1.3f);
TTS.setSpeechRate(0.7f);
// start speak
speakWords(data);
}
else if (initStatus == TextToSpeech.ERROR) {
Toast.makeText(getApplicationContext(), "Sorry! Text To Speech failed...", Toast.LENGTH_LONG).show();
}
}
});
}
Then start the text-to-speech:
private void speakWords(String speech) {
TTS.speak(speech, TextToSpeech.QUEUE_FLUSH, null);
}
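To combine this with the first answer (reading a TextView instead of an EditText), the click handler could look like the following sketch (the id myword is the assumed TextView id from the first answer):

public void onClick(View v) {
    // Read the text already shown on screen and hand it to the TTS helper above.
    TextView myword = (TextView) findViewById(R.id.myword);
    StartSpeak(myword.getText().toString());
}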
