Problem when transmitting camera from Android to Wowza - Java

I am trying to transmit the Android camera image to Wowza via RTSP, but without success. It does not return any error, but the local Wowza instance also shows no traffic. I have a local Wowza for testing at http://localhost:8088/
I am using the libstreaming library to send the RTSP stream.
MainActivity.java
package com.security.testeslibstreaming;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import android.content.DialogInterface;
import android.os.Bundle;
import android.view.Menu;
import android.view.SurfaceHolder;
import android.view.Window;
import android.view.WindowManager;
import net.majorkernelpanic.streaming.Session;
import net.majorkernelpanic.streaming.SessionBuilder;
import net.majorkernelpanic.streaming.audio.AudioQuality;
import net.majorkernelpanic.streaming.gl.SurfaceView;
import net.majorkernelpanic.streaming.rtsp.RtspClient;
public class MainActivity extends AppCompatActivity implements RtspClient.Callback, Session.Callback, SurfaceHolder.Callback {
// log tag
public final static String TAG = MainActivity.class.getSimpleName();
// surfaceview
private static SurfaceView mSurfaceView;
// Rtsp session
private Session mSession;
private static RtspClient mClient;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
// getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.activity_main);
mSurfaceView = (SurfaceView) findViewById(R.id.surface);
mSurfaceView.getHolder().addCallback(this);
// Initialize RTSP client
initRtspClient();
}
@Override
protected void onResume() {
super.onResume();
toggleStreaming();
}
@Override
protected void onPause(){
super.onPause();
toggleStreaming();
}
private void initRtspClient() {
// Configures the SessionBuilder
mSession = SessionBuilder.getInstance()
.setContext(getApplicationContext())
.setAudioEncoder(SessionBuilder.AUDIO_NONE)
.setAudioQuality(new AudioQuality(8000, 16000))
.setVideoEncoder(SessionBuilder.VIDEO_H264)
.setSurfaceView(mSurfaceView).setPreviewOrientation(0)
.setCallback(this).build();
// Configures the RTSP client
mClient = new RtspClient();
mClient.setSession(mSession);
mClient.setCallback(this);
mSurfaceView.setAspectRatioMode(SurfaceView.ASPECT_RATIO_PREVIEW);
String ip, port, path;
// We parse the URI written in the Editext
Pattern uri = Pattern.compile("rtsp://(.+):(\\d+)/(.+)");
Matcher m = uri.matcher(AppConfig.STREAM_URL);
m.find();
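// Note (added): m.find() returns false when STREAM_URL does not match the pattern above,
// and the group() calls below then throw IllegalStateException; checking the return value
// makes that failure mode easier to spot.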
ip = m.group(1);
port = m.group(2);
path = m.group(3);
mClient.setCredentials(AppConfig.PUBLISHER_USERNAME,
AppConfig.PUBLISHER_PASSWORD);
mClient.setServerAddress(ip, Integer.parseInt(port));
mClient.setStreamPath("/" + path);
}
private void toggleStreaming() {
if (!mClient.isStreaming()) {
// Start camera preview
mSession.startPreview();
// Start video stream
mClient.startStream();
} else {
// already streaming, stop streaming
// stop camera preview
mSession.stopPreview();
// stop streaming
mClient.stopStream();
}
}
@Override
public void onDestroy() {
super.onDestroy();
mClient.release();
mSession.release();
mSurfaceView.getHolder().removeCallback(this);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
@Override
public void onSessionError(int reason, int streamType, Exception e) {
switch (reason) {
case Session.ERROR_CAMERA_ALREADY_IN_USE:
break;
case Session.ERROR_CAMERA_HAS_NO_FLASH:
break;
case Session.ERROR_INVALID_SURFACE:
break;
case Session.ERROR_STORAGE_NOT_READY:
break;
case Session.ERROR_CONFIGURATION_NOT_SUPPORTED:
break;
case Session.ERROR_OTHER:
break;
}
if (e != null) {
alertError(e.getMessage());
e.printStackTrace();
}
}
private void alertError(final String msg) {
final String error = (msg == null) ? "Unknown error: " : msg;
AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this);
builder.setMessage(error).setPositiveButton("Ok",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
}
});
AlertDialog dialog = builder.create();
dialog.show();
}
@Override
public void onRtspUpdate(int message, Exception exception) {
switch (message) {
case RtspClient.ERROR_CONNECTION_FAILED:
case RtspClient.ERROR_WRONG_CREDENTIALS:
alertError(exception.getMessage());
exception.printStackTrace();
break;
}
}
@Override
public void onPreviewStarted() {
}
@Override
public void onSessionConfigured() {
}
@Override
public void onSessionStarted() {
}
@Override
public void onSessionStopped() {
}
@Override
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
@Override
public void onBitrateUpdate(long bitrate) {
}
}
AppConfig.java
package com.security.testeslibstreaming;
public class AppConfig {
public static final String STREAM_URL = "rtsp://192.XXX.XX.XX:1935/live/myStream";
public static final String PUBLISHER_USERNAME = "barrXXXXXXX";
public static final String PUBLISHER_PASSWORD = "XXXXXXXXXX";
}
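Since no error dialog appears and Wowza shows no traffic, a first diagnostic step (my suggestion, not part of the original code) is to log from the callbacks that are currently left empty, so Logcat shows whether the RTSP handshake ever happens and whether any RTP data leaves the device. These would replace the corresponding empty overrides in MainActivity and need import android.util.Log:
@Override
public void onSessionStarted() {
    Log.d(TAG, "Session started, publishing to " + AppConfig.STREAM_URL);
}
@Override
public void onSessionStopped() {
    Log.d(TAG, "Session stopped");
}
@Override
public void onBitrateUpdate(long bitrate) {
    // If this stays at 0, no RTP packets are being sent at all.
    Log.d(TAG, "Bitrate: " + bitrate);
}
It is also worth double-checking that the manifest declares the INTERNET, CAMERA and RECORD_AUDIO permissions, and that the Wowza live application's source authentication matches PUBLISHER_USERNAME/PUBLISHER_PASSWORD; with wrong credentials libstreaming reports RtspClient.ERROR_WRONG_CREDENTIALS through onRtspUpdate, which the code above already surfaces as an alert.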

Related

How to make my virtual assistant app always run in the background? (Android Studio)

I created a virtual assistant app in Android Studio and it works fine until I exit the app window and the process stops. I want the app to always run in the background so that it responds whenever it hears the wake word. I tried using a Service but I couldn't make it work.
Can you help me, please?
This is my code:
package com.eylon.jarvis;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.ToggleButton;
import java.util.ArrayList;
import java.util.Locale;
import ai.picovoice.porcupine.Porcupine;
import ai.picovoice.porcupine.PorcupineActivationException;
import ai.picovoice.porcupine.PorcupineActivationLimitException;
import ai.picovoice.porcupine.PorcupineActivationRefusedException;
import ai.picovoice.porcupine.PorcupineActivationThrottledException;
import ai.picovoice.porcupine.PorcupineException;
import ai.picovoice.porcupine.PorcupineInvalidArgumentException;
import ai.picovoice.porcupine.PorcupineManager;
import ai.picovoice.porcupine.PorcupineManagerCallback;
enum AppState {
STOPPED,
WAKEWORD,
STT
}
public class MainActivity extends AppCompatActivity {
private static final String ACCESS_KEY = "Oc8ZOSkVtJHWKhVW3iGMedHDSCSXn6P4vQtrQBl8hNLXwLmxLhs2AA==";
private PorcupineManager porcupineManager = null;
TextView textView;
ToggleButton button;
private SpeechRecognizer speechRecognizer;
private Intent speechRecognizerIntent;
private AppState currentState;
private void displayError(String message) {
Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
}
private final PorcupineManagerCallback porcupineManagerCallback = new PorcupineManagerCallback() {
@Override
public void invoke(int keywordIndex) {
runOnUiThread(() -> {
textView.setText("");
try {
// need to stop porcupine manager before speechRecognizer can start listening.
porcupineManager.stop();
} catch (PorcupineException e) {
displayError("Failed to stop Porcupine.");
return;
}
speechRecognizer.startListening(speechRecognizerIntent);
currentState = AppState.STT;
});
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
textView = findViewById(R.id.text1);
button = findViewById(R.id.button1);
if (!SpeechRecognizer.isRecognitionAvailable(this)) {
displayError("Speech Recognition not available.");
}
// Creating the Intent of the Google speech to text and adding extra variables.
speechRecognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
speechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
speechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
speechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, "en-US");
speechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en-US");
speechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_ONLY_RETURN_LANGUAGE_PREFERENCE, "en-US");
speechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Start speaking");
try {
porcupineManager = new PorcupineManager.Builder()
.setAccessKey(ACCESS_KEY)
.setKeyword(Porcupine.BuiltInKeyword.JARVIS)
.setSensitivity(0.7f)
.build(getApplicationContext(), porcupineManagerCallback);
} catch (PorcupineInvalidArgumentException e) {
onPorcupineInitError(
String.format("%s\nEnsure your accessKey '%s' is a valid access key.", e.getMessage(), ACCESS_KEY)
);
} catch (PorcupineActivationException e) {
onPorcupineInitError("AccessKey activation error");
} catch (PorcupineActivationLimitException e) {
onPorcupineInitError("AccessKey reached its device limit");
} catch (PorcupineActivationRefusedException e) {
onPorcupineInitError("AccessKey refused");
} catch (PorcupineActivationThrottledException e) {
onPorcupineInitError("AccessKey has been throttled");
} catch (PorcupineException e) {
onPorcupineInitError("Failed to initialize Porcupine " + e.getMessage());
}
currentState = AppState.STOPPED;
}
private void onPorcupineInitError(final String errorMessage) {
runOnUiThread(() -> {
TextView errorText = findViewById(R.id.text1);
errorText.setText(errorMessage);
ToggleButton recordButton = findViewById(R.id.button1);
recordButton.setChecked(false);
recordButton.setEnabled(false);
});
}
@Override
protected void onStop() {
if (button.isChecked()) {
stopService();
button.toggle();
speechRecognizer.destroy();
}
super.onStop();
}
private boolean hasRecordPermission() {
return ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
== PackageManager.PERMISSION_GRANTED;
}
private void requestRecordPermission() {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO},
0);
}
@SuppressLint("SetTextI18n")
private void playback(int milliSeconds) {
speechRecognizer.stopListening();
currentState = AppState.WAKEWORD;
new Handler(Looper.getMainLooper()).postDelayed(() -> {
if (currentState == AppState.WAKEWORD) {
porcupineManager.start();
textView.setText("Listening for " + Porcupine.BuiltInKeyword.JARVIS + " ...");
}
}, milliSeconds);
}
private void stopService() {
if (porcupineManager != null) {
try {
porcupineManager.stop();
} catch (PorcupineException e) {
displayError("Failed to stop porcupine.");
}
}
textView.setText("");
speechRecognizer.stopListening();
speechRecognizer.destroy();
currentState = AppState.STOPPED;
}
@Override
public void onRequestPermissionsResult(int requestCode,
@NonNull String[] permissions,
@NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (grantResults.length == 0 || grantResults[0] == PackageManager.PERMISSION_DENIED) {
displayError("Microphone permission is required for this app!");
requestRecordPermission();
} else {
speechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
speechRecognizer.setRecognitionListener(new SpeechListener());
playback(0);
}
}
public void process(View view) {
if (button.isChecked()) {
if (hasRecordPermission()) {
speechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
speechRecognizer.setRecognitionListener(new SpeechListener());
playback(0);
} else {
requestRecordPermission();
}
} else {
stopService();
}
}
private class SpeechListener implements RecognitionListener {
@Override
public void onReadyForSpeech(Bundle params) {
}
@Override
public void onBeginningOfSpeech() {
}
@Override
public void onRmsChanged(float rmsdB) {
}
@Override
public void onBufferReceived(byte[] buffer) {
}
@Override
public void onEndOfSpeech() {
}
@SuppressLint("SwitchIntDef")
@Override
public void onError(int error) {
switch (error) {
case SpeechRecognizer.ERROR_AUDIO:
displayError("Error recording audio.");
break;
case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
displayError("Insufficient permissions.");
break;
case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
case SpeechRecognizer.ERROR_NETWORK:
displayError("Network Error.");
break;
case SpeechRecognizer.ERROR_NO_MATCH:
if (button.isChecked()) {
displayError("No recognition result matched.");
playback(1000);
}
case SpeechRecognizer.ERROR_CLIENT:
return;
case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
displayError("Recognition service is busy.");
break;
case SpeechRecognizer.ERROR_SERVER:
displayError("Server Error.");
break;
case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
displayError("No speech input.");
break;
default:
displayError("Something wrong occurred.");
}
stopService();
button.toggle();
}
@Override
public void onResults(Bundle results) {
ArrayList<String> data = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
audioResponseSelecting(data.get(0).toLowerCase(Locale.ROOT));
textView.setText(data.get(0));
playback(3000);
}
@Override
public void onPartialResults(Bundle partialResults) {
ArrayList<String> data = partialResults.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
audioResponseSelecting(data.get(0).toLowerCase(Locale.ROOT));
textView.setText(data.get(0));
}
@Override
public void onEvent(int eventType, Bundle params) {
}
}
// The response selecting function.
public void audioResponseSelecting(String transcript)
{
if (transcript.equals(("Good morning").toLowerCase(Locale.ROOT)))
{
executeResponse(R.raw.good_morning);
}
else if (transcript.equals(("Who is your creator").toLowerCase(Locale.ROOT)))
{
executeResponse(R.raw.creator);
}
}
// The audio file response execution function.
public void executeResponse(final int audio)
{
MediaPlayer response = MediaPlayer.create(MainActivity.this, audio);
response.start();
}
}
By design, all of SpeechRecognizer's methods "must be invoked only from the main application thread."
The main application thread is also referred to as the "UI thread".
This means that SpeechRecognizer cannot run in a service.
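If the goal is only to keep the wake-word detection alive after the activity is gone, one commonly used pattern is to move just the PorcupineManager into a foreground service and let it bring the activity back to the front when the keyword fires, so SpeechRecognizer itself keeps running in the activity on the main thread as described above. The sketch below is only illustrative and makes several assumptions (the class name WakeWordService, the notification channel id, and the way the activity is relaunched are mine, not from the question):
// WakeWordService.java - hypothetical foreground service that hosts only the wake-word engine.
package com.eylon.jarvis;

import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.Service;
import android.content.Intent;
import android.os.Build;
import android.os.IBinder;
import androidx.core.app.NotificationCompat;
import ai.picovoice.porcupine.Porcupine;
import ai.picovoice.porcupine.PorcupineException;
import ai.picovoice.porcupine.PorcupineManager;

public class WakeWordService extends Service {
    private static final String CHANNEL_ID = "wake_word";          // made-up channel id
    private static final String ACCESS_KEY = "YOUR_PICOVOICE_KEY"; // same key as in MainActivity
    private PorcupineManager porcupineManager;

    @Override
    public void onCreate() {
        super.onCreate();
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            NotificationChannel channel = new NotificationChannel(
                    CHANNEL_ID, "Wake word", NotificationManager.IMPORTANCE_LOW);
            getSystemService(NotificationManager.class).createNotificationChannel(channel);
        }
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        Notification notification = new NotificationCompat.Builder(this, CHANNEL_ID)
                .setSmallIcon(android.R.drawable.ic_btn_speak_now)
                .setContentTitle("Listening for \"Jarvis\"")
                .build();
        startForeground(1, notification);
        try {
            porcupineManager = new PorcupineManager.Builder()
                    .setAccessKey(ACCESS_KEY)
                    .setKeyword(Porcupine.BuiltInKeyword.JARVIS)
                    .setSensitivity(0.7f)
                    .build(getApplicationContext(), keywordIndex -> {
                        // Bring the activity forward; it then drives SpeechRecognizer on the UI thread.
                        Intent launch = new Intent(this, MainActivity.class)
                                .addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                        startActivity(launch);
                    });
            porcupineManager.start();
        } catch (PorcupineException e) {
            stopSelf();
        }
        return START_STICKY;
    }

    @Override
    public void onDestroy() {
        if (porcupineManager != null) {
            try {
                porcupineManager.stop();
            } catch (PorcupineException e) {
                // ignore; the service is going away anyway
            }
        }
        super.onDestroy();
    }

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }
}
The service still has to be declared in the manifest and started from the activity (for example with ContextCompat.startForegroundService(...)), it needs the FOREGROUND_SERVICE permission on Android 9+, and newer Android versions add further restrictions on microphone use from the background (such as declaring a microphone foregroundServiceType), so treat this only as a starting point.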

Below is my code, but there is an error: when I open the camera it appears and immediately disappears. How do I hold the camera view open a bit longer?

1. MainActivity.java file
I think the error occurs at this line:
public void surfaceDestroyed(SurfaceHolder holder) {
    cameraSource.stop();
}
I'm trying to open the camera and display it using a SurfaceView. This delays loading the activity for a really long time, so I'm wondering what the best practices are for opening the camera.
package com.example.software2.ocrhy;
public class MainActivity extends AppCompatActivity {
private TextView textView;
private SurfaceView surfaceView;
private CameraSource cameraSource;
private TextRecognizer textRecognizer;
private TextToSpeech textToSpeech;
private String stringResult = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
ActivityCompat.requestPermissions(this, new String[]{CAMERA}, PackageManager.PERMISSION_GRANTED);
textToSpeech = new TextToSpeech(this, new TextToSpeech.OnInitListener() {
@Override
public void onInit(int status) {
}
});
}
@Override
protected void onDestroy() {
super.onDestroy();
cameraSource.release();
}
private void textRecognizer(){
textRecognizer = new TextRecognizer.Builder(getApplicationContext()).build();
cameraSource = new CameraSource.Builder(getApplicationContext(), textRecognizer)
.setRequestedPreviewSize(1280, 1024)
.build();
surfaceView = findViewById(R.id.surfaceView);
surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
@SuppressLint("MissingPermission")
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
cameraSource.start(surfaceView.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
cameraSource.stop();
}
});
textRecognizer.setProcessor(new Detector.Processor<TextBlock>() {
@Override
public void release() {
}
@Override
public void receiveDetections(Detector.Detections<TextBlock> detections) {
SparseArray<TextBlock> sparseArray = detections.getDetectedItems();
StringBuilder stringBuilder = new StringBuilder();
for (int i = 0; i<sparseArray.size(); ++i){
TextBlock textBlock = sparseArray.valueAt(i);
if (textBlock != null && textBlock.getValue() !=null){
stringBuilder.append(textBlock.getValue() + " ");
}
}
final String stringText = stringBuilder.toString();
Handler handler = new Handler(Looper.getMainLooper());
handler.post(() -> {
stringResult = stringText;
resultObtained();
});
}
});
}
private void resultObtained(){
setContentView(R.layout.activity_main);
textView = findViewById(R.id.textView);
textView.setText(stringResult);
textToSpeech.speak(stringResult, TextToSpeech.QUEUE_FLUSH, null, null);
}
public void buttonStart(View view){
setContentView(R.layout.surface);
textRecognizer();
}
}
I got the full code from this link:
javafile
The error occurs in the camera activity.
According to your source code, your camera starts reading the text immediately. If you want to first see the text through your camera and only then start reading it, we can break it down into the following steps:
Open your camera
Point the camera at the specific text
Capture once you can see, through the camera, the text you want to read
I suggest you make some small modifications to your code.
Add a Capture button to your surfaceview.xml (for example, as in the code below):
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent">
<SurfaceView
android:id="#+id/surfaceView"
android:layout_width="308dp"
android:layout_height="503dp"
android:layout_marginStart="64dp"
android:layout_marginTop="32dp"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
<Button
android:layout_width="match_parent"
android:layout_height="wrap_content"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintStart_toStartOf="parent"
android:id="#+id/capture"
android:layout_marginHorizontal="32dp"
android:layout_marginBottom="32dp"
android:text="Capture"
app:layout_constraintEnd_toEndOf="parent"/>
</androidx.constraintlayout.widget.ConstraintLayout>
Move the text processing out into a separate function:
private void capture() {
textRecognizer.setProcessor(new Detector.Processor<TextBlock>() {
@Override
public void release() {
}
@Override
public void receiveDetections(Detector.Detections<TextBlock> detections) {
SparseArray<TextBlock> sparseArray = detections.getDetectedItems();
StringBuilder stringBuilder = new StringBuilder();
for (int i = 0; i<sparseArray.size(); ++i){
TextBlock textBlock = sparseArray.valueAt(i);
if (textBlock != null && textBlock.getValue() !=null){
stringBuilder.append(textBlock.getValue() + " ");
}
}
final String stringText = stringBuilder.toString();
Handler handler = new Handler(Looper.getMainLooper());
handler.post(new Runnable() {
@Override
public void run() {
stringResult = stringText;
resultObtained();
}
});
}
});
}
Capture the text when the Capture button is clicked:
public void buttonStart(View view){
setContentView(R.layout.surfaceview);
Button capture = findViewById(R.id.capture);
capture.setOnClickListener(v -> capture());
textRecognizer();
}
As a result, here is the whole MainActivity:
import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.speech.tts.TextToSpeech;
import android.util.SparseArray;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import com.google.android.gms.vision.CameraSource;
import com.google.android.gms.vision.Detector;
import com.google.android.gms.vision.text.TextBlock;
import com.google.android.gms.vision.text.TextRecognizer;
import java.io.IOException;
import static android.Manifest.permission.CAMERA;
public class MainActivity extends AppCompatActivity {
private TextView textView;
private SurfaceView surfaceView;
private CameraSource cameraSource;
private TextRecognizer textRecognizer;
private TextToSpeech textToSpeech;
private String stringResult = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
ActivityCompat.requestPermissions(this, new String[]{CAMERA}, PackageManager.PERMISSION_GRANTED);
textToSpeech = new TextToSpeech(this, new TextToSpeech.OnInitListener() {
@Override
public void onInit(int status) {
}
});
}
@Override
protected void onDestroy() {
super.onDestroy();
cameraSource.release();
}
private void textRecognizer() {
textRecognizer = new TextRecognizer.Builder(getApplicationContext()).build();
cameraSource = new CameraSource.Builder(getApplicationContext(), textRecognizer)
.setRequestedPreviewSize(1280, 1024)
.setAutoFocusEnabled(true)
.build();
surfaceView = findViewById(R.id.surfaceView);
Context context = this;
surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
if (ActivityCompat.checkSelfPermission(context, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
// TODO: Consider calling
// ActivityCompat#requestPermissions
// here to request the missing permissions, and then overriding
// public void onRequestPermissionsResult(int requestCode, String[] permissions,
// int[] grantResults)
// to handle the case where the user grants the permission. See the documentation
// for ActivityCompat#requestPermissions for more details.
return;
}
cameraSource.start(surfaceView.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
cameraSource.stop();
}
});
}
private void capture() {
textRecognizer.setProcessor(new Detector.Processor<TextBlock>() {
@Override
public void release() {
}
@Override
public void receiveDetections(Detector.Detections<TextBlock> detections) {
SparseArray<TextBlock> sparseArray = detections.getDetectedItems();
StringBuilder stringBuilder = new StringBuilder();
for (int i = 0; i<sparseArray.size(); ++i){
TextBlock textBlock = sparseArray.valueAt(i);
if (textBlock != null && textBlock.getValue() !=null){
stringBuilder.append(textBlock.getValue() + " ");
}
}
final String stringText = stringBuilder.toString();
Handler handler = new Handler(Looper.getMainLooper());
handler.post(new Runnable() {
@Override
public void run() {
stringResult = stringText;
resultObtained();
}
});
}
});
}
private void resultObtained(){
setContentView(R.layout.activity_main);
textView = findViewById(R.id.textView);
textView.setText(stringResult);
textToSpeech.speak(stringResult, TextToSpeech.QUEUE_FLUSH, null, null);
}
public void buttonStart(View view){
setContentView(R.layout.surfaceview);
Button capture = findViewById(R.id.capture);
capture.setOnClickListener(v -> capture());
textRecognizer();
}
}
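One small addition, not part of the answer above but worth considering: the TextToSpeech engine is never released, so shutting it down alongside the camera avoids leaking the TTS service connection. A minimal sketch of an adjusted onDestroy:
@Override
protected void onDestroy() {
    super.onDestroy();
    if (cameraSource != null) {
        cameraSource.release();   // already done in the answer, kept here with a null check
    }
    if (textToSpeech != null) {
        textToSpeech.stop();      // stop any utterance in progress
        textToSpeech.shutdown();  // release the TTS engine
    }
}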

Using onCreate(Bundle savedInstanceState) with a wallpaper service? (Watchface)

I'm in need of some help.
Secondary Activity
package archtectsproductions.scriptpyandroidwearwatchface;
import android.app.Activity;
import android.content.res.Resources;
import android.os.Bundle;
import android.support.wearable.view.WatchViewStub;
import android.view.View;
import android.widget.Button;
import android.widget.RadioButton;
import android.widget.RadioGroup;
import android.widget.TextView;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.wearable.PutDataMapRequest;
import com.google.android.gms.wearable.PutDataRequest;
import com.google.android.gms.wearable.Wearable;
import static android.graphics.Color.BLUE;
import static android.graphics.Color.GREEN;
import static android.graphics.Color.RED;
public class WatchfaceConfigActivity extends Activity implements GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener {
private GoogleApiClient mGoogleApiClient;
private int mTextColor = 0xffffffff;
private TextView mTextView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_watchface_config);
mGoogleApiClient = new GoogleApiClient.Builder(this)
.addConnectionCallbacks(this)
.addOnConnectionFailedListener(this)
.addApi(Wearable.API)
.build();
Button buttonOK = (Button)findViewById(R.id.buttonOK);
buttonOK.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
RadioGroup radioTextColor =
(RadioGroup)findViewById(R.id.rGroup);
int selectedId = radioTextColor.getCheckedRadioButtonId();
switch (selectedId) {
default:
case R.id.rDarkpastel:
mTextColor = 0xffffffff;
break;
case R.id.Notepad:
mTextColor = GREEN;
break;
case R.id.rHarvenjark:
mTextColor = BLUE;
break;
case R.id.rVibrant:
mTextColor = RED;
break;
}
sendParamsAndFinish();
}
});
}
// sends data through Google API
private void sendParamsAndFinish() {
PutDataMapRequest putDataMapReq =
PutDataMapRequest.create("/watch_face_config_cliu");
putDataMapReq.getDataMap().putInt("text_color", mTextColor);
PutDataRequest putDataReq = putDataMapReq.asPutDataRequest();
Wearable.DataApi.putDataItem(mGoogleApiClient, putDataReq);
finish();
}
@Override
protected void onStart() {
super.onStart();
mGoogleApiClient.connect();
}
@Override
protected void onStop() {
if (mGoogleApiClient != null && mGoogleApiClient.isConnected()) {
mGoogleApiClient.disconnect();
}
super.onStop();
}
@Override
public void onConnected(Bundle bundle) {
}
@Override
public void onConnectionSuspended(int i) {
}
@Override
public void onConnectionFailed(ConnectionResult connectionResult) {
}
}
MAIN
public class WatchFaceCLiuService extends
CanvasWatchFaceService {
private static final long UPDATE_INTERVAL =
TimeUnit.SECONDS.toMillis(1);
@Override
public Engine onCreateEngine() {
return new Engine();
}
private class Engine extends CanvasWatchFaceService.Engine implements
GoogleApiClient.ConnectionCallbacks,
GoogleApiClient.OnConnectionFailedListener {
...
private GoogleApiClient mGoogleApiClient;
private int mTextColor = 0xffffffff;
private float offsetx = (float)(-50 + 100 * Math.random());
private float offsety = (float)(-50 + 100 * Math.random());
@Override
public void onCreate(SurfaceHolder holder) {
super.onCreate(holder);
...
mGoogleApiClient = new GoogleApiClient.Builder(WatchFaceCLiuService.this)
.addApi(Wearable.API)
.addConnectionCallbacks(this)
.addOnConnectionFailedListener(this)
.build();
}
...
@Override
public void onDraw(Canvas canvas, Rect bounds) {
...
canvas.drawText(ts1, tx1, ty1, mDigitalPaint);
...
}
private void releaseGoogleApiClient() {
if (mGoogleApiClient != null && mGoogleApiClient.isConnected()) {
Wearable.DataApi.removeListener(mGoogleApiClient,
onDataChangedListener);
mGoogleApiClient.disconnect();
}
}
@Override
public void onConnected(Bundle bundle) {
Wearable.DataApi.addListener(mGoogleApiClient,
onDataChangedListener);
Wearable.DataApi.getDataItems(mGoogleApiClient).
setResultCallback(onConnectedResultCallback);
}
private void updateParamsForDataItem(DataItem item) {
if ((item.getUri().getPath()).equals("/watch_face_config_cliu")) {
DataMap dataMap = DataMapItem.fromDataItem(item).getDataMap();
if (dataMap.containsKey("text_color")) {
int tc = dataMap.getInt("text_color");
mDigitalPaint.setColor(tc);
invalidate();
}
}
}
private final DataApi.DataListener onDataChangedListener =
new DataApi.DataListener() {
@Override
public void onDataChanged(DataEventBuffer dataEvents) {
for (DataEvent event : dataEvents) {
if (event.getType() == DataEvent.TYPE_CHANGED) {
DataItem item = event.getDataItem();
updateParamsForDataItem(item);
}
}
dataEvents.release();
if (isVisible() && !isInAmbientMode()) {
invalidate();
}
}
};
private final ResultCallback<DataItemBuffer>
onConnectedResultCallback =
new ResultCallback<DataItemBuffer>() {
@Override
public void onResult(DataItemBuffer dataItems) {
for (DataItem item : dataItems) {
updateParamsForDataItem(item);
}
dataItems.release();
if (isVisible() && !isInAmbientMode()) {
invalidate();
}
}
};
@Override
public void onConnectionSuspended(int i) {
}
@Override
public void onConnectionFailed(ConnectionResult connectionResult) {
}
@Override
public void onDestroy() {
mUpdateTimeHandler.removeMessages(MESSAGE_ID_UPDATE_TIME);
releaseGoogleApiClient();
super.onDestroy();
}
}
}
I really need help, I'm bashing my head against a brick wall. This just doesn't work; it won't send the int across at all. I've followed a guide and done my best, but it just won't send. Does anyone know a better way? Should I make a global? Would that work better? I'm not sure how I'd do it, as I seem limited in what code I can use in the canvas watch face service.
All I want is to send an int from one activity to another, using a watch face service, not a companion app. I want to learn, so if you do answer, please don't just paste the code straight out, unless it's easier to just paste the code so I can follow it and decipher it.
Thank you for any help.
<-- old
While I understand how to make a simple Java app that saves the user's selection of a radio button and then passes it over to a separate activity to, say, change the color of text, I have hit a point where I'm struggling to do the same in Android Wear.
The reason being, I can't implement the following:
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Radiogroup = (RadioGroup) findViewById(R.id.myyrad);
Radiogroup.setOnCheckedChangeListener(new RadioGroup.OnCheckedChangeListener() { ....
The above can't be implemented in the wallpaper service, can it?
Is there any alternative, or do I have to go the extremely confusing GoogleApiClient route? Any help would be greatly appreciated, thank you.
This was solved. Nothing was wrong with the tutorial or my code; Android Studio was pulling in a different Google Play services version. I changed it in the Gradle file.
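As a side note, unrelated to the Play services version fix above: data items are synced lazily by default, so even a working setup can appear to "not send" for a long time. Marking the request urgent (a small sketch against the sendParamsAndFinish() shown earlier) asks the Data Layer to deliver it immediately:
// In sendParamsAndFinish(), before handing the request to the Data API:
PutDataRequest putDataReq = putDataMapReq.asPutDataRequest();
putDataReq.setUrgent();   // request immediate sync instead of opportunistic batching
Wearable.DataApi.putDataItem(mGoogleApiClient, putDataReq);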

Music player implementation

I am new to Android. I'm trying to create a music player that plays a specific MP3 based on a count (controlled by the Rc variable), and the track should change based on the button sequence (controlled by the BitCount variable). I also have one Stop button, and clicking it should stop the music.
My problem is that everything works fine while the UI is in the foreground. When the screen goes to the background the music keeps running (that's OK, let the music run in the background), but when I bring my UI back from the background to the foreground, clicking the Stop button no longer stops the music; it keeps running continuously. The same problem happens when I rotate my phone.
Code:
package com.example.tallite;
import java.io.IOException;
import java.util.Timer;
import java.util.TimerTask;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.os.Handler;
import android.preference.PreferenceManager;
import android.app.Activity;
import android.content.SharedPreferences;
import android.view.Menu;
import android.view.View;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.SeekBar;
import android.widget.TextView;
import android.widget.SeekBar;
import android.widget.SeekBar.OnSeekBarChangeListener;
import android.widget.Toast;
public class MainActivity extends Activity implements OnSeekBarChangeListener {
MediaPlayer OurSong;
public Handler handler1 = new Handler();
Button Start;
Button Stop;
Button Button21;
Button Button22;
Button StopButton;
public int GProgreess=0;
int Rc=0;
int BitCount=6;
int SeekPos=0;
int Period=500;
int BreakVar=0;
Thread myThread ;
public TextView text1,text2,text3,text4;
private SeekBar bar;
private TextView textProgress,textAction;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Toast.makeText(this, "onCreate()", Toast.LENGTH_LONG).show();
text1 = (TextView)findViewById(R.id.textbit1);
text2 = (TextView)findViewById(R.id.textbit2);
text3 = (TextView)findViewById(R.id.textbit3);
text4 = (TextView)findViewById(R.id.textbit4);
Button21=(Button)findViewById(R.id.button21);
Button22=(Button)findViewById(R.id.button22);
StopButton=(Button)findViewById(R.id.buttonstop);
bar = (SeekBar)findViewById(R.id.seekBar1);
textProgress = (TextView)findViewById(R.id.textViewProgress);
OurSong=MediaPlayer.create(MainActivity.this,R.raw.a);
try {
OurSong.prepare();
} catch (IllegalStateException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
bar.setOnSeekBarChangeListener(this); // set seekbar listener.
/*handler1.post(runnable1);
stopPlaying();*/
StopButton.setOnClickListener(new View.OnClickListener() {
public void onClick(View v)
{
stopPlaying();
BitCount=5;
handler1.removeCallbacks(runnable1);
}/****End of on clk******/
});/*****End of set on clk listener*****/
Button21.setOnClickListener(new View.OnClickListener() {
public void onClick(View v)
{
stopPlaying();
Rc=0;
BitCount=13;
handler1.removeCallbacks(runnable1);
handler1.post(runnable1);
}/****End of on clk******/
});/*****End of set on clk listener*****/
Button22.setOnClickListener(new View.OnClickListener() {
public void onClick(View v)
{
stopPlaying();
Rc=0;
BitCount=15;
handler1.removeCallbacks(runnable1);
handler1.post(runnable1);
}/****End of on clk******/
});/*****End of set on clk listener*****/
}
@Override
protected void onStart() {
//the activity is become visible.
super.onStart();
/*handler1.removeCallbacks(runnable1);*/
}
@Override
protected void onPause() {
super.onPause();
}
@Override
protected void onResume() {
super.onResume();
}
@Override
protected void onStop() {
//the activity is no longer visible.
super.onStop();
Toast.makeText(this, "onStop()", Toast.LENGTH_LONG).show();
}
@Override
protected void onDestroy() {
//The activity about to be destroyed.
super.onDestroy();
Toast.makeText(this, "onDestroy()", Toast.LENGTH_LONG).show();
}
private void stopPlaying()
{
if (OurSong != null)
{
OurSong.stop();
OurSong.release();
OurSong = null;
}
}
/* private void stopPlaying()
{
OurSong.stop();
OurSong.release();
}*/
public Runnable runnable1= new Runnable() {
@Override
public void run() {
if(BitCount==13)
{
text1.setText("1");
text2.setText(""+Rc);
text3.setText(""+BitCount);
if(Rc==0||Rc==6||Rc==10)
{
stopPlaying();
OurSong=MediaPlayer.create(MainActivity.this,R.raw.a);
OurSong.start();
}
if(Rc==2||Rc==4||Rc==8||Rc==12)
{
stopPlaying();
OurSong=MediaPlayer.create(MainActivity.this,R.raw.b);
OurSong.start();
}
if(Rc==13)
{
stopPlaying();
OurSong=MediaPlayer.create(MainActivity.this,R.raw.c);
OurSong.start();
}
Rc++;
if(Rc>BitCount)
{
text4.setText(""+Rc);
Rc=0;
}
}/****End of bitcount=13****/
if(BitCount==15)
{
text1.setText("2");
text2.setText(""+Rc);
text3.setText(""+BitCount);
if(Rc==0||Rc==8||Rc==12)
{
stopPlaying();
OurSong=MediaPlayer.create(MainActivity.this,R.raw.a);
OurSong.start();
}
if(Rc==2||Rc==4||Rc==6||Rc==10||Rc==14)
{
stopPlaying();
OurSong=MediaPlayer.create(MainActivity.this,R.raw.b);
OurSong.start();
/* Toast.makeText(getApplicationContext(), "-",
Toast.LENGTH_LONG).show();*/
}
if(Rc==15)
{
stopPlaying();
OurSong=MediaPlayer.create(MainActivity.this,R.raw.c);
OurSong.start();
/*Toast.makeText(getApplicationContext(), "|",
Toast.LENGTH_LONG).show();*/
}
Rc++;
if(Rc>BitCount)
{
text4.setText(""+Rc);
Rc=0;
}
}/***End of bitcount=15***/
if(BitCount==5)
{
text1.setText("Rc"+Rc);
stopPlaying();
}
if(BitCount==6)
{
text1.setText("x"+Rc);
stopPlaying();
}
handler1.postDelayed(runnable1, Period);
}
};
public void onProgressChanged(SeekBar seekBar, int progress,
boolean fromUser) {
GProgreess=progress+20;
textProgress.setText("Bit/Minute: "+(progress+20));
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
seekBar.setSecondaryProgress(seekBar.getProgress());
SeekPos=GProgreess;
Period=(int)(30000/SeekPos);
/*textProgress.setText("Period: "+Period);*/
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
}
When you close your app, your activity is destroyed and hence the reference to your media player variable is lost. When you recreate your activity you get a new set of references, and hence you are unable to stop it.
You will have to create a Service, name it something like MediaPlayerService, which will deal with all media-related actions like play, pause, etc.
You can read about services here:
http://developer.android.com/guide/components/services.html
If you want an example of a media player, you can check RandomMusicPlayer, an excellent media player example provided by Android in the Android sample projects.
Here is my implementation of MediaPlayerService
MediaPlayerService Class:
package com.cyberinsane.musicplayerlibrary;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Intent;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnCompletionListener;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.net.Uri;
import android.os.Binder;
import android.os.IBinder;
import android.os.PowerManager;
import android.support.v4.app.NotificationCompat;
import android.util.Log;
import android.widget.Toast;
public class MediaPlayerService extends Service implements OnCompletionListener, OnPreparedListener, OnErrorListener {
private static final int NOTIFICATION_ID = 1;
public static final String INTENT_URL = "url";
public enum MediaState {
Playing, Paused, Stopped
}
private MediaState mState = MediaState.Stopped;
private MediaPlayer mPlayer;
private NotificationManager mNotificationManager;
public String mCurrentUrl = "";
public static final String ACTION_TOGGLE_PLAYBACK = "com.cyberinsane.musicplayerlibrary.action.TOGGLE_PLAYBACK";
public static final String ACTION_PLAY = "com.cyberinsane.musicplayerlibrary.action.PLAY";
public static final String ACTION_PAUSE = "com.cyberinsane.musicplayerlibrary.action.PAUSE";
public static final String ACTION_STOP = "com.cyberinsane.musicplayerlibrary.action.STOP";
@Override
public void onCreate() {
super.onCreate();
mNotificationManager = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
if (intent != null) {
String action = intent.getAction();
String url = intent.getExtras().getString(INTENT_URL);
if (url != null) {
if (!url.equals(mCurrentUrl)) {
mCurrentUrl = url;
mState = MediaState.Stopped;
relaxResources(true);
processPlayRequest();
} else {
if (action.equals(ACTION_TOGGLE_PLAYBACK)) {
processTogglePlaybackRequest();
} else if (action.equals(ACTION_PLAY)) {
processPlayRequest();
} else if (action.equals(ACTION_PAUSE)) {
processPauseRequest();
} else if (action.equals(ACTION_STOP)) {
processStopRequest(false);
}
}
}
}
return START_STICKY;
}
private void processTogglePlaybackRequest() {
if (mState == MediaState.Paused || mState == MediaState.Stopped) {
processPlayRequest();
} else {
processPauseRequest();
}
}
private void processPlayRequest() {
try {
if (mState == MediaState.Stopped) {
createMediaPlayerIfNeeded();
mPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
mPlayer.setDataSource(getApplicationContext(), Uri.parse(mCurrentUrl));
mPlayer.prepare();
} else if (mState == MediaState.Paused) {
mState = MediaState.Playing;
setUpAsForeground("Playing...");
if (!mPlayer.isPlaying()) {
mPlayer.start();
if (mIMediaPlayer != null) {
mIMediaPlayer.onAudioPlay();
}
}
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
private void processPauseRequest() {
if (mState == MediaState.Playing) {
mState = MediaState.Paused;
mPlayer.pause();
relaxResources(false);
if (mIMediaPlayer != null) {
mIMediaPlayer.onAudioPause();
}
}
}
private void processStopRequest(boolean force) {
if (mState == MediaState.Playing || mState == MediaState.Paused || force) {
mState = MediaState.Stopped;
relaxResources(true);
stopSelf();
}
}
private void createMediaPlayerIfNeeded() {
if (mPlayer == null) {
mPlayer = new MediaPlayer();
mPlayer.setWakeMode(getApplicationContext(), PowerManager.PARTIAL_WAKE_LOCK);
mPlayer.setOnPreparedListener(this);
mPlayer.setOnCompletionListener(this);
mPlayer.setOnErrorListener(this);
} else
mPlayer.reset();
}
private void relaxResources(boolean releaseMediaPlayer) {
stopForeground(true);
if (releaseMediaPlayer && mPlayer != null) {
mPlayer.reset();
mPlayer.release();
mPlayer = null;
}
}
@Override
public boolean onError(MediaPlayer mp, int what, int extra) {
Toast.makeText(getApplicationContext(), "Media player error! Resetting.", Toast.LENGTH_SHORT).show();
Log.e("MusicPlayer", "Error: what=" + String.valueOf(what) + ", extra=" + String.valueOf(extra));
mState = MediaState.Stopped;
relaxResources(true);
if (mIMediaPlayer != null) {
mIMediaPlayer.onError();
}
return true; // true indicates we handled the error
}
@Override
public void onPrepared(MediaPlayer mp) {
mState = MediaState.Playing;
updateNotification("Playing...");
if (!mPlayer.isPlaying()) {
mPlayer.start();
if (mIMediaPlayer != null) {
mIMediaPlayer.onAudioPlay();
}
}
}
@Override
public void onCompletion(MediaPlayer mp) {
mState = MediaState.Stopped;
relaxResources(true);
stopSelf();
if (mIMediaPlayer != null) {
mIMediaPlayer.onAudioStop();
}
}
/**
* Updates the notification.
*/
private void updateNotification(String text) {
mNotificationManager.notify(NOTIFICATION_ID, buildNotification(text).build());
}
private void setUpAsForeground(String text) {
startForeground(NOTIFICATION_ID, buildNotification(text).build());
}
private NotificationCompat.Builder buildNotification(String text) {
NotificationCompat.Builder builder = new NotificationCompat.Builder(this.getApplicationContext());
builder.setSmallIcon(R.drawable.ic_launcher)
.setOngoing(true)
.setContentTitle("Cyberinsane MusicPlayer")
.setContentText(text)
.setContentIntent(
PendingIntent.getActivity(getApplicationContext(), 0, new Intent(getApplicationContext(),
MediaPlayerActivity.class), PendingIntent.FLAG_UPDATE_CURRENT))
.setContentInfo("Awesome");
return builder;
}
@Override
public void onDestroy() {
mState = MediaState.Stopped;
relaxResources(true);
}
public class LocalBinder extends Binder {
MediaPlayerService getService() {
return MediaPlayerService.this;
}
}
@Override
public IBinder onBind(Intent intent) {
return mBinder;
}
private final IBinder mBinder = new LocalBinder();
private IMediaPlayer mIMediaPlayer;
public MediaPlayer getMediaPlayer() {
return mPlayer;
}
public void setIMediaPlayer(IMediaPlayer mediaPlayer) {
mIMediaPlayer = mediaPlayer;
}
public MediaState getMediaState() {
return mState;
}
public boolean isPlaying() {
return (mState == MediaState.Playing);
}
public String getCurrentUrl() {
return mCurrentUrl;
}
public long duration() {
if (mPlayer != null) {
return mPlayer.getDuration();
}
return 0;
}
public long position() {
if (mPlayer != null) {
return mPlayer.getCurrentPosition();
}
return 0;
}
public void seekTo(long position) {
if (mPlayer != null) {
mPlayer.seekTo((int) position);
}
}
}
MediaPlayerActivity Class:
package com.cyberinsane.musicplayerlibrary;
import java.io.File;
import android.app.Activity;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.IBinder;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.SeekBar;
import android.widget.SeekBar.OnSeekBarChangeListener;
public class MediaPlayerActivity extends Activity implements IMediaPlayer {
private ImageButton mButtonPlay;
private EditText mEditTextUrl;
private SeekBar mSeekBarProgress;
private boolean mIsBound;
private String mExtStorePath;
private MediaPlayerService mMediaService;
private Handler mSeekHandler = new Handler();
private Runnable mSeekRunnable = new Runnable() {
@Override
public void run() {
seekStartUpdation();
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.media_player_activity);
mButtonPlay = (ImageButton) findViewById(R.id.buttonPlay);
mEditTextUrl = (EditText) findViewById(R.id.editTextURL);
mSeekBarProgress = (SeekBar) findViewById(R.id.seekBarMediaProgress);
mExtStorePath = Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator;
doBindService();
initListeners();
}
private void initListeners() {
mButtonPlay.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
String urlExt = mEditTextUrl.getText().toString();
if (urlExt != null && !urlExt.equals("")) {
String url = mExtStorePath + urlExt;
startService(new Intent(MediaPlayerService.ACTION_TOGGLE_PLAYBACK).putExtra(
MediaPlayerService.INTENT_URL, url));
}
}
});
mSeekBarProgress.setOnSeekBarChangeListener(new OnSeekBarChangeListener() {
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
mMediaService.seekTo(seekBar.getProgress());
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
// empty
}
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
// empty
}
});
}
@Override
protected void onResume() {
super.onResume();
doBindService();
}
@Override
protected void onDestroy() {
super.onDestroy();
doUnbindService();
}
@Override
protected void onPause() {
super.onPause();
doUnbindService();
}
private ServiceConnection mConnection = new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName className, IBinder service) {
mMediaService = ((MediaPlayerService.LocalBinder) service).getService();
mMediaService.setIMediaPlayer(MediaPlayerActivity.this);
setControlState();
}
@Override
public void onServiceDisconnected(ComponentName className) {
mMediaService = null;
}
};
private void doBindService() {
bindService(new Intent(MediaPlayerActivity.this, MediaPlayerService.class), mConnection,
Context.BIND_AUTO_CREATE);
mIsBound = true;
}
private void doUnbindService() {
if (mIsBound) {
unbindService(mConnection);
mIsBound = false;
}
}
@Override
public void onAudioPlay() {
setPlayerState(true);
}
@Override
public void onAudioPause() {
setPlayerState(false);
}
@Override
public void onAudioStop() {
setPlayerState(false);
mSeekBarProgress.setProgress(0);
}
@Override
public void onError() {
// handle errors here
}
private void setControlState() {
if (mMediaService.isPlaying()) {
mEditTextUrl.setText(mMediaService.getCurrentUrl().replace(mExtStorePath, ""));
setPlayerState(true);
} else {
setPlayerState(false);
}
}
public void setPlayerState(boolean isPlaying) {
if (isPlaying) {
mButtonPlay.setImageResource(R.drawable.ic_pause);
mSeekBarProgress.setMax((int) mMediaService.duration());
seekStartUpdation();
} else {
mButtonPlay.setImageResource(R.drawable.ic_play);
seekStopUpdate();
}
}
public void seekStartUpdation() {
mSeekBarProgress.setProgress((int) mMediaService.position());
mSeekHandler.postDelayed(mSeekRunnable, 1000);
}
public void seekStopUpdate() {
mSeekHandler.removeCallbacks(mSeekRunnable);
}
}
Media Player Event Listener
package com.cyberinsane.musicplayerlibrary;
public interface IMediaPlayer {
public void onAudioPlay();
public void onAudioPause();
public void onAudioStop();
public void onError();
}
And finally my Manifest file
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.cyberinsane.musicplayerlibrary"
android:versionCode="1"
android:versionName="1.0" >
<uses-sdk
android:minSdkVersion="8"
android:targetSdkVersion="18" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<application
android:allowBackup="true"
android:icon="#drawable/ic_launcher"
android:label="#string/app_name"
android:theme="#style/AppTheme" >
<activity
android:name="com.cyberinsane.musicplayerlibrary.MediaPlayerActivity"
android:label="#string/app_name"
android:launchMode="singleTask" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<service
android:name="com.cyberinsane.musicplayerlibrary.MediaPlayerService"
android:exported="false" >
<intent-filter>
<action android:name="com.cyberinsane.musicplayerlibrary.action.TOGGLE_PLAYBACK" />
<action android:name="com.cyberinsane.musicplayerlibrary.action.PLAY" />
<action android:name="com.cyberinsane.musicplayerlibrary.action.PAUSE" />
<action android:name="com.cyberinsane.musicplayerlibrary.action.SKIP" />
<action android:name="com.cyberinsane.musicplayerlibrary.action.REWIND" />
<action android:name="com.cyberinsane.musicplayerlibrary.action.STOP" />
</intent-filter>
</service>
</application>
</manifest>
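One caveat if this answer is reused today: it targets SDK 18, and from Android 5.0 onward startService()/bindService() reject implicit Intents that only carry an action (the calls in MediaPlayerActivity use new Intent(MediaPlayerService.ACTION_TOGGLE_PLAYBACK)). A small adjustment, keeping the same action and extra, is to make the Intent explicit:
// Explicit service Intent, required on Android 5.0 (API 21) and later.
Intent intent = new Intent(MediaPlayerActivity.this, MediaPlayerService.class);
intent.setAction(MediaPlayerService.ACTION_TOGGLE_PLAYBACK);
intent.putExtra(MediaPlayerService.INTENT_URL, url);
startService(intent);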

Facebook Feed Dialog with game images and description below

I want to add Facebook integration to my app. At this point I have the login and post-to-wall functionality, but the wall post I have is only a simple wall post.
I want to achieve the kind of Facebook feed post that games typically use, with the game image and a description below it.
This is the current code I have:
package com.example.facebooktrial;
import android.app.Activity;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import com.facebook.android.AsyncFacebookRunner;
import com.facebook.android.DialogError;
import com.facebook.android.Facebook;
import com.facebook.android.Facebook.DialogListener;
import com.facebook.android.FacebookError;
@SuppressWarnings("deprecation")
public class AndroidFacebookConnectActivity extends Activity {
Button btnFbLogin;
Button btnPostToWall;
// Your Facebook APP ID
private static String APP_ID = "593769430655402"; // Replace your App ID here
// Instance of Facebook Class
private Facebook facebook;
private AsyncFacebookRunner mAsyncRunner;
String FILENAME = "AndroidSSO_data";
private SharedPreferences mPrefs;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
btnFbLogin = (Button) findViewById(R.id.btnFbLogin);
btnPostToWall = (Button) findViewById(R.id.btnFbPost);
facebook = new Facebook(APP_ID);
mAsyncRunner = new AsyncFacebookRunner(facebook);
btnFbLogin.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
loginToFacebook();
}
});
btnPostToWall.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
postToWall();
}
});
}
@SuppressWarnings("deprecation")
public void loginToFacebook() {
mPrefs = getPreferences(MODE_PRIVATE);
String access_token = mPrefs.getString("access_token", null);
long expires = mPrefs.getLong("access_expires", 0);
if (access_token != null) {
facebook.setAccessToken(access_token);
}
if (expires != 0) {
facebook.setAccessExpires(expires);
}
if (!facebook.isSessionValid()) {
facebook.authorize(this,
new String[] { "email", "publish_stream" },
new DialogListener() {
@Override
public void onCancel() {
// Function to handle cancel event
}
@Override
public void onComplete(Bundle values) {
// Function to handle complete event
// Edit Preferences and update facebook acess_token
SharedPreferences.Editor editor = mPrefs.edit();
editor.putString("access_token",
facebook.getAccessToken());
editor.putLong("access_expires",
facebook.getAccessExpires());
editor.commit();
}
@Override
public void onError(DialogError error) {
// Function to handle error
}
@Override
public void onFacebookError(FacebookError fberror) {
// Function to handle Facebook errors
}
});
}
}
@SuppressWarnings("deprecation")
public void postToWall() {
// post on user's wall.
facebook.dialog(this, "feed", new DialogListener() {
@Override
public void onFacebookError(FacebookError e) {
}
@Override
public void onError(DialogError e) {
}
@Override
public void onComplete(Bundle values) {
}
@Override
public void onCancel() {
}
});
}
}
I found the solution. Just make use of a Bundle where you store all the necessary information, like the picture, name, link and so on. After that, pass that Bundle to the Facebook dialog as an argument:
@SuppressWarnings("deprecation")
public void postToWall() {
// post on user's wall.
Bundle params = new Bundle();
params.putString("name", "Check it out, I am playing FLIP game!");
params.putString("caption", "Come on FLIP with me");
params.putString("description", "FLIP!");
params.putString("picture", "http://www.rawk.com/media/images/uploaded/products/2099/flip-hkd-black-complete-skateboard.3043.full.jpg");
facebook.dialog(this, "feed",params, new DialogListener() {
@Override
public void onFacebookError(FacebookError e) {
}
@Override
public void onError(DialogError e) {
}
@Override
public void onComplete(Bundle values) {
}
@Override
public void onCancel() {
}
});
}
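The answer mentions the link among the values that can go into the Bundle; with the same (deprecated) feed dialog it is passed like the other parameters, for example (the URL is just a placeholder):
params.putString("link", "https://example.com/your-game-page"); // tapping the post opens this URL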
