Unable to display the results of OCR - java

I have this MainActivity Java file from which I would like to display the results of the OCR, but I get the following errors instead:
1. ERROR: Creation of directory /storage/emulated/0/TesseractSample/tessdata failed, check does Android Manifest have permission to write to external storage.
2. Unable to copy files to tessdata java.io.FileNotFoundException: /storage/emulated/0/TesseractSample/tessdata/eng.traineddata: open failed: ENOENT (No such file or directory)
3. Unable to decode stream: java.io.FileNotFoundException: /storage/emulated/0/TesseractSample/imgs/ocr.jpg: open failed: EACCES (Permission denied)
4. Data path must contain subfolder tessdata!
Here is MainActivity.java:
import android.app.Activity;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Environment;
import android.os.Bundle;
import android.provider.MediaStore;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import com.googlecode.tesseract.android.TessBaseAPI;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
public class MainActivity extends Activity {
private static final String TAG = MainActivity.class.getSimpleName();
static final int PHOTO_REQUEST_CODE = 1;
private TessBaseAPI tessBaseApi;
TextView textView;
Uri outputFileUri;
private static final String lang = "eng";
String result = "empty";
private static final String DATA_PATH = Environment.getExternalStorageDirectory().toString() + "/TesseractSample/";
private static final String TESSDATA = "tessdata";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Button captureImg = (Button) findViewById(R.id.action_btn);
if (captureImg != null) {
captureImg.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
startCameraActivity();
}
});
}
textView = (TextView) findViewById(R.id.textResult);
}
/**
* to get high resolution image from camera
*/
private void startCameraActivity() {
try {
String IMGS_PATH = Environment.getExternalStorageDirectory().toString() + "/TesseractSample/imgs";
prepareDirectory(IMGS_PATH);
String img_path = IMGS_PATH + "/ocr.jpg";
outputFileUri = Uri.fromFile(new File(img_path));
final Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT, outputFileUri);
if (takePictureIntent.resolveActivity(getPackageManager()) != null) {
startActivityForResult(takePictureIntent, PHOTO_REQUEST_CODE);
}
} catch (Exception e) {
Log.e(TAG, e.getMessage());
}
}
@Override
public void onActivityResult(int requestCode, int resultCode,
Intent data) {
//making photo
if (requestCode == PHOTO_REQUEST_CODE && resultCode == Activity.RESULT_OK) {
prepareTesseract();
startOCR(outputFileUri);
} else {
Toast.makeText(this, "ERROR: Image was not obtained.", Toast.LENGTH_SHORT).show();
}
}
/**
* Prepare directory on external storage
*
* @param path
* @throws Exception
*/
private void prepareDirectory(String path) {
File dir = new File(path);
if (!dir.exists()) {
if (!dir.mkdirs()) {
Log.e(TAG, "ERROR: Creation of directory " + path + " failed, check does Android Manifest have permission to write to external storage.");
}
} else {
Log.i(TAG, "Created directory " + path);
}
}
private void prepareTesseract() {
try {
prepareDirectory(DATA_PATH + TESSDATA);
} catch (Exception e) {
e.printStackTrace();
}
copyTessDataFiles(TESSDATA);
}
/**
* Copy tessdata files (located on assets/tessdata) to destination directory
*
* @param path - name of directory with .traineddata files
*/
private void copyTessDataFiles(String path) {
try {
String fileList[] = getAssets().list(path);
for (String fileName : fileList) {
// open file within the assets folder
// if it is not already there copy it to the sdcard
String pathToDataFile = DATA_PATH + path + "/" + fileName;
if (!(new File(pathToDataFile)).exists()) {
InputStream in = getAssets().open(path + "/" + fileName);
OutputStream out = new FileOutputStream(pathToDataFile);
// Transfer bytes from in to out
byte[] buf = new byte[1024];
int len;
while ((len = in.read(buf)) > 0) {
out.write(buf, 0, len);
}
in.close();
out.close();
Log.d(TAG, "Copied " + fileName + "to tessdata");
}
}
} catch (IOException e) {
Log.e(TAG, "Unable to copy files to tessdata " + e.toString());
}
}
/**
* don't run this code in main thread - it stops UI thread. Create AsyncTask instead.
* https://developer.android.com/reference/android/os/AsyncTask.html
*
* @param imgUri
*/
private void startOCR(Uri imgUri) {
try {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 4; // 1 - means max size. 4 - means maxsize/4 size. Don't use value <4, because you need more memory in the heap to store your data.
Bitmap bitmap = BitmapFactory.decodeFile(imgUri.getPath(), options);
result = extractText(bitmap);
textView.setText(result);
} catch (Exception e) {
Log.e(TAG, e.getMessage());
}
}
private String extractText(Bitmap bitmap) {
try {
tessBaseApi = new TessBaseAPI();
} catch (Exception e) {
Log.e(TAG, e.getMessage());
if (tessBaseApi == null) {
Log.e(TAG, "TessBaseAPI is null. TessFactory not returning tess object.");
}
}
tessBaseApi.init(DATA_PATH, lang);
// //EXTRA SETTINGS
// //For example if we only want to detect numbers
// tessBaseApi.setVariable(TessBaseAPI.VAR_CHAR_WHITELIST, "1234567890");
//
// //blackList Example
// tessBaseApi.setVariable(TessBaseAPI.VAR_CHAR_BLACKLIST, "!@#$%^&*()_+=-qwertyuiop[]}{POIU" +
// "YTRWQasdASDfghFGHjklJKLl;L:'\"\\|~`xcvXCVbnmBNM,./<>?");
Log.d(TAG, "Training file loaded");
tessBaseApi.setImage(bitmap);
String extractedText = "empty result";
try {
extractedText = tessBaseApi.getUTF8Text();
} catch (Exception e) {
Log.e(TAG, "Error in recognizing text.");
}
tessBaseApi.end();
return extractedText;
}
}
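As the comment above startOCR() says, the decoding and recognition should not run on the UI thread. A minimal sketch of that idea, assuming the task lives as an inner class of MainActivity and requires import android.os.AsyncTask (the class name OcrTask is illustrative, not part of the original code):
private class OcrTask extends AsyncTask<Uri, Void, String> {
    @Override
    protected String doInBackground(Uri... uris) {
        // decode and recognize off the UI thread
        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inSampleSize = 4;
        Bitmap bitmap = BitmapFactory.decodeFile(uris[0].getPath(), options);
        return extractText(bitmap);
    }

    @Override
    protected void onPostExecute(String recognizedText) {
        // back on the UI thread, so it is safe to touch the TextView
        textView.setText(recognizedText);
    }
}

// in onActivityResult(), instead of startOCR(outputFileUri):
// new OcrTask().execute(outputFileUri);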

If you're targeting Android SDK 23 or higher, you have to request permissions from the user at run time in addition to requesting permission in the Android manifest file.
Alternatively, you can target SDK 22 or lower by changing the targetSdkVersion value in your build.gradle.
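A minimal sketch of the runtime request, assuming API 23+ and the support/androidx compat helpers (the request code value and the method name are arbitrary):
private static final int STORAGE_PERMISSION_REQUEST = 101; // arbitrary request code

// requires android.Manifest, android.content.pm.PackageManager, and
// ContextCompat / ActivityCompat from the support or androidx core libraries
private void requestStoragePermissionIfNeeded() {
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE)
            != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(this,
                new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},
                STORAGE_PERMISSION_REQUEST);
    }
}

@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    if (requestCode == STORAGE_PERMISSION_REQUEST
            && grantResults.length > 0
            && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
        // external storage is now writable; safe to prepare tessdata and take the picture
    }
}
Call requestStoragePermissionIfNeeded() before prepareTesseract() and startCameraActivity(), and only proceed once the permission has actually been granted.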

Related

Custom Object detection app crashed/stops after uploading video file and converting it to images for detection (Android Studio)

Context:
I used a template of an Android app from https://github.com/pramod722445/Custom_Object_Detection_App
and added Java files to detect objects in images extracted from a video. I trained the model on Google Colab, converted it to a TFLite file renamed bline_model.tflite, and added a label file bline_label.txt in Android Studio. When I run the application, it stops/crashes after the video-to-image phase is done. I think the error originates from either objectDetectorClass.java or PreviewImageActivity.java. How do I fix it?
CODE
MainActivity.java
package bline.detector.com.ffmpegvideoeditor.activity;
import android.Manifest;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.ContentUris;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.provider.DocumentsContract;
import android.provider.MediaStore;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
import android.widget.VideoView;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import com.github.hiteshsondhi88.libffmpeg.ExecuteBinaryResponseHandler;
import com.github.hiteshsondhi88.libffmpeg.FFmpeg;
import com.github.hiteshsondhi88.libffmpeg.LoadBinaryResponseHandler;
import com.github.hiteshsondhi88.libffmpeg.exceptions.FFmpegCommandAlreadyRunningException;
import com.github.hiteshsondhi88.libffmpeg.exceptions.FFmpegNotSupportedException;
import org.florescu.android.rangeseekbar.RangeSeekBar;
import org.opencv.android.OpenCVLoader;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import bline.detector.com.ffmpegvideoeditor.R;
public class MainActivity extends AppCompatActivity {
static {
if (!OpenCVLoader.initDebug())
Log.d("ERROR", "Unable to load OpenCV");
else
Log.d("SUCCESS", "OpenCV loaded");
}
private Button uploadVideo;
private Button extractImages;
private VideoView videoView;
private String filePath;
private static final String FILEPATH = "filepath";
private Uri selectedVideoUri;
private FFmpeg ffmpeg;
private static final int REQUEST_TAKE_GALLERY_VIDEO = 100;
private static final String TAG = "amirah";
private int choice = 0;
private ProgressDialog progressDialog;
private RangeSeekBar rangeSeekBar;
private Context mContext;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mContext = this;
uploadVideo = (Button) findViewById(R.id.uploadVideo);
extractImages = (Button) findViewById(R.id.extractImages);
videoView = (VideoView) findViewById(R.id.videoView);
rangeSeekBar = (RangeSeekBar) findViewById(R.id.rangeSeekBar);
progressDialog = new ProgressDialog(this);
progressDialog.setTitle(null);
progressDialog.setCancelable(false);
rangeSeekBar.setEnabled(false);
loadFFMpegBinary();
uploadVideo.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (Build.VERSION.SDK_INT >= 23)
getPermission();
else
uploadVideo();
}
});
extractImages.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
choice = 1;
if (selectedVideoUri != null) {
extractImagesVideo(rangeSeekBar.getSelectedMinValue().intValue() * 1000, rangeSeekBar.getSelectedMaxValue().intValue() * 1000);
} else
Toast.makeText(MainActivity.this, "Please Upload a Video", Toast.LENGTH_LONG).show();
}
});
}
private void getPermission() {
String[] params = null;
String writeExternalStorage = Manifest.permission.WRITE_EXTERNAL_STORAGE;
String readExternalStorage = Manifest.permission.READ_EXTERNAL_STORAGE;
int hasWriteExternalStoragePermission = ActivityCompat.checkSelfPermission(this, writeExternalStorage);
int hasReadExternalStoragePermission = ActivityCompat.checkSelfPermission(this, readExternalStorage);
List<String> permissions = new ArrayList<String>();
if (hasWriteExternalStoragePermission != PackageManager.PERMISSION_GRANTED)
permissions.add(writeExternalStorage);
if (hasReadExternalStoragePermission != PackageManager.PERMISSION_GRANTED)
permissions.add(readExternalStorage);
if (!permissions.isEmpty()) {
params = permissions.toArray(new String[permissions.size()]);
}
if (params != null && params.length > 0) {
ActivityCompat.requestPermissions(MainActivity.this,
params,
100);
} else
uploadVideo();
}
/**
* Handling response for permission request
*/
@Override
public void onRequestPermissionsResult(int requestCode,
String permissions[], int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
switch (requestCode) {
case 100: {
if (grantResults.length > 0
&& grantResults[0] == PackageManager.PERMISSION_GRANTED) {
uploadVideo();
}
}
break;
}
}
private void uploadVideo() {
try {
Intent intent = new Intent();
intent.setType("video/*");
intent.setAction(Intent.ACTION_GET_CONTENT);
startActivityForResult(Intent.createChooser(intent, "Select Video"), REQUEST_TAKE_GALLERY_VIDEO);
} catch (Exception e) {
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == RESULT_OK) {
if (requestCode == REQUEST_TAKE_GALLERY_VIDEO) {
selectedVideoUri = data.getData();
videoView.setVideoURI(selectedVideoUri);
videoView.start();
}
}
}
private void loadFFMpegBinary() {
try {
if (ffmpeg == null) {
ffmpeg = FFmpeg.getInstance(this);
}
ffmpeg.loadBinary(new LoadBinaryResponseHandler() {
@Override
public void onFailure() {
showUnsupportedExceptionDialog();
}
@Override
public void onSuccess() {
Log.d(TAG, "ffmpeg: successfully Loaded");
}
});
} catch (FFmpegNotSupportedException e) {
showUnsupportedExceptionDialog();
} catch (Exception e) {
Log.d(TAG, "exception: " + e);
}
}
private void showUnsupportedExceptionDialog() {
new AlertDialog.Builder(MainActivity.this)
.setIcon(android.R.drawable.ic_dialog_alert)
.setTitle("Not Supported")
.setMessage("Device Not Supported")
.setCancelable(false)
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
MainActivity.this.finish();
}
})
.create()
.show();
}
private void extractImagesVideo(int startMs, int endMs) {
File moviesDir = Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES
);
String filePrefix = "extract_picture";
String fileExtn = ".jpg";
String yourRealPath = getPath(MainActivity.this, selectedVideoUri);
File dir = new File(moviesDir, "VideoEditor");
int fileNo = 0;
while (dir.exists()) {
fileNo++;
dir = new File(moviesDir, "VideoEditor" + fileNo);
}
dir.mkdir();
filePath = dir.getAbsolutePath();
File dest = new File(dir, filePrefix + "%03d" + fileExtn);
Log.d(TAG, "startTrim: src: " + yourRealPath);
Log.d(TAG, "startTrim: dest: " + dest.getAbsolutePath());
String[] complexCommand = {"-y", "-i", yourRealPath, "-an", "-r", "1", "-ss", "" + startMs / 1000, "-t", "" + (endMs - startMs) / 1000, dest.getAbsolutePath()};
execFFmpegBinary(complexCommand);
}
//-y overwrite output files without asking
//-i ffmpeg reads from an arbitrary number of input "files" specified by the -i option
//-an disable audio recordings
//-r 1/2 will extract one image frame from every 2 seconds of video; similarly, -r 1 will extract one image frame from every second of the video
//remove the -r option if you want to extract all video frames as images from the specified time duration
//-ss seeks to position
//-t limit the duration of data read from the input file
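// For illustration only (hypothetical paths and times), the array built in
// extractImagesVideo() corresponds to a command line like:
//   ffmpeg -y -i /storage/emulated/0/Movies/input.mp4 -an -r 1 -ss 5 -t 10 /storage/emulated/0/Pictures/VideoEditor/extract_picture%03d.jpg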
private void execFFmpegBinary(final String[] command) {
try {
ffmpeg.execute(command, new ExecuteBinaryResponseHandler() {
@Override
public void onFailure(String message) {
Log.d(TAG, "FAILED with output: " + message);
}
@Override
public void onSuccess(String s) {
Log.d(TAG, "SUCCESS with output: " + s);
Intent intent = new Intent(MainActivity.this, PreviewImageActivity.class);
intent.putExtra(FILEPATH, filePath);
startActivity(intent);
}
@Override
public void onProgress(String s) {
Log.d(TAG, "Started command : ffmpeg " + command);
if (choice == 1)
progressDialog.setMessage("progress: " + s);
Log.d(TAG, "progress : " + s);
}
@Override
public void onStart() {
Log.d(TAG, "Started command : ffmpeg " + command);
progressDialog.setMessage("Processing...");
progressDialog.show();
}
@Override
public void onFinish() {
Log.d(TAG, "Finished command : ffmpeg " + command);
if (choice == 1) {
progressDialog.dismiss();
}
}
});
} catch (FFmpegCommandAlreadyRunningException e) {
// do nothing for now
}
}
public static boolean deleteDir(File dir) {
if (dir.isDirectory()) {
String[] children = dir.list();
if (children != null) {
for (int i = 0; i < children.length; i++) {
boolean success = deleteDir(new File(dir, children[i]));
if (!success) {
return false;
}
}
}
}
return dir.delete();
}
private String getPath(final Context context, final Uri uri) {
final boolean isKitKat = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT;
// DocumentProvider
if (isKitKat && DocumentsContract.isDocumentUri(context, uri)) {
// ExternalStorageProvider
if (isExternalStorageDocument(uri)) {
final String docId = DocumentsContract.getDocumentId(uri);
final String[] split = docId.split(":");
final String type = split[0];
if ("primary".equalsIgnoreCase(type)) {
return Environment.getExternalStorageDirectory() + "/" + split[1];
}
}
// DownloadsProvider
else if (isDownloadsDocument(uri)) {
final String id = DocumentsContract.getDocumentId(uri);
final Uri contentUri = ContentUris.withAppendedId(
Uri.parse("content://downloads/public_downloads"), Long.valueOf(id));
return getDataColumn(context, contentUri, null, null);
}
// MediaProvider
else if (isMediaDocument(uri)) {
final String docId = DocumentsContract.getDocumentId(uri);
final String[] split = docId.split(":");
final String type = split[0];
Uri contentUri = null;
if ("image".equals(type)) {
contentUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
} else if ("video".equals(type)) {
contentUri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
}
final String selection = "_id=?";
final String[] selectionArgs = new String[]{
split[1]
};
return getDataColumn(context, contentUri, selection, selectionArgs);
}
}
// MediaStore (and general)
else if ("content".equalsIgnoreCase(uri.getScheme())) {
return getDataColumn(context, uri, null, null);
}
// File
else if ("file".equalsIgnoreCase(uri.getScheme())) {
return uri.getPath();
}
return null;
}
/**
* Get the value of the data column for this Uri.
*/
private String getDataColumn(Context context, Uri uri, String selection,
String[] selectionArgs) {
Cursor cursor = null;
final String column = "_data";
final String[] projection = {
column
};
try {
cursor = context.getContentResolver().query(uri, projection, selection, selectionArgs,
null);
if (cursor != null && cursor.moveToFirst()) {
final int column_index = cursor.getColumnIndexOrThrow(column);
return cursor.getString(column_index);
}
} finally {
if (cursor != null)
cursor.close();
}
return null;
}
private boolean isMediaDocument(Uri uri) {
return "com.android.providers.media.documents".equals(uri.getAuthority());
}
private boolean isDownloadsDocument(Uri uri) {
return "com.android.providers.downloads.documents".equals(uri.getAuthority());
}
private boolean isExternalStorageDocument(Uri uri) {
return "com.android.externalstorage.documents".equals(uri.getAuthority());
}
}
objectDetectorClass.java
package bline.detector.com.ffmpegvideoeditor.activity;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
import org.tensorflow.lite.Interpreter;
import org.tensorflow.lite.gpu.GpuDelegate;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
public class objectDetectorClass {
// used to load model and predict
private Interpreter interpreter;
// store all label in array
private List<String> labelList;
private int INPUT_SIZE;
private int PIXEL_SIZE=3; // for RGB
private int IMAGE_MEAN=0;
private float IMAGE_STD=255.0f;
// use to initialize gpu in app
private GpuDelegate gpuDelegate;
private int height=0;
private int width=0;
objectDetectorClass(AssetManager assetManager,String modelPath, String labelPath,int inputSize) throws IOException{
INPUT_SIZE=inputSize;
// use to define gpu or cpu // no. of threads
Interpreter.Options options=new Interpreter.Options();
gpuDelegate=new GpuDelegate();
options.addDelegate(gpuDelegate);
options.setNumThreads(4); // set it according to your phone
// loading model
interpreter=new Interpreter(loadModelFile(assetManager,modelPath),options);
// load labelmap
labelList=loadLabelList(assetManager,labelPath);
}
private List<String> loadLabelList(AssetManager assetManager, String labelPath) throws IOException {
// to store label
List<String> labelList=new ArrayList<>();
// create a new reader
BufferedReader reader=new BufferedReader(new InputStreamReader(assetManager.open(labelPath)));
String line;
// loop through each line and store it to labelList
while ((line=reader.readLine())!=null){
labelList.add(line);
}
reader.close();
return labelList;
}
private ByteBuffer loadModelFile(AssetManager assetManager, String modelPath) throws IOException {
// use to get description of file
AssetFileDescriptor fileDescriptor=assetManager.openFd(modelPath);
FileInputStream inputStream=new FileInputStream(fileDescriptor.getFileDescriptor());
FileChannel fileChannel=inputStream.getChannel();
long startOffset =fileDescriptor.getStartOffset();
long declaredLength=fileDescriptor.getDeclaredLength();
return fileChannel.map(FileChannel.MapMode.READ_ONLY,startOffset,declaredLength);
}
public Mat recognizeVideo(Mat mat_image){
Bitmap bitmap=null;
bitmap=Bitmap.createBitmap(mat_image.cols(),mat_image.rows(),Bitmap.Config.ARGB_8888);
Utils.matToBitmap(mat_image,bitmap);
height=bitmap.getHeight();
width=bitmap.getWidth();
// scale the bitmap to input size of model
Bitmap scaledBitmap=Bitmap.createScaledBitmap(bitmap,INPUT_SIZE,INPUT_SIZE,false);
// convert bitmap to bytebuffer as model input should be in it
ByteBuffer byteBuffer=convertBitmapToByteBuffer(scaledBitmap);
// defining output
Object[] input=new Object[1];
input[0]=byteBuffer;
Map<Integer,Object> output_map=new TreeMap<>();
// create treemap of three array (boxes,score,classes)
float[][][]boxes =new float[1][10][4];
// 10: top 10 objects detected
// 4: coordinates in image
float[][] scores=new float[1][10];
// stores scores of 10 object
float[][] classes=new float[1][10];
// stores class of object
// add it to object_map;
output_map.put(0,boxes);
output_map.put(1,classes);
output_map.put(2,scores);
// now predict
interpreter.runForMultipleInputsOutputs(input,output_map);
Object value=output_map.get(0);
Object Object_class=output_map.get(1);
Object score=output_map.get(2);
// loop through each object
// as output has only 10 boxes
for (int i=0;i<10;i++){
float class_value=(float) Array.get(Array.get(Object_class,0),i);
float score_value=(float) Array.get(Array.get(score,0),i);
// define threshold for score
// Here you can change threshold according to your model
if(score_value>0.8){
Object box1=Array.get(Array.get(value,0),i);
// multiply it with Original height and width of frame
float top=(float) Array.get(box1,0)*height;
float left=(float) Array.get(box1,1)*width;
float bottom=(float) Array.get(box1,2)*height;
float right=(float) Array.get(box1,3)*width;
// draw rectangle in Original frame // starting point //ending point of box //color of box thickness
Imgproc.rectangle(mat_image,new Point(left,top),new Point(right,bottom),new Scalar(0, 255, 0, 255),2);
// write text on frame
// string of class name of object // starting point // color of text // size of text
Imgproc.putText(mat_image,labelList.get((int) class_value),new Point(left,top),3,1,new Scalar(255, 0, 0, 255),2);
}
}
return mat_image;
}
private ByteBuffer convertBitmapToByteBuffer(Bitmap bitmap) {
ByteBuffer byteBuffer;
int quant=1;
int size_images=INPUT_SIZE;
if(quant==0){
byteBuffer=ByteBuffer.allocateDirect(1*size_images*size_images*3);
}
else {
byteBuffer=ByteBuffer.allocateDirect(4*1*size_images*size_images*3);
}
byteBuffer.order(ByteOrder.nativeOrder());
int[] intValues=new int[size_images*size_images];
bitmap.getPixels(intValues,0,bitmap.getWidth(),0,0,bitmap.getWidth(),bitmap.getHeight());
int pixel=0;
for (int i=0;i<size_images;++i){
for (int j=0;j<size_images;++j){
final int val=intValues[pixel++];
if(quant==0){
byteBuffer.put((byte) ((val>>16)&0xFF));
byteBuffer.put((byte) ((val>>8)&0xFF));
byteBuffer.put((byte) (val&0xFF));
}
else {
byteBuffer.putFloat((((val >> 16) & 0xFF))/255.0f);
byteBuffer.putFloat((((val >> 8) & 0xFF))/255.0f);
byteBuffer.putFloat((((val) & 0xFF))/255.0f);
}
}
}
return byteBuffer;
}
}
PreviewImageActivity.java
package bline.detector.com.ffmpegvideoeditor.activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageSwitcher;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.ViewSwitcher;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import org.opencv.android.Utils;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import bline.detector.com.ffmpegvideoeditor.R;
public class PreviewImageActivity extends AppCompatActivity {
private static final String FILEPATH = "filepath";
private objectDetectorClass objectDetectorClass;
private ImageSwitcher imageSwitcher;
private Button prev;
private Button next;
int placement = 0;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_preview_image);
TextView tvInstruction = (TextView) findViewById(R.id.tvInstruction);
imageSwitcher = (ImageSwitcher) findViewById(R.id.imageSwitcher);
prev = findViewById(R.id.prev);
next = findViewById(R.id.next);
ArrayList<Bitmap> bitmap = new ArrayList<Bitmap>();
ArrayList<Bitmap> bitmaps = new ArrayList<Bitmap>();
ArrayList<Drawable> drawables = new ArrayList<Drawable>();
try {
objectDetectorClass = new objectDetectorClass(getAssets(), "bline_model.tflite", "bline_label.txt", 300);
Log.d("PreviewImageActivity", "Model is successfully loaded");
} catch (IOException e) {
Log.d("PreviewImageActivity", "Getting some error");
e.printStackTrace();
}
imageSwitcher.setFactory(new ViewSwitcher.ViewFactory() {
@Override
public View makeView() {
ImageView imageView = new ImageView(getApplicationContext());
imageView.setScaleType(ImageView.ScaleType.FIT_CENTER);
return imageView;
}
});
prev.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (placement > 0) {
placement--;
imageSwitcher.setImageDrawable(drawables.get(placement));
} else {
Toast.makeText(PreviewImageActivity.this, "No Previous Images", Toast.LENGTH_SHORT).show();
}
}
});
next.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (placement < drawables.size() - 1) {
placement++;
imageSwitcher.setImageDrawable(drawables.get(placement));
} else {
Toast.makeText(PreviewImageActivity.this, "No More Images", Toast.LENGTH_SHORT).show();
}
}
});
String filePath = getIntent().getStringExtra(FILEPATH);
ArrayList<String> f = new ArrayList<String>();
File dir = new File(filePath);
tvInstruction.setText("Images stored at path " + filePath);
File[] listFile;
listFile = dir.listFiles();
for (File e : listFile) {
f.add(e.getAbsolutePath());
}
if (f != null) {
Log.d("PreviewImageAdapter", "filepath: " + f);
for (int position = 0; position < f.size(); position++) {
Bitmap bmp = BitmapFactory.decodeFile(f.get(position));
bitmap.add(bmp);
Log.d("PreviewImageAdapter", "bitmap: " + bitmap);
if (bitmap.size() == f.size()) {
for (int m = 0; m < bitmap.size(); m++) {
Mat selected_image = new Mat(bitmap.get(m).getHeight(), bitmap.get(m).getWidth(), CvType.CV_8UC4);
Utils.bitmapToMat(bitmap.get(m), selected_image);
selected_image = objectDetectorClass.recognizeVideo(selected_image);
Bitmap bitmap1 = Bitmap.createBitmap(selected_image.cols(), selected_image.rows(), Bitmap.Config.ARGB_8888);
Utils.matToBitmap(selected_image, bitmap1);
bitmaps.add(bitmap1);
Log.d("PreviewImageAdapter", "detected images: " + bitmaps);
}
if (bitmaps.size() == bitmap.size()) {
for (int n = 0; n < bitmaps.size(); n++) {
Drawable drawable = new BitmapDrawable(bitmaps.get(n));
drawables.add(drawable);
Log.d("PreviewImageAdapter", "drawables: " + drawables);
}
}
imageSwitcher.setImageDrawable(drawables.get(0));
placement = 0;
}
}
}
}
}
I am also new to Java, so I'm sorry if my question is silly.

Download image and display from local hidden folder in Android

I am trying to download an image from the internet and store it locally in a hidden folder, so that the images are not visible to the user in their gallery.
Here is my code to do so. The image is displayed only while the device is connected to the internet; in other words, the image is not being saved on the device, and an exception is thrown.
Here is my ImageStorage class:
package com.example.adhish.downloadretriveimage;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Environment;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
/**
* Created by appy-20 on 6/1/17.
*/
public class ImageStorage {
public static String saveToSdCard(Bitmap bitmap, String filename) {
String stored = null;
File sdcard = Environment.getExternalStorageDirectory() ;
File folder = new File(sdcard.getAbsoluteFile(), ".test_directory");//the dot makes this directory hidden to the user
folder.mkdir();
File file = new File(folder.getAbsoluteFile(), filename + ".jpg") ;
if (file.exists())
return stored ;
try {
FileOutputStream out = new FileOutputStream(file);
bitmap.compress(Bitmap.CompressFormat.JPEG, 90, out);
out.flush();
out.close();
stored = "success";
} catch (Exception e) {
e.printStackTrace();
}
return stored;
}
public static File getImage(String imagename) {
File mediaImage = null;
try {
String root = Environment.getExternalStorageDirectory().toString();
File myDir = new File(root);
if (!myDir.exists())
return null;
mediaImage = new File(myDir.getPath() + "/.test_directory/"+imagename);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return mediaImage;
}
public static boolean checkifImageExists(String imagename)
{
Bitmap b = null ;
File file = ImageStorage.getImage("/"+imagename+".jpg");
String path = file.getAbsolutePath();
if (path != null)
b = BitmapFactory.decodeFile(path);
if(b == null || b.equals(""))
{
return false ;
}
return true ;
}
}
and here is my MainActivity.java:
package com.example.adhish.downloadretriveimage;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.AsyncTask;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.widget.ImageView;
import java.io.File;
import java.io.FileOutputStream;
import java.net.URL;
import java.net.URLConnection;
public class MainActivity extends AppCompatActivity {
ImageView imageView;
Bitmap b;
String imagename;
String imgurl;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
imageView=(ImageView)findViewById(R.id.imageView);
imagename="394968_538b_7";
imgurl="https://udemy-images.udemy.com/course/750x422/394968_538b_7.jpg";
if(ImageStorage.checkifImageExists(imagename))
{
File file = ImageStorage.getImage("/"+imagename+".jpg");
// File file = ImageStorage.getImage("https://udemy-images.udemy.com/course/750x422/394968_538b_7.jpg");
String path = file.getAbsolutePath();
if (path != null){
b = BitmapFactory.decodeFile(path);
imageView.setImageBitmap(b);
}
} else {
new GetImages(imgurl, imageView, imagename).execute() ;
}
}
private class GetImages extends AsyncTask<Object, Object, Object> {
private String requestUrl, imagename_;
private ImageView view;
private Bitmap bitmap;
private FileOutputStream fos;
private GetImages(String requestUrl, ImageView view, String _imagename_) {
this.requestUrl = requestUrl;
this.view = view;
this.imagename_ = _imagename_;
}
@Override
protected Object doInBackground(Object... objects) {
try {
URL url = new URL(requestUrl);
URLConnection conn = url.openConnection();
bitmap = BitmapFactory.decodeStream(conn.getInputStream());
} catch (Exception ex) {
}
return null;
}
@Override
protected void onPostExecute(Object o) {
if (!ImageStorage.checkifImageExists(imagename_)) {
view.setImageBitmap(bitmap);
ImageStorage.saveToSdCard(bitmap, imagename_);
}
}
}
}
I have already given the external storage read/write permission in my Manifest.
I found the solution to this problem: I am storing the images in an SQLite database, and here is the code for it:
https://github.com/adhishlal/URL_to_DB_Image
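For reference, a minimal sketch of that approach, compressing the bitmap to a byte array and storing it as a BLOB (the database, table, and column names here are illustrative and not taken from the linked repository):
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import java.io.ByteArrayOutputStream;

public class ImageDbHelper extends SQLiteOpenHelper {

    public ImageDbHelper(Context context) {
        super(context, "images.db", null, 1);
    }

    @Override
    public void onCreate(SQLiteDatabase db) {
        db.execSQL("CREATE TABLE images (name TEXT PRIMARY KEY, data BLOB)");
    }

    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        db.execSQL("DROP TABLE IF EXISTS images");
        onCreate(db);
    }

    // compress the bitmap and insert (or replace) it as a BLOB
    public void saveImage(String name, Bitmap bitmap) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        bitmap.compress(Bitmap.CompressFormat.JPEG, 90, out);
        ContentValues values = new ContentValues();
        values.put("name", name);
        values.put("data", out.toByteArray());
        getWritableDatabase().insertWithOnConflict("images", null, values,
                SQLiteDatabase.CONFLICT_REPLACE);
    }

    // read the BLOB back and decode it into a Bitmap (null if nothing stored yet)
    public Bitmap loadImage(String name) {
        Cursor c = getReadableDatabase().query("images", new String[]{"data"},
                "name=?", new String[]{name}, null, null, null);
        try {
            if (c.moveToFirst()) {
                byte[] data = c.getBlob(0);
                return BitmapFactory.decodeByteArray(data, 0, data.length);
            }
            return null;
        } finally {
            c.close();
        }
    }
}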

Android SD card writing not working

So I have looked up several ways to write to an SD card in Android, but none of them seem to actually result in anything being written. I have already set the permission in the manifest. Here is the FileIO class I wrote:
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
//import android.app.Activity;
//import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
public class FileIO {
private static final String TAG = "FileIO";
private String filename;
private File sdCard;
private File dir;
public FileIO(String path, String filename){
Log.d(TAG, "Creating new File IO");
this.filename = filename;
boolean mExternalStorageAvailable = false;
boolean mExternalStorageWriteable = false;
String state = Environment.getExternalStorageState();
if (Environment.MEDIA_MOUNTED.equals(state)) {
// We can read and write the media
Log.d(TAG, "Read and Write OK");
} else if (Environment.MEDIA_MOUNTED_READ_ONLY.equals(state)) {
// We can only read the media
Log.w(TAG, "Read only OK");
} else {
// Something else is wrong. It may be one of many other states, but all we need
// to know is we can neither read nor write
Log.w(TAG, "Read and Write BLOCKED");
}
sdCard = Environment.getExternalStorageDirectory();
Log.d(TAG, "Writing to file: " + sdCard.getAbsolutePath()+path);
dir = new File (sdCard.getAbsolutePath()+path);
if (dir.mkdirs() || dir.isDirectory()) {
Log.d(TAG, "SUCCESS - Created directory");
} else {
Log.d(TAG, "FAILED - Create directory");
}
}
public void writeToFile(String s){
File file = new File(dir,this.filename);
try {
FileOutputStream f = new FileOutputStream(file,true); //True = Append to file, false = Overwrite
OutputStreamWriter osw = new OutputStreamWriter(f);
osw.write(s);
osw.flush();
f.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
System.out.printf("\nFile not found. Make sure to add WRITE_EXTERNAL_STORAGE permission to the manifest");
} catch (IOException e) {
e.printStackTrace();
}
}
}
And this is the class it is being used in:
import android.app.Activity;
import android.text.format.DateFormat;
import android.widget.TextView;
public class AudioClipLogWrapper implements AudioClipListener
{
private TextView log;
private Activity context;
private double previousFrequency = -1;
private int previousVolume = -1;
private int previousMax = -1;
private FileIO fileIO;
public AudioClipLogWrapper(TextView log, Activity context)
{
this.log = log;
this.context = context;
String dateStamp = (DateFormat.format("dd-MM-yyyy-hh-mm-ss", new java.util.Date()).toString());
fileIO = new FileIO("/Android/data/com.uni.Lab7/files/",String.format("audio-%s.txt",dateStamp));
}
@Override
public boolean heard(short[] audioData, int sampleRate)
{
final double freq = ZeroCrossing.calculate(sampleRate, audioData);
final int maxAmplitude = AudioUtil.getMaxValue(audioData);
final double volume = AudioUtil.rootMeanSquared(audioData);
final StringBuilder message = new StringBuilder();
if ((((int)volume) > (4 * previousVolume)) && (maxAmplitude > (4 * previousMax)) ) {
message.append(" Clap!");
}
previousVolume = (int) volume;
previousMax= (int) maxAmplitude;
fileIO.writeToFile(String.format("%d, %d, %f\n",(int)volume, maxAmplitude, freq));
context.runOnUiThread(new Runnable()
{
@Override
public void run()
{
AudioTaskUtil.appendToStartOfLog(log, message.toString());
}
});
return false;
}
}
When I look at the log it suggests everything should work:
01-12 23:21:55.815 2752-2752/com.uni.Lab7 D/FileIO﹕ Creating new File IO
01-12 23:21:55.825 2752-2752/com.uni.Lab7 D/FileIO﹕ Read and Write OK
01-12 23:21:55.825 2752-2752/com.uni.Lab7 D/FileIO﹕ Writing to file: /storage/sdcard0/Android/data/com.uni.Lab7/files/
01-12 23:21:55.835 2752-2752/com.uni.Lab7 D/FileIO﹕ SUCCESS - Created directory
But when I take the SD card out of the phone and look at it on my computer, there is no folder and no file. I have looked at "Android write to sd card folder" and "Write a file in SDcard in Android" and I have done what they suggested.
The phone is a Samsung GT-S53110B running 4.1.2, so in my Gradle file and manifest I have the minimum SDK set to 8 and the target set to 16.

Reduce Image size to KB's before uploading ANDROID, from following code

I want to reduce the image size to KBs before uploading it to the server. Basically, my code captures an image, stores it locally, and then uploads it to the server.
I just want to reduce the image size while it is stored locally on external storage. I found a good answer here,
but the problem is that I don't really know how to combine the code in the link with my code below.
I'm new to Android, please help!
package com.project.camera;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
public class MainActivity extends Activity {
// LogCat tag
private static final String TAG = MainActivity.class.getSimpleName();
// Camera activity request codes
private static final int CAMERA_CAPTURE_IMAGE_REQUEST_CODE = 100;
public static final int MEDIA_TYPE_IMAGE = 1;
private Uri fileUri; // file url to store image
private Button btnCapturePicture;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
btnCapturePicture = (Button) findViewById(R.id.btnCapturePicture);
/**
* Capture image button click event
*/
btnCapturePicture.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// capture picture
captureImage();
}
});
// Checking camera availability
if (!isDeviceSupportCamera()) {
Toast.makeText(getApplicationContext(),
"Sorry! Your device doesn't support camera",
Toast.LENGTH_LONG).show();
// will close the app if the device doesn't have a camera
finish();
}
}
/**
* Checking device has camera hardware or not
* */
private boolean isDeviceSupportCamera() {
if (getApplicationContext().getPackageManager().hasSystemFeature(
PackageManager.FEATURE_CAMERA)) {
// this device has a camera
return true;
} else {
// no camera on this device
return false;
}
}
/**
* Launching camera app to capture image
*/
private void captureImage() {
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
fileUri = getOutputMediaFileUri(MEDIA_TYPE_IMAGE);
intent.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
// start the image capture Intent
startActivityForResult(intent, CAMERA_CAPTURE_IMAGE_REQUEST_CODE);
}
/**
* Here we store the file url as it will be null after returning from camera
* app
*/
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
// save file url in bundle as it will be null on screen orientation
// changes
outState.putParcelable("file_uri", fileUri);
}
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
// get the file url
fileUri = savedInstanceState.getParcelable("file_uri");
}
/**
* Receiving activity result method will be called after closing the camera
* */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
// if the result is capturing Image
if (requestCode == CAMERA_CAPTURE_IMAGE_REQUEST_CODE) {
if (resultCode == RESULT_OK) {
// successfully captured the image
// launching upload activity
launchUploadActivity(true);
} else if (resultCode == RESULT_CANCELED) {
// user cancelled Image capture
Toast.makeText(getApplicationContext(),
"User cancelled image capture", Toast.LENGTH_SHORT)
.show();
} else {
// failed to capture image
Toast.makeText(getApplicationContext(),
"Sorry! Failed to capture image", Toast.LENGTH_SHORT)
.show();
}
}
}
private void launchUploadActivity(boolean isImage){
Intent i = new Intent(MainActivity.this, UploadActivity.class);
i.putExtra("filePath", fileUri.getPath());
i.putExtra("isImage", isImage);
startActivity(i);
}
/**
* ------------ Helper Methods ----------------------
* */
/**
* Creating file uri to store image
*/
public Uri getOutputMediaFileUri(int type) {
return Uri.fromFile(getOutputMediaFile(type));
}
/**
* returning image
*/
private static File getOutputMediaFile(int type) {
// External sdcard location
File mediaStorageDir = new File(
Environment
.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
Config.IMAGE_DIRECTORY_NAME);
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.d(TAG, "Oops! Failed create "
+ Config.IMAGE_DIRECTORY_NAME + " directory");
return null;
}
}
// Create a media file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss",
Locale.getDefault()).format(new Date());
File mediaFile;
if (type == MEDIA_TYPE_IMAGE) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator
+ "Selfie_" + timeStamp + ".jpg");
} else {
return null;
}
return mediaFile;
}
}
Use this (I edited the onPictureTaken() function to fit your needs).
It will return false if the input path is not valid; errors during compression are only logged.
public boolean resizeImage(String originalFilePath, String compressedFilePath) {
InputStream in = null;
try {
in = new FileInputStream(originalFilePath);
} catch (FileNotFoundException e) {
Log.e("TAG","originalFilePath is not valid", e);
}
if (in == null) {
return false;
}
BitmapFactory.Options options = new BitmapFactory.Options();
Bitmap preview_bitmap = BitmapFactory.decodeStream(in, null, options);
ByteArrayOutputStream stream = new ByteArrayOutputStream();
preview_bitmap.compress(Bitmap.CompressFormat.JPEG, 60, stream);
byte[] byteArray = stream.toByteArray();
FileOutputStream outStream = null;
try {
outStream = new FileOutputStream(compressedFilePath);
outStream.write(byteArray);
outStream.close();
} catch (Exception e) {
Log.e("TAG","could not save", e);
}
return true;
}
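A minimal sketch of how you could wire this into the code above: call it in onActivityResult() after a successful capture and pass the compressed file to the upload activity instead of the original (the compressed file name here is illustrative):
// inside onActivityResult(), in the RESULT_OK branch, instead of launchUploadActivity(true):
String originalPath = fileUri.getPath();
String compressedPath = originalPath.replace(".jpg", "_small.jpg"); // illustrative naming
if (resizeImage(originalPath, compressedPath)) {
    Intent i = new Intent(MainActivity.this, UploadActivity.class);
    i.putExtra("filePath", compressedPath); // upload the smaller file
    i.putExtra("isImage", true);
    startActivity(i);
}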

Unable to start activity ComponentInfo{com.naviiid.soundshot/com.naviiid.soundshot.MicroFilm}: java.lang.NullPointerException

In my main activity, a single button click launches the activity below, which takes an image with the default camera application in Android and then records a voice clip.
However, whenever I click the button in my main activity I get this error:
java.lang.RuntimeException: Unable to start activity
ComponentInfo{com.naviiid.soundshot/com.naviiid.soundshot.MicroFilm}:
java.lang.NullPointerException
At one point the camera application launches, but before that my application has already stopped!
Here's the code (excuse me that it is a little long):
package com.naviiid.soundshot;
import java.io.File;
import java.util.Calendar;
import android.R.drawable;
import android.app.Activity;
import android.content.Intent;
import android.graphics.drawable.Drawable;
import android.media.MediaRecorder;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ImageView;
import android.widget.Toast;
public class MicroFilm extends Activity {
private static final int CAPTURE_IMAGE_ACTIVITY_REQUEST_CODE = 100;
private Uri imageUri;
public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
private static File mediaStorageDir;
private ImageView image;
private ImageView record;
private ImageView save;
private ImageView capture;
private boolean visibleButtons = true;
private MediaRecorder soundRecorder;
private String mediaFilePath;
private static Calendar cal;
boolean recording = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_micro_film);
String mediaState = Environment.getExternalStorageState();
if (!mediaState.equals(Environment.MEDIA_MOUNTED)){
Toast.makeText(MicroFilm.this, "Media unmounted! \n Mount media firts..", Toast.LENGTH_LONG).show();
Log.d("StorageState", "Media Unmounted");
finish();
}
mediaStorageDir = new File(Environment
.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM)
.getAbsolutePath()
+ "/MicroFilm");
takePicture(); // this method will capture image from camera intent and
// returns to main activity if user cancels
// View things
image = (ImageView) findViewById(R.id.imageViewPicture);
record = (ImageView) findViewById(R.id.imageViewMic);
save = (ImageView) findViewById(R.id.imageViewSave);
capture = (ImageView) findViewById(R.id.imageViewCapture);
setBackgroundImage(); // shows taken picture
captureSound(); // prepares for recording, recording starts with imageButton click
}
private void captureSound() {
// prepare things to record
soundRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
soundRecorder.setOutputFormat(MediaRecorder.OutputFormat.AMR_NB);
soundRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
File soundFile = getNewSoundFile();
soundRecorder.setOutputFile(soundFile.getAbsolutePath());
try{
soundRecorder.prepare();
}catch (Exception e) {
Log.d("SoundRecorder", "unable to prepare");
Toast.makeText(MicroFilm.this, "Mic in use", Toast.LENGTH_SHORT).show();
return;
}
//listeners
soundRecorderListeners();
}
private void soundRecorderListeners() {
record.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View arg0) {
if (recording){
soundRecorder.stop();
soundRecorder.release();
recording = false;
record.setBackgroundResource(drawable.ic_btn_speak_now);
}else{
soundRecorder.start();
recording = true;
record.setBackgroundResource(drawable.presence_audio_busy);
}
}
});
}
private File getNewSoundFile() {
// Create a media file name
String timeStamp;
cal = Calendar.getInstance();
timeStamp = "" + cal.get(Calendar.YEAR) + cal.get(Calendar.MONTH) + 1
+ cal.get(Calendar.DAY_OF_MONTH) + "_"
+ cal.get(Calendar.HOUR_OF_DAY) + cal.get(Calendar.MINUTE)
+ cal.get(Calendar.SECOND);
mediaFilePath = mediaStorageDir.getAbsolutePath() + "/AUD_" + timeStamp
+ ".amr";
return new File(mediaFilePath);
}
private void setBackgroundImage() {
// shows taken picture in background
image.setImageDrawable(Drawable.createFromPath(imageUri.getPath()));
image.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View arg0) {
if (visibleButtons) {
record.setVisibility(View.INVISIBLE);
save.setVisibility(View.INVISIBLE);
capture.setVisibility(View.INVISIBLE);
visibleButtons = false;
} else {
record.setVisibility(View.VISIBLE);
save.setVisibility(View.VISIBLE);
capture.setVisibility(View.VISIBLE);
visibleButtons = true;
}
}
});
}
private void takePicture() {
Intent camera = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
imageUri = getOutputMediaFileUri(MEDIA_TYPE_IMAGE); // create a file to
// save the image
camera.putExtra(MediaStore.EXTRA_OUTPUT, imageUri); // set the image file
// name
// start the image capture Intent
startActivityForResult(camera, CAPTURE_IMAGE_ACTIVITY_REQUEST_CODE);
}
/** Create a file Uri for saving an image or video */
private static Uri getOutputMediaFileUri(int type) {
return Uri.fromFile(getOutputMediaFile(type));
}
/** Create a File for saving an image or video */
private static File getOutputMediaFile(int type) {
// To be safe, you should check that the SDCard is mounted
// using Environment.getExternalStorageState() before doing this.
// Environment
// .getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
// "MyCameraApp");
// This location works best if you want the created images to be shared
// between applications and persist after your app has been uninstalled.
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.d("MicroFilm", "failed to create directory");
return null;
}
}
// Create a media file name
String timeStamp;
cal = Calendar.getInstance();
timeStamp = "" + cal.get(Calendar.YEAR) + cal.get(Calendar.MONTH) + 1
+ cal.get(Calendar.DAY_OF_MONTH) + "_"
+ cal.get(Calendar.HOUR_OF_DAY) + cal.get(Calendar.MINUTE)
+ cal.get(Calendar.SECOND);
File mediaFile;
if (type == MEDIA_TYPE_IMAGE) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator
+ "IMG_" + timeStamp + ".jpg");
} else if (type == MEDIA_TYPE_VIDEO) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator
+ "VID_" + timeStamp + ".mp4");
} else {
return null;
}
return mediaFile;
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == CAPTURE_IMAGE_ACTIVITY_REQUEST_CODE) {
if (resultCode == RESULT_OK) {
// Image captured and saved to fileUri specified in the Intent
Toast.makeText(MicroFilm.this, "Image saved", Toast.LENGTH_LONG).show();
} else if (resultCode == RESULT_CANCELED) {
finish();
} else {
// Image capture failed, advise user
}
}
}
}
thanks for your help :)
I think that your NullPointerException is thrown in the captureSound() method.
You have to initialize the variable soundRecorder before you start calling methods on that object.
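For example, a minimal sketch of that fix in onCreate(), keeping the rest of the posted code as it is:
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_micro_film);
    soundRecorder = new MediaRecorder(); // create it before captureSound() calls methods on it
    // ... storage check, takePicture(), findViewById() lookups as in the posted code ...
    captureSound();
}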
I would suggest you assign initial values to ALL of your instance variables.
A NullPointerException occurs when you call a method on, or dereference, an object reference that is null.
It is highly likely that you did not initialize those variables.
For example, change
private Uri imageUri;
to something like
private Uri imageUri = Uri.EMPTY;
(this is just an example, and may not be the proper initialization of Uri for your intended use)
Just write soundRecorder = new MediaRecorder(); after setContentView(R.layout.activity_micro_film);.
