Hi, I'm making a photo-effect app.
I load a bitmap from the camera (I save the original image and then load it) into a GLSurfaceView and apply some effects, but I can't find a way to save the changed image as an image file (*.png or *.jpg).
I've looked almost everywhere, but nothing works for my app; it always force-closes when I try to save.
Here is my code.
I found some save code, but I couldn't get it to work.
main.xml
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical" >
<android.opengl.GLSurfaceView
android:id="#+id/effectsview"
android:layout_width="fill_parent"
android:layout_height="0dp"
android:layout_weight="0.05" />
<GridLayout
android:layout_width="match_parent"
android:layout_height="wrap_content" >
</GridLayout>
<GridLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:columnCount="1" >
<Button
android:id="#+id/button1"
android:layout_width="159dp"
android:layout_height="wrap_content"
android:layout_column="0"
android:layout_row="0"
android:onClick="saverenderedimage"
android:layout_gravity="left|top"
android:text="Save Image" />
<Button
android:id="#+id/Button01"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_gravity="left|top"
android:onClick="gomain"
android:text="Go Main Menu without saving" />
<Button
android:id="#+id/button2"
android:layout_width="156dp"
android:layout_column="0"
android:layout_gravity="right|top"
android:layout_row="0"
android:onClick="sharedialog"
android:text="Share" />
</GridLayout>
</LinearLayout>
Effect selector and applier (.java):
public class Effects_selevtor extends Activity implements GLSurfaceView.Renderer {
// ... some String and int fields declared here (removed for brevity)
String imagefilepath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/filename.jpg";
int mCurrentEffect;
public void setCurrentEffect(int effect) {
mCurrentEffect = effect;
}
public void onCreate(Bundle savedInstanceState) {
this.requestWindowFeature(Window.FEATURE_NO_TITLE);
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_effects_selevtor);
mEffectView = (GLSurfaceView) findViewById(R.id.effectsview);
mEffectView.setEGLContextClientVersion(2);
mEffectView.setRenderer(this);
mEffectView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
mCurrentEffect = R.id.none;
Uri imageFileUri = Uri.parse("file:///sdcard/filename.jpg");
Intent cameraIntent = new Intent(
android.provider.MediaStore.ACTION_IMAGE_CAPTURE);
cameraIntent.putExtra(android.provider.MediaStore.EXTRA_OUTPUT, imageFileUri);
startActivityForResult(cameraIntent, 2);
}
public void gomain(View View) {
startActivity(new Intent(Effects_selevtor.this,HelloEffects.class));
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == 2) {
try {
if (bitmap != null) {
bitmap.recycle();
}
GLES20.glGenTextures(2, mTextures, 0);
// Load input bitmap
Bitmap bitmap = BitmapFactory.decodeFile(imagefilepath);
mImageWidth = bitmap.getWidth();
mImageHeight = bitmap.getHeight();
mTexRenderer.updateTextureSize(mImageWidth, mImageHeight);
// Upload to texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures[0]);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
// Set texture parameters
GLToolbox.initTexParams();
Toast.makeText(getApplicationContext(), "Touch your phone's Menu button to select effects ", Toast.LENGTH_SHORT).show();
} catch (Exception e) {
e.printStackTrace();
}
}
}
public static Bitmap SavePixels(int x, int y, int w, int h, GL10 gl)
{
int b[]=new int[w*(y+h)];
int bt[]=new int[w*h];
IntBuffer ib=IntBuffer.wrap(b);
ib.position(0);
gl.glReadPixels(x, 0, w, y+h, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, ib);
for(int i=0, k=0; i<h; i++, k++)
{ // OpenGL returns pixels as RGBA, bottom-up; Android's Bitmap expects
// ARGB, top-down, so the channels and row order need correcting.
for(int j=0; j<w; j++)
{
int pix=b[i*w+j];
int pb=(pix>>16)&0xff;
int pr=(pix<<16)&0x00ff0000;
int pix1=(pix&0xff00ff00) | pr | pb;
bt[(h-k-1)*w+j]=pix1;
}
}
Bitmap sb=Bitmap.createBitmap(bt, w, h, Bitmap.Config.ARGB_8888);
return sb;
}
public static void SavePNG(int x, int y, int w, int h, String name, GL10 gl)
{
Bitmap bmp=SavePixels(x,y,w,h,gl);
try
{
FileOutputStream fos=new FileOutputStream("/sdcard/CamWay/"+name);
bmp.compress(CompressFormat.PNG, 100, fos);
try
{
fos.flush();
}
catch (IOException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
try
{
fos.close();
}
catch (IOException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
}
catch (FileNotFoundException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
}
public void saverenderedimage(View view) {
// I tried to save here, but it didn't work; I don't understand what I should declare for "gl"
SavePNG(0, 0,mEffectView.getWidth() , mEffectView.getHeight(), "CamWay.png", gl);
// SavePNG(0, 0,mEffectView.getWidth() , mEffectView.getHeight(), imagefilepath, gl);
startActivity(new Intent(Effects_selevtor.this,HelloEffects.class));
}
public void OnClickselector(View arg0) {
startActivity(new Intent(Effects_selevtor.this,HelloEffects.class));
}
private void loadTextures() {
// Generate textures
GLES20.glGenTextures(2, mTextures, 0);
// Load input bitmap
Bitmap bitmap = BitmapFactory.decodeFile(imagefilepath);
mImageWidth = bitmap.getWidth();
mImageHeight = bitmap.getHeight();
mTexRenderer.updateTextureSize(mImageWidth, mImageHeight);
// Upload to texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures[0]);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
// Set texture parameters
GLToolbox.initTexParams();
}
private void initEffect() {
EffectFactory effectFactory = mEffectContext.getFactory();
if (mEffect != null) {
mEffect.release();
}
/**
* Initialize the correct effect based on the selected menu/action item
*/
switch (mCurrentEffect) {
case R.id.none:
break;
case R.id.vignette:
mEffect = effectFactory.createEffect(
EffectFactory.EFFECT_VIGNETTE);
mEffect.setParameter("scale", .5f);
break;
//and a lot effect more i deleted for readability
default:
break;
}
}
private void applyEffect() {
mEffect.apply(mTextures[0], mImageWidth, mImageHeight, mTextures[1]);
}
private void renderResult() {
if (mCurrentEffect != R.id.none) {
// render the result of applyEffect()
mTexRenderer.renderTexture(mTextures[1]);
}
else {
// if no effect is chosen, just render the original bitmap
mTexRenderer.renderTexture(mTextures[0]);
}
}
@Override
public void onDrawFrame(GL10 gl) {
if (!mInitialized) {
//Only need to do this once
mEffectContext = EffectContext.createWithCurrentGlContext();
mTexRenderer.init();
loadTextures();
mInitialized = true;
}
if (mCurrentEffect != R.id.none) {
//if an effect is chosen initialize it and apply it to the texture
initEffect();
applyEffect();
}
renderResult();
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
if (mTexRenderer != null) {
mTexRenderer.updateViewSize(width, height);
}
}
I achieved saving the GLSurfaceView as PNG by cross-compiling libpng and using it through JNI.
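If you want to stay in Java instead, another option (not the author's method, just a sketch) is to read the pixels back with GLES20.glReadPixels on the GL thread and convert them the same way SavePixels() does; with GLES 2.0 no GL10 object is needed. The mSaveRequested flag and the readFramebuffer() helper below are names made up for illustration:
private volatile boolean mSaveRequested = false;

public void saverenderedimage(View view) {
    mSaveRequested = true;
    mEffectView.requestRender(); // triggers onDrawFrame() on the GL thread
}

@Override
public void onDrawFrame(GL10 gl) {
    // ... existing init / initEffect() / applyEffect() / renderResult() calls ...
    if (mSaveRequested) {
        mSaveRequested = false;
        // Read while the back buffer still holds the rendered frame
        Bitmap bmp = readFramebuffer(mEffectView.getWidth(), mEffectView.getHeight());
        // compress bmp to PNG off the GL thread, e.g. reuse the body of SavePNG()
    }
}

// GLES 2.0 equivalent of SavePixels(): no GL10 parameter is required.
private static Bitmap readFramebuffer(int w, int h) {
    int[] pixels = new int[w * h];
    IntBuffer buf = IntBuffer.wrap(pixels);
    buf.position(0);
    GLES20.glReadPixels(0, 0, w, h, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
    // glReadPixels returns RGBA rows bottom-up; Android's Bitmap wants ARGB top-down
    int[] argb = new int[w * h];
    for (int row = 0; row < h; row++) {
        for (int col = 0; col < w; col++) {
            int p = pixels[row * w + col];
            int blue = (p >> 16) & 0xff;
            int red = (p << 16) & 0x00ff0000;
            argb[(h - row - 1) * w + col] = (p & 0xff00ff00) | red | blue;
        }
    }
    return Bitmap.createBitmap(argb, w, h, Bitmap.Config.ARGB_8888);
}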
I'm posting an example that takes a photo and lets you see the preview, but I have a problem, described below the code.
public class MainActivity extends ActionBarActivity {
private ImageSurfaceView mImageSurfaceView;
private Camera camera;
private FrameLayout cameraPreviewLayout;
private ImageView capturedImageHolder;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
cameraPreviewLayout = (FrameLayout)findViewById(R.id.camera_preview);
capturedImageHolder = (ImageView)findViewById(R.id.captured_image);
camera = checkDeviceCamera();
mImageSurfaceView = new ImageSurfaceView(MainActivity.this, camera);
cameraPreviewLayout.addView(mImageSurfaceView);
Button captureButton = (Button)findViewById(R.id.button);
captureButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
camera.takePicture(null, null, pictureCallback);
}
});
}
private Camera checkDeviceCamera(){
Camera mCamera = null;
try {
mCamera = Camera.open();
} catch (Exception e) {
e.printStackTrace();
}
return mCamera;
}
PictureCallback pictureCallback = new PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
if(bitmap==null){
Toast.makeText(MainActivity.this, "Captured image is empty", Toast.LENGTH_LONG).show();
return;
}
capturedImageHolder.setImageBitmap(scaleDownBitmapImage(bitmap, 300, 200 ));
}
};
private Bitmap scaleDownBitmapImage(Bitmap bitmap, int newWidth, int newHeight){
Bitmap resizedBitmap = Bitmap.createScaledBitmap(bitmap, newWidth, newHeight, true);
return resizedBitmap;
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
}
ImageSurfaceView Class
public class ImageSurfaceView extends SurfaceView implements SurfaceHolder.Callback {
private Camera camera;
private SurfaceHolder surfaceHolder;
public ImageSurfaceView(Context context, Camera camera) {
super(context);
this.camera = camera;
this.surfaceHolder = getHolder();
this.surfaceHolder.addCallback(this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
this.camera.setPreviewDisplay(holder);
this.camera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
this.camera.stopPreview();
this.camera.release();
}
}
XML
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:paddingLeft="#dimen/activity_horizontal_margin"
android:paddingRight="#dimen/activity_horizontal_margin"
android:paddingTop="#dimen/activity_vertical_margin"
android:paddingBottom="#dimen/activity_vertical_margin"
tools:context=".MainActivity"
android:baselineAligned="false">
<FrameLayout
android:id="#+id/camera_preview"
android:layout_width="match_parent"
android:layout_height="300dp"/>
<Button
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="#string/capture_button"
android:id="#+id/button"
android:layout_below="#+id/camera_preview"
android:layout_centerHorizontal="true"
android:layout_marginTop="15dp" />
<ImageView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:id="#+id/captured_image"
android:layout_below="#+id/button"
android:layout_alignParentLeft="true"
android:layout_alignParentStart="true"
android:layout_marginTop="15dp"
android:contentDescription="#string/desc" />
The issue is that if you try this code, you'll see the preview, but it is not oriented correctly. Is there someone who can solve this?
You have to use ExifInterface and Matrix to get the right orientation.
Try this:
public static void handleImageRotation(Context context, File mFileTemp) {
if (!mFileTemp.exists()) {
Toast.makeText(context, "File not found", Toast.LENGTH_SHORT).show();
return;
}
ExifInterface exif = null;
int orientation = ExifInterface.ORIENTATION_NORMAL;
try {
exif = new ExifInterface(mFileTemp.getAbsolutePath());
orientation = exif.getAttributeInt(
ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL);
} catch (Exception e) {
e.printStackTrace();
}
Bitmap temp = BitmapFactory.decodeFile(mFileTemp.getAbsolutePath());
if (temp == null) Log.e(TAG, "Bitmap from File is null");
Matrix matrix = new Matrix();
if (orientation == ExifInterface.ORIENTATION_ROTATE_90) {
matrix.postRotate(90);
temp = Bitmap.createBitmap(temp, 0, 0, temp.getWidth(), temp.getHeight(), matrix, true);
} else if (orientation == ExifInterface.ORIENTATION_ROTATE_180) {
matrix.postRotate(180);
temp = Bitmap.createBitmap(temp, 0, 0, temp.getWidth(), temp.getHeight(), matrix, true);
} else if (orientation == ExifInterface.ORIENTATION_ROTATE_270) {
matrix.postRotate(270);
temp = Bitmap.createBitmap(temp, 0, 0, temp.getWidth(), temp.getHeight(), matrix, true);
}
ByteArrayOutputStream bos = new ByteArrayOutputStream();
temp.compress(Bitmap.CompressFormat.JPEG, 40, bos);
byte[] bitmapdata = bos.toByteArray();
// write the bytes in file
try {
FileOutputStream fos = new FileOutputStream(mFileTemp);
fos.write(bitmapdata);
fos.flush();
fos.close();
} catch (IOException e) {
e.printStackTrace();
}
}
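One possible way to wire this into the question's pictureCallback (a sketch; the capture.jpg temp file path is just an example): write the raw JPEG bytes to a file first, run handleImageRotation() on it, then decode and display the corrected file.
PictureCallback pictureCallback = new PictureCallback() {
    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        try {
            // Save the raw JPEG so it keeps the EXIF orientation tag written by the camera
            File tempFile = new File(getExternalFilesDir(null), "capture.jpg");
            FileOutputStream fos = new FileOutputStream(tempFile);
            fos.write(data);
            fos.close();
            // Rotate the file in place according to its EXIF data
            handleImageRotation(MainActivity.this, tempFile);
            // Decode the corrected image and show it
            Bitmap fixed = BitmapFactory.decodeFile(tempFile.getAbsolutePath());
            capturedImageHolder.setImageBitmap(scaleDownBitmapImage(fixed, 300, 200));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
};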
This is the code in question:
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
Camera.Parameters parameters = camera.getParameters();
parameters.setRotation(270); //set rotation to save the picture
camera.setDisplayOrientation(90);
//set the rotation for preview camera
camera.setParameters(parameters);
this.camera.setPreviewDisplay(holder);
this.camera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
I'm creating an Android app which basically merges a background image (a default image) with user input from an EditText in the XML layout.
I have written some of the code, but it doesn't seem to work properly.
Any kind of help is welcome.
P.S. I'm new to development.
Here is my MainActivity.java class:
public class MainActivity extends AppCompatActivity {
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
EditText editText; //defined edit text
Bitmap bmp; // defined Bitmap
Button button;
public void myBit(){
bmp = Bitmap.createBitmap(editText.getDrawingCache());
}
public void saveImg() {
File myDir = new File("/sdcard/saved_images");
myDir.mkdir();
Random ran = new Random();
int n = 10000;
n = ran.nextInt(n);
String fname = "Image-"+n+".jpg";
File file = new File (myDir, fname);
if (file.exists ()) file.delete ();
try {
FileOutputStream out = new FileOutputStream(file);
// canvas for merging text over image (Bitmap)
Canvas canvas = new Canvas(bmp);
Paint paint = new Paint();
paint.setColor(Color.WHITE); // Text Color
paint.setStrokeWidth(12); // Text Size
paint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC_OVER)); // Text Overlapping Pattern
// some more settings...
canvas.drawBitmap(bmp, 400, 400, paint); // ◄ Edited here
canvas.drawText("Testing...", 10, 10, paint);
bmp.compress(Bitmap.CompressFormat.JPEG, 90, out);
out.flush();
out.close();
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
//getMenuInflater().inflate(R.menu.main, menu);
Button button = (Button) findViewById(R.id.button1);
button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
saveImg();
Toast.makeText(MainActivity.this, "Image Saved!!", Toast.LENGTH_SHORT).show();
}
});
return true;
}
}
To create an image file from a bitmap, I have tried the two ways below.
The first takes the EditText (or any view you pass) as an image: the getBitmapFromView method converts a view to a bitmap.
The second takes the EditText's text and draws it on top of a default image: the writeTextOnImage method creates a bitmap from the given text drawn over the image, meme-style. Put your own image in place of R.drawable.t_img.
Activity
public class CreateSaveImage extends AppCompatActivity {
private EditText editText;
private File file;
Bitmap bmp; // defined Bitmap
ImageView imgShowOuput;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.search_voice);
file = new File(Environment.getExternalStorageDirectory() + File.separator + "saveimage");
file.mkdirs();
editText = (EditText) findViewById(R.id.edtTextView);
imgShowOuput = (ImageView) findViewById(R.id.imgShowOuput);
findViewById(R.id.btnSaveImage).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
bmp = getBitmapFromView(editText);
String path = file.getAbsolutePath() + File.separator + System.currentTimeMillis() + "_image" + ".jpg";
saveBitmapToFile(bmp, path, 100);
imgShowOuput.setImageBitmap(bmp);
Toast.makeText(CreateSaveImage.this, "Image Saved!!", Toast.LENGTH_SHORT).show();
}
});
findViewById(R.id.btnSaveImageFromText).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
bmp = writeTextOnImage(CreateSaveImage.this, editText.getText().toString());
String path = file.getAbsolutePath() + File.separator + System.currentTimeMillis() + "_drawImage" + ".jpg";
saveBitmapToFile(bmp, path, 100);
imgShowOuput.setImageBitmap(bmp);
}
});
}
/**
* @param view
* @return
*/
public static Bitmap getBitmapFromView(View view) {
view.clearFocus();
view.setPressed(false);
view.setFocusable(false);
Bitmap returnedBitmap = Bitmap.createBitmap(view.getWidth(), view.getHeight(), Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(returnedBitmap);
Drawable bgDrawable = view.getBackground();
if (bgDrawable != null)
bgDrawable.draw(canvas);
else
canvas.drawColor(Color.WHITE);
view.draw(canvas);
return returnedBitmap;
}
public static String saveBitmapToFile(
Bitmap bitmap, String path, int quality) {
File imageFile = new File(path);
OutputStream os;
try {
os = new FileOutputStream(imageFile);
bitmap.compress(Bitmap.CompressFormat.JPEG, quality, os);
os.flush();
os.close();
} catch (Exception e) {
Log.e("BitmapToTempFile", "Error writing bitmap", e);
}
return imageFile.getAbsolutePath();
}
public Bitmap writeTextOnImage(Context mContext, String mText) {
try {
Resources resources = mContext.getResources();
float scale = resources.getDisplayMetrics().density;
/// Here you need to give your default image
Bitmap bitmap = BitmapFactory.decodeResource(resources, R.drawable.t_img);
android.graphics.Bitmap.Config bitmapConfig = bitmap.getConfig();
if (bitmapConfig == null) {
bitmapConfig = android.graphics.Bitmap.Config.ARGB_8888;
}
bitmap = bitmap.copy(bitmapConfig, true);
Canvas canvas = new Canvas(bitmap);
Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
// Change Text Size & Color based on your requirement
paint.setColor(Color.BLACK);
paint.setTextSize((int) (21 * scale));
paint.setShadowLayer(20f, 0, 0, Color.LTGRAY);
Rect bounds = new Rect();
paint.getTextBounds(mText, 0, mText.length(), bounds);
// Change position based on your requirement
int x = 20;
int y = 20;
canvas.drawText(mText, x * scale, y * scale, paint);
return bitmap;
} catch (Exception e) {
return null;
}
}
}
XML layout
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:id="#+id/mainlinear"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="#7e95e6"
android:orientation="vertical">
<EditText
android:id="#+id/edtTextView"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:textColor="#FF0000" />
<Button
android:id="#+id/btnSaveImage"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Save EditText Image" />
<Button
android:id="#+id/btnSaveImageFromText"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Save Text from EditText as a Image" />
<ImageView
android:id="#+id/imgShowOuput"
android:layout_width="fill_parent"
android:layout_height="400dp" />
</LinearLayout>
Check the output I got from the above.
Let me know if anything is unclear.
I am trying to implement an AsyncTask to inform the user that a background operation is running when a photo is taken. The AsyncTask is able to display the setMessage dialog, but somewhere along the line the app crashes and displays the following error:
Caused by: android.view.ViewRootImpl$CalledFromWrongThreadException: Only the original thread that created a view hierarchy can touch its views.
This is the complete code of my AsyncTask:
private class CheckTypesTask extends AsyncTask<Void, Void, Void>{
ProgressDialog asyncDialog = new ProgressDialog(MainActivity.this);
@Override
protected void onPreExecute() {
//set message of the dialog
asyncDialog.setMessage("Loading");
//show dialog
asyncDialog.show();
super.onPreExecute();
}
@Override
protected Void doInBackground(Void... arg0) {
//don't touch dialog here it'll break the application
//do some lengthy stuff like calling login webservice
onPhotoTaken();
return null;
}
@Override
protected void onPostExecute(Void result) {
//hide the dialog
asyncDialog.dismiss();
super.onPostExecute(result);
}
}
Then I am calling the AsyncTask in onActivityResult this way:
if (resultCode == -1) { // -1 is Activity.RESULT_OK
//onPhotoTaken();
(new MainActivity.CheckTypesTask()).execute();
} else {
Log.v(TAG, "User cancelled");
}
}
EDITED:
This is the onPhotoTaken code; it is defined in the MainActivity class.
public void onPhotoTaken() {
_taken = true;
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 4;
Bitmap bitmap = BitmapFactory.decodeFile(_path, options);
try {
//ProgressDialog dialog = ProgressDialog.show(this, "Loading", "Please wait...", true);
ExifInterface exif = new ExifInterface(_path);
int exifOrientation = exif.getAttributeInt(
ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL);
Log.v(TAG, "Orient: " + exifOrientation);
int rotate = 0;
switch (exifOrientation) {
case ExifInterface.ORIENTATION_ROTATE_90:
rotate = 90;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
rotate = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_270:
rotate = 270;
break;
}
Log.v(TAG, "Rotation: " + rotate);
if (rotate != 0) {
// Getting width & height of the given image.
int w = bitmap.getWidth();
int h = bitmap.getHeight();
// Setting pre rotate
Matrix mtx = new Matrix();
mtx.preRotate(rotate);
// Rotating Bitmap
bitmap = Bitmap.createBitmap(bitmap, 0, 0, w, h, mtx, false);
}
// Convert to ARGB_8888, required by tess
bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);
//dialog.dismiss();
} catch (IOException e) {
Log.e(TAG, "Couldn't correct orientation: " + e.toString());
}
}
Please, what could be wrong?
You are trying to access UI components (a View) from a background thread, in your case inside the doInBackground() method. You are not allowed to do that.
Call the UI-touching part from onPostExecute() instead.
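A minimal sketch of that restructuring, assuming the heavy decode/rotate part of onPhotoTaken() is factored into a method that returns the Bitmap (decodeAndRotate() and imageView below are hypothetical names), so that only onPostExecute() touches views:
private class CheckTypesTask extends AsyncTask<Void, Void, Bitmap> {
    ProgressDialog asyncDialog = new ProgressDialog(MainActivity.this);

    @Override
    protected void onPreExecute() {
        asyncDialog.setMessage("Loading");
        asyncDialog.show();
    }

    @Override
    protected Bitmap doInBackground(Void... arg0) {
        // Heavy work only: decode + rotate. No View access here.
        return decodeAndRotate(_path);
    }

    @Override
    protected void onPostExecute(Bitmap result) {
        asyncDialog.dismiss();
        if (result != null) {
            imageView.setImageBitmap(result); // UI work back on the main thread
        }
    }
}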
Try this code.
This is your layout, which contains a trigger for image capture and an ImageView to display the captured image:
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:fillViewport="true"
android:orientation="vertical"
tools:context="com.serveroverload.cube.ui.HomeActivity" >
<ImageView
android:id="#+id/preview"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_above="#+id/take_picture"
android:layout_alignParentTop="true"
android:layout_margin="5dp" />
<ImageView
android:id="#+id/take_picture"
android:layout_width="50dp"
android:layout_height="50dp"
android:layout_alignParentBottom="true"
android:layout_centerHorizontal="true"
android:layout_gravity="center"
android:layout_margin="10dp"
android:background="#drawable/flat_selector"
android:padding="5dp"
android:scaleType="fitXY"
android:src="#drawable/take_pic" />
</RelativeLayout>
This is your Android manifest with the required permissions:
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.example.camtest"
android:versionCode="1"
android:versionName="1.0" >
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-sdk
android:minSdkVersion="8"
android:targetSdkVersion="21" />
<application
android:allowBackup="true"
android:icon="#drawable/ic_launcher"
android:label="#string/app_name"
android:theme="#style/AppTheme" >
<activity
android:name="com.serveroverload.cube.ui.HomeActivity"
android:label="#string/app_name" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
This is the camera handler class, which resizes and rotates images if required; currently it samples images down to 1024x1280 for the UI.
public class CamerHandler {
private CamerHandler() {
getAllImages();
}
private static CamerHandler camerHandler;
public static CamerHandler GetCamerHandlerInstance() {
if (null == camerHandler) {
camerHandler = new CamerHandler();
}
return camerHandler;
}
private static final String CAM_DIRECTORY = "CamDirectory";
private static final int MAX_HEIGHT = 1024;
private static final int MAX_WIDTH = 1280;
private ArrayList<File> imageURL = new ArrayList<File>();
public ArrayList<File> getImageURL() {
return imageURL;
}
public void setImageURL(ArrayList<File> imageURL) {
this.imageURL = imageURL;
}
public void getAllImages() {
imageURL.clear();
File folder = new File(getImageDirectory());
File[] listOfFiles = folder.listFiles();
if (null != listOfFiles && listOfFiles.length != 0) {
for (int i = 0; i < listOfFiles.length; i++) {
if (listOfFiles[i].isFile()) {
imageURL.add(listOfFiles[i]);
System.out.println("File " + listOfFiles[i].getName());
} else if (listOfFiles[i].isDirectory()) {
System.out.println("Directory " + listOfFiles[i].getName());
}
}
}
}
/**
* This method is responsible for solving the rotation issue if it exists.
* It also scales the image down to at most MAX_WIDTH x MAX_HEIGHT (1280x1024).
*
* @param context
* The current context
* @param selectedImage
* The image URI
* @return Bitmap image results
* @throws IOException
*/
public Bitmap handleSamplingAndRotationBitmap(Context context, Uri selectedImage) throws IOException {
// First decode with inJustDecodeBounds=true to check dimensions
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
InputStream imageStream = context.getContentResolver().openInputStream(selectedImage);
BitmapFactory.decodeStream(imageStream, null, options);
imageStream.close();
// Calculate inSampleSize
options.inSampleSize = calculateInSampleSize(options, MAX_WIDTH, MAX_HEIGHT);
// Decode bitmap with inSampleSize set
options.inJustDecodeBounds = false;
imageStream = context.getContentResolver().openInputStream(selectedImage);
Bitmap img = BitmapFactory.decodeStream(imageStream, null, options);
// img = rotateImageIfRequired(img, selectedImage);
img = rotateBitmap(context, img, selectedImage);
return img;
}
public Bitmap rotateBitmap(Context context, Bitmap bitmap, Uri selectedImage) {
ExifInterface exif;
try {
exif = new ExifInterface(selectedImage.getPath());
int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_UNDEFINED);
Matrix matrix = new Matrix();
switch (orientation) {
case ExifInterface.ORIENTATION_NORMAL:
return bitmap;
case ExifInterface.ORIENTATION_FLIP_HORIZONTAL:
matrix.setScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_180:
matrix.setRotate(180);
break;
case ExifInterface.ORIENTATION_FLIP_VERTICAL:
matrix.setRotate(180);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_TRANSPOSE:
matrix.setRotate(90);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_90:
matrix.setRotate(90);
break;
case ExifInterface.ORIENTATION_TRANSVERSE:
matrix.setRotate(-90);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_270:
matrix.setRotate(-90);
break;
default:
return bitmap;
}
// try {
Bitmap bmRotated = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
bitmap.recycle();
return bmRotated;
} catch (IOException e) {
e.printStackTrace();
return null;
} catch (OutOfMemoryError e) {
e.printStackTrace();
return null;
}
}
/**
* Calculate an inSampleSize for use in a {@link BitmapFactory.Options}
* object when decoding bitmaps using the decode* methods from
* {@link BitmapFactory}. This implementation calculates the closest
* inSampleSize that will result in the final decoded bitmap having a width
* and height equal to or larger than the requested width and height. This
* implementation does not ensure a power of 2 is returned for inSampleSize,
* which can be faster when decoding but results in a larger bitmap which
* isn't as useful for caching purposes.
*
* @param options
* An options object with out* params already populated (run
* through a decode* method with inJustDecodeBounds == true)
* @param reqWidth
* The requested width of the resulting bitmap
* @param reqHeight
* The requested height of the resulting bitmap
* @return The value to be used for inSampleSize
*/
private int calculateInSampleSize(BitmapFactory.Options options, int reqWidth, int reqHeight) {
// Raw height and width of image
final int height = options.outHeight;
final int width = options.outWidth;
int inSampleSize = 1;
if (height > reqHeight || width > reqWidth) {
// Calculate ratios of height and width to requested height and
// width
final int heightRatio = Math.round((float) height / (float) reqHeight);
final int widthRatio = Math.round((float) width / (float) reqWidth);
// Choose the smallest ratio as inSampleSize value, this will
// guarantee a final image
// with both dimensions larger than or equal to the requested height
// and width.
inSampleSize = heightRatio < widthRatio ? heightRatio : widthRatio;
// This offers some additional logic in case the image has a strange
// aspect ratio. For example, a panorama may have a much larger
// width than height. In these cases the total pixels might still
// end up being too large to fit comfortably in memory, so we should
// be more aggressive with sample down the image (=larger
// inSampleSize).
final float totalPixels = width * height;
// Anything more than 2x the requested pixels we'll sample down
// further
final float totalReqPixelsCap = reqWidth * reqHeight * 2;
while (totalPixels / (inSampleSize * inSampleSize) > totalReqPixelsCap) {
inSampleSize++;
}
}
return inSampleSize;
}
public void openGallery(Context context) {
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse("content://media/internal/images/media"));
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(intent);
}
private void convertToBase64(Bitmap bitmap) {
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
bitmap.compress(CompressFormat.JPEG, 30, byteArrayOutputStream);
byte[] byteArray = byteArrayOutputStream.toByteArray();
String encoded = Base64.encodeToString(byteArray, Base64.NO_WRAP);
// bitmap.recycle();
encoded = null;
byteArray = null;
}
public String getImageDirectory() {
return createDirIfNotExists().getAbsolutePath();
}
public File createDirIfNotExists() {
File imageDirectory = new File(Environment.getExternalStorageDirectory(), CAM_DIRECTORY);
if (!imageDirectory.exists()) {
if (!imageDirectory.mkdirs()) {
Log.e("imageDirectory:: ", "Problem creating Image folder");
}
}
return imageDirectory;
}
}
And this is the activity code, which takes pictures and performs all the down-sampling and image rotation in the background before updating the UI:
public class HomeActivity extends Activity {
static final int REQUEST_IMAGE_CAPTURE = 1;
private ImageView previewLayout;
private static final String TAG = "MainActivity";
static Uri capturedImageUri = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_home);
// Imageview to display Image
previewLayout = (ImageView) findViewById(R.id.preview);
findViewById(R.id.take_picture).setOnClickListener(
new OnClickListener() {
@Override
public void onClick(View v) {
dispatchTakePictureIntent();
}
});
}
private void dispatchTakePictureIntent() {
Intent intent = new Intent("android.media.action.IMAGE_CAPTURE");
if (intent.resolveActivity(getPackageManager()) != null) {
Calendar cal = Calendar.getInstance();
// store image in new File in image directory
File file = new File(CamerHandler.GetCamerHandlerInstance()
.getImageDirectory(), (cal.getTimeInMillis() + ".png"));
if (!file.exists()) {
try {
file.createNewFile();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
Toast.makeText(getApplicationContext(),
"Failed to make file", Toast.LENGTH_SHORT).show();
}
} else {
file.delete();
try {
file.createNewFile();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
Toast.makeText(getApplicationContext(),
"Failed to make file", Toast.LENGTH_SHORT).show();
}
}
capturedImageUri = Uri.fromFile(file);
intent.putExtra(MediaStore.EXTRA_OUTPUT, capturedImageUri);
startActivityForResult(intent, REQUEST_IMAGE_CAPTURE);
}
}
@SuppressLint("NewApi")
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == RESULT_OK && requestCode == REQUEST_IMAGE_CAPTURE) {
// update file in gallery
sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE,
capturedImageUri));
// Downsample the image before displaying it in the ImageView to avoid an OOM
// exception
new LoadBitMap(previewLayout, HomeActivity.this)
.execute(capturedImageUri);
} else {
Log.e(TAG, "FAILED TO TAKE IMAGE");
}
}
}
class LoadBitMap extends AsyncTask<Uri, Void, Void> {
public LoadBitMap(ImageView preview, Context context) {
this.prevImageView = preview;
this.mContext = context;
}
private Bitmap bitmap = null;
private ImageView prevImageView;
private Context mContext;
@Override
protected Void doInBackground(Uri... params) {
try {
bitmap = CamerHandler.GetCamerHandlerInstance()
.handleSamplingAndRotationBitmap(mContext, params[0]);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
@Override
protected void onPostExecute(Void result) {
if (null != bitmap) {
prevImageView.setBackground(new BitmapDrawable(mContext
.getResources(), bitmap));
}
super.onPostExecute(result);
}
}
A point to note: you could use Picasso or Glide for the image-loading task instead, since we already have the URI in the capturedImageUri variable.
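For example, with the Glide dependency added to the project, the LoadBitMap task above could be replaced with a single call (a sketch; Glide does the down-sampling and background decoding itself):
// In onActivityResult(), instead of new LoadBitMap(previewLayout, HomeActivity.this).execute(capturedImageUri):
Glide.with(HomeActivity.this)
        .load(capturedImageUri)
        .into(previewLayout);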
I am writing a camera program for Android. I use the following code:
package camera;
public class SurfaceCamera extends Activity implements SurfaceHolder.Callback {
TextView testView;
Camera camera;
SurfaceView surfaceView;
SurfaceHolder surfaceHolder;
PictureCallback rawCallback;
ShutterCallback shutterCallback;
PictureCallback jpegCallback;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.video_surf);
surfaceView = (SurfaceView) findViewById(R.id.surfaceView);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
jpegCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
FileOutputStream outStream = null;
try {
outStream = new FileOutputStream(String.format("/sdcard/Octopus/gif/%d.jpg", System.currentTimeMillis()));
outStream.write(data);
outStream.close();
Log.d("Log", "onPictureTaken - wrote bytes: " + data.length);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
Toast.makeText(getApplicationContext(), "Picture Saved", 2000).show();
refreshCamera();
}
};
}
public void captureImage(View v) throws IOException {
//take the picture
camera.takePicture(null, null, jpegCallback);
}
public void refreshCamera() {
if (surfaceHolder.getSurface() == null) {
// preview surface does not exist
return;
}
// stop preview before making changes
try {
camera.stopPreview();
} catch (Exception e) {
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
camera.setPreviewDisplay(surfaceHolder);
camera.startPreview();
} catch (Exception e) {
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// Now that the size is known, set up the camera parameters and begin
// the preview.
refreshCamera();
}
public void surfaceCreated(SurfaceHolder holder) {
try {
// open the camera
camera = Camera.open();
} catch (RuntimeException e) {
// check for exceptions
System.err.println(e);
return;
}
Camera.Parameters param;
param = camera.getParameters();
//param.setColorEffect(Camera.Parameters.EFFECT_NEGATIVE);
// modify parameter
param.setPreviewSize(600, 900);
camera.setDisplayOrientation(90);
camera.setParameters(param);
try {
// The Surface has been created, now tell the camera where to draw
// the preview.
camera.setPreviewDisplay(surfaceHolder);
camera.startPreview();
} catch (Exception e) {
// check for exceptions
System.err.println(e);
return;
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// stop preview and release camera
camera.stopPreview();
camera.release();
camera = null;
}
}
layout
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:id="#+id/container"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical"
tools:context="com.javacodegeeks.androidsurfaceviewexample.AndroidSurfaceviewExample" >
<SurfaceView
android:id="#+id/surfaceView"
android:layout_width="300dp"
android:layout_height="200dp" />
<LinearLayout
android:id="#+id/capture"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:clickable="true"
android:gravity="center"
android:onClick="captureImage"
android:orientation="horizontal" >
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:gravity="center"
android:padding="10dp"
android:text="Capture"
android:textAppearance="?android:attr/textAppearanceLarge" />
</LinearLayout>
</LinearLayout>
When I look at the image on the surface, the picture is distorted and stretched at the corners: https://www.dropbox.com/s/ul5enua7tb07zkr/shot_000002.png?dl=0
But if I capture a photo, everything is OK: https://www.dropbox.com/s/zibn89tl38a8eur/1424809721079.jpg?dl=0
Please help: what did I write wrong, and how can I fix it?
Thank you.
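One likely culprit is the hard-coded param.setPreviewSize(600, 900): if the camera does not support that size, the driver stretches the preview to fit. A minimal sketch of choosing a supported preview size whose aspect ratio matches the 90-degree-rotated view instead (bestPreviewSize() is an illustrative helper, not part of the original code); note the SurfaceView itself also needs to be laid out with a matching aspect ratio, or the preview will still be scaled to fill it:
// Pick the supported preview size whose aspect ratio is closest to the view's.
// With setDisplayOrientation(90), the camera's width maps to the view's height.
private Camera.Size bestPreviewSize(Camera.Parameters params, int viewW, int viewH) {
    double target = (double) viewH / viewW;
    Camera.Size best = null;
    double minDiff = Double.MAX_VALUE;
    for (Camera.Size s : params.getSupportedPreviewSizes()) {
        double diff = Math.abs((double) s.width / s.height - target);
        if (diff < minDiff) {
            minDiff = diff;
            best = s;
        }
    }
    return best;
}

// In surfaceCreated(), instead of param.setPreviewSize(600, 900):
Camera.Size size = bestPreviewSize(param, surfaceView.getWidth(), surfaceView.getHeight());
param.setPreviewSize(size.width, size.height);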
I'm going to develop an Android application using the BeyondAR framework. I'm trying to use a CameraView component in the top half of the screen (the app runs only in portrait mode), but when I rotate the camera 90 degrees the image stretches and the aspect ratio is wrong. Any help?
CameraView class (Beyondar framework)
public class CameraView extends SurfaceView implements SurfaceHolder.Callback,
Camera.PictureCallback {
/**
*
* @author Joan Puig Sanz (joanpuigsanz@gmail.com)
*
*/
public static interface IPictureCallback {
/**
* This method is called when the snapshot of the camera is ready. If
* there is an error, the image will be null
*
* @param picture
*/
void onPictureTaken(Bitmap picture);
}
private SurfaceHolder mHolder;
private Camera mCamera;
private IPictureCallback mCameraCallback;
private BitmapFactory.Options mOptions;
private Size mPreviewSize;
private List<Size> mSupportedPreviewSizes;
private List<String> mSupportedFlashModes;
public CameraView(Context context) {
super(context);
init(context);
}
public CameraView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init(context);
}
public CameraView(Context context, AttributeSet attrs) {
super(context, attrs);
init(context);
}
private void init(Context context) {
mHolder = getHolder();
mHolder.addCallback(this);
try {
mCamera = Camera.open();
//mCamera.setDisplayOrientation(90);
//Camera.Parameters params = mCamera.getParameters();
//params.setPreviewSize(427, 1240);
//mCamera.setParameters(params);
setCamera(mCamera);
} catch (Exception e) {
Log.e(Constants.TAG, "ERROR: Unable to open the camera", e);
}
if (android.os.Build.VERSION.SDK_INT <= 10) {// Android 2.3.x or lower
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
}
public void setCamera(Camera camera) {
mCamera = camera;
if (mCamera != null) {
mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes();
mSupportedFlashModes = mCamera.getParameters().getSupportedFlashModes();
// Set the camera to Auto Flash mode.
if (mSupportedFlashModes != null
&& mSupportedFlashModes.contains(Camera.Parameters.FLASH_MODE_AUTO)) {
Camera.Parameters parameters = mCamera.getParameters();
parameters.setFlashMode(Camera.Parameters.FLASH_MODE_AUTO);
//parameters.setPreviewSize(300, 200);
mCamera.setParameters(parameters);
}
}
}
public void setSupportedPreviewSizes(List<Size> supportedPreviewSizes) {
mSupportedPreviewSizes = supportedPreviewSizes;
}
public Size getPreviewSize() {
return mPreviewSize;
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
try {
if (mCamera == null) {
init(getContext());
if (mCamera == null) {
return;
}
}
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
if (mCamera != null) {
mCamera.release();
}
mCamera = null;
Log.e(Constants.TAG, "CameraView -- ERROR en SurfaceCreated", exception);
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// Surface will be destroyed when we return, so stop the preview.
// Because the CameraDevice object is not a shared resource, it's very
// important to release it when the activity is paused.
if (mCamera == null) {
return;
}
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
setMeasuredDimension(width, height);
if (mSupportedPreviewSizes != null) {
mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height);
}
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
}
private Size getOptimalPreviewSize(List<Size> sizes, int width, int height) {
Size result = null;
for (Camera.Size size : sizes) {
if (size.width <= width && size.height <= height) {
if (result == null) {
result = size;
} else {
int resultArea = result.width * result.height;
int newArea = size.width * size.height;
if (newArea > resultArea) {
result = size;
}
}
}
}
return result;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if (mCamera == null || getPreviewSize() == null) {
return;
}
Camera.Parameters parameters = mCamera.getParameters();
Size previewSize = getPreviewSize();
parameters.setPreviewSize(previewSize.width, previewSize.height);
mCamera.setParameters(parameters);
previewCamera();
}
@Override
public void onPictureTaken(byte[] imageData, Camera camera) {
if (imageData != null && mCameraCallback != null) {
mCameraCallback.onPictureTaken(StoreByteImage(imageData));
}
previewCamera();
}
public void previewCamera() {
if (mCamera == null){
return;
}
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception e) {
Log.d(Constants.TAG, "Cannot start preview.", e);
}
}
private Bitmap StoreByteImage(byte[] imageData) {
Bitmap myImage = DebugBitmap.decodeByteArray(imageData, 0, imageData.length, mOptions);
imageData = null;
System.gc();
return myImage;
}
public void tackePicture(IPictureCallback cameraCallback) {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 4;
tackePicture(cameraCallback, options);
}
public void tackePicture(IPictureCallback cameraCallback, BitmapFactory.Options options) {
if (mCamera == null) {
return;
}
mCameraCallback = cameraCallback;
mCamera.takePicture(null, this, this);
mOptions = options;
}
}
Edit
My layout XML file:
<LinearLayout
android:layout_width="match_parent"
android:layout_height="0dip"
android:layout_weight="1"
android:orientation="horizontal"
android:paddingBottom="#dimen/padding"
android:paddingLeft="#dimen/padding"
android:paddingRight="#dimen/padding"
android:paddingTop="#dimen/padding" >
<FrameLayout
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="match_parent"
>
<com.beyondar.android.opengl.views.BeyondarGLSurfaceView
android:id="#+id/customGLSurface"
android:layout_width="match_parent"
android:layout_height="match_parent" />
<com.beyondar.android.views.CameraView
android:id="#+id/camera"
android:layout_width="fill_parent"
android:layout_height="fill_parent" />
<TextView
android:id="#+id/labelText"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Touch an AR Object"
android:background="#000000"
android:textColor="#FFFFFF"
/>
</FrameLayout>
</LinearLayout>
If you look at the example application for BeyondAR, you can see that it has a similar problem: the camera's image gets stretched to fill the screen, so it doesn't have the proper aspect ratio in either landscape or portrait. This is a common problem when working with the camera preview, since it locks to a particular orientation when you first start it.
In order to get that, you need to resize the view on rotation to an aspect ratio that matches the device's camera. Here's the official Android guide.
Notice the part specifically called "Set the Preview Orientation".
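The orientation part of that guide boils down to a helper like the setCameraDisplayOrientation() method shown in the Camera.setDisplayOrientation() documentation, roughly:
public static void setCameraDisplayOrientation(Activity activity, int cameraId, Camera camera) {
    Camera.CameraInfo info = new Camera.CameraInfo();
    Camera.getCameraInfo(cameraId, info);
    int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
    int degrees = 0;
    switch (rotation) {
        case Surface.ROTATION_0: degrees = 0; break;
        case Surface.ROTATION_90: degrees = 90; break;
        case Surface.ROTATION_180: degrees = 180; break;
        case Surface.ROTATION_270: degrees = 270; break;
    }
    int result;
    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
        result = (info.orientation + degrees) % 360;
        result = (360 - result) % 360; // compensate for the front camera mirror
    } else { // back-facing camera
        result = (info.orientation - degrees + 360) % 360;
    }
    camera.setDisplayOrientation(result);
}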
BeyondAR has been updated, now it is way easier thanks to fragments. Check the web page and update your library:
https://github.com/BeyondAR/beyondar