Is it possible to connect your phone to your computer via a USB cable and use it as a webcam? I know there is plenty of software that allows this (at least wirelessly), but how do I use it in OpenCV (I am using Java)?
I think it's possible, but I have not tested it. First, download an application that lets you use your phone as a webcam, then connect to it via VideoCapture(0) if it's the only webcam attached to your system. This code should be good for testing; if everything works you should see a captured frame in a JFrame:
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Image;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.awt.image.WritableRaster;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.InputStream;
import javax.imageio.ImageIO;
import javax.swing.JFrame;
import javax.swing.JPanel;
import org.opencv.core.*;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;
public class JPanelOpenCV extends JPanel{
BufferedImage image;
public static void main (String args[]) throws InterruptedException{
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
JPanelOpenCV t = new JPanelOpenCV();
VideoCapture camera = new VideoCapture(0);
Mat frame = new Mat();
if(!camera.isOpened()){
System.out.println("Error: camera is not opened");
}
else {
while(true){
if (camera.read(frame)){
BufferedImage image = t.MatToBufferedImage(frame);
t.window(image, "Original Image", 0, 0);
t.window(t.grayscale(image), "Processed Image", 40, 60);
//t.window(t.loadImage("ImageName"), "Image loaded", 0, 0);
break; // shows a single test frame; remove this break to keep reading frames
}
}
}
camera.release();
}
@Override
public void paint(Graphics g) {
g.drawImage(image, 0, 0, this);
}
public JPanelOpenCV() {
}
public JPanelOpenCV(BufferedImage img) {
image = img;
}
//Show image on window
public void window(BufferedImage img, String text, int x, int y) {
JFrame frame0 = new JFrame();
frame0.getContentPane().add(new JPanelOpenCV(img));
frame0.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
frame0.setTitle(text);
frame0.setSize(img.getWidth(), img.getHeight() + 30);
frame0.setLocation(x, y);
frame0.setVisible(true);
}
//Load an image
public BufferedImage loadImage(String file) {
BufferedImage img;
try {
File input = new File(file);
img = ImageIO.read(input);
return img;
} catch (Exception e) {
System.out.println("erro");
}
return null;
}
//Save an image
public void saveImage(BufferedImage img) {
try {
File outputfile = new File("Images/new.png");
ImageIO.write(img, "png", outputfile);
} catch (Exception e) {
System.out.println("error");
}
}
//Grayscale filter
public BufferedImage grayscale(BufferedImage img) {
for (int i = 0; i < img.getHeight(); i++) {
for (int j = 0; j < img.getWidth(); j++) {
Color c = new Color(img.getRGB(j, i));
int red = (int) (c.getRed() * 0.299);
int green = (int) (c.getGreen() * 0.587);
int blue = (int) (c.getBlue() * 0.114);
Color newColor =
new Color(
red + green + blue,
red + green + blue,
red + green + blue);
img.setRGB(j, i, newColor.getRGB());
}
}
return img;
}
public BufferedImage MatToBufferedImage(Mat frame) {
//Mat() to BufferedImage
int type = 0;
if (frame.channels() == 1) {
type = BufferedImage.TYPE_BYTE_GRAY;
} else if (frame.channels() == 3) {
type = BufferedImage.TYPE_3BYTE_BGR;
}
BufferedImage image = new BufferedImage(frame.width(), frame.height(), type);
WritableRaster raster = image.getRaster();
DataBufferByte dataBuffer = (DataBufferByte) raster.getDataBuffer();
byte[] data = dataBuffer.getData();
frame.get(0, 0, data);
return image;
}
}
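If more than one capture device is attached (for example a built-in webcam plus the phone), index 0 may not be the phone. As a small sketch (not part of the original code), you can probe the first few indices to find the device that opens:
// Probe the first few device indices and print the ones that open.
for (int index = 0; index < 5; index++) {
    VideoCapture probe = new VideoCapture(index);
    if (probe.isOpened()) {
        System.out.println("Camera found at index " + index);
    }
    probe.release();
}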
P.S. I do think your question is off-topic, too.
You can simply create a camera activity and use the code given below:
package com.techamongus.opencvcamera;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.ContentValues;
import android.content.Intent;
import android.hardware.Camera;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.v7.app.ActionBarActivity;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SubMenu;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Toast;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.JavaCameraView;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import java.io.File;
import java.util.List;
public class CameraActivity extends Activity implements CameraBridgeViewBase.CvCameraViewListener2 {
// A tag for log output.
private static final String TAG =
CameraActivity.class.getSimpleName();
// A key for storing the index of the active camera.
private static final String STATE_CAMERA_INDEX = "cameraIndex";
// A key for storing the index of the active image size.
private static final String STATE_IMAGE_SIZE_INDEX =
"imageSizeIndex";
// An ID for items in the image size submenu.
private static final int MENU_GROUP_ID_SIZE = 2;
// The index of the active camera.
private int mCameraIndex;
// The index of the active image size.
private int mImageSizeIndex;
// Whether the active camera is front-facing.
// If so, the camera view should be mirrored.
private boolean mIsCameraFrontFacing;
// The number of cameras on the device.
private int mNumCameras;
// The camera view.
private CameraBridgeViewBase mCameraView;
// The image sizes supported by the active camera.
private List<Camera.Size> mSupportedImageSizes;
// Whether the next camera frame should be saved as a photo.
private boolean mIsPhotoPending;
// A matrix that is used when saving photos.
private Mat mBgr;
// Whether an asynchronous menu action is in progress.
// If so, menu interaction should be disabled.
private boolean mIsMenuLocked;
// The OpenCV loader callback.
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(final int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
Log.d(TAG, "OpenCV loaded successfully");
mCameraView.enableView();
mBgr = new Mat();
break;
default:
super.onManagerConnected(status);
break;
}
}
};
@SuppressLint("NewApi")
@Override
protected void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
final Window window = getWindow();
window.addFlags(
WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
if (savedInstanceState != null) {
mCameraIndex = savedInstanceState.getInt(
STATE_CAMERA_INDEX, 0);
mImageSizeIndex = savedInstanceState.getInt(
STATE_IMAGE_SIZE_INDEX, 0);
} else {
mCameraIndex = 0;
mImageSizeIndex = 0;
}
final Camera camera;
if (Build.VERSION.SDK_INT >=
Build.VERSION_CODES.GINGERBREAD) {
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
Camera.getCameraInfo(mCameraIndex, cameraInfo);
mIsCameraFrontFacing =
(cameraInfo.facing ==
Camera.CameraInfo.CAMERA_FACING_FRONT);
mNumCameras = Camera.getNumberOfCameras();
camera = Camera.open(mCameraIndex);
} else { // pre-Gingerbread
// Assume there is only 1 camera and it is rear-facing.
mIsCameraFrontFacing = false;
mNumCameras = 1;
camera = Camera.open();
}
final Camera.Parameters parameters = camera.getParameters();
camera.release();
mSupportedImageSizes =
parameters.getSupportedPreviewSizes();
final Camera.Size size = mSupportedImageSizes.get(mImageSizeIndex);
mCameraView = new JavaCameraView(this, mCameraIndex);
mCameraView.setMaxFrameSize(size.width, size.height);
mCameraView.setCvCameraViewListener(this);
setContentView(mCameraView);
}
public void onSaveInstanceState(Bundle savedInstanceState) {
// Save the current camera index.
savedInstanceState.putInt(STATE_CAMERA_INDEX, mCameraIndex);
// Save the current image size index.
savedInstanceState.putInt(STATE_IMAGE_SIZE_INDEX,
mImageSizeIndex);
super.onSaveInstanceState(savedInstanceState);
}
@SuppressLint("NewApi")
@Override
public void recreate() {
if (Build.VERSION.SDK_INT >=
Build.VERSION_CODES.HONEYCOMB) {
super.recreate();
} else {
finish();
startActivity(getIntent());
}
}
@Override
public void onPause() {
if (mCameraView != null) {
mCameraView.disableView();
}
super.onPause();
}
@Override
public void onResume() {
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_2_0,
this, mLoaderCallback);
mIsMenuLocked = false;
}
@Override
public void onDestroy() {
if (mCameraView != null) {
mCameraView.disableView();
}
super.onDestroy();
}
public Mat onCameraFrame(final CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
final Mat rgba = inputFrame.rgba();
if (mIsPhotoPending) {
mIsPhotoPending = false;
//takePhoto(rgba);
}
if (mIsCameraFrontFacing) {
// Mirror (horizontally flip) the preview.
Core.flip(rgba, rgba, 1);
}
return rgba;
}
}
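The takePhoto(rgba) call above is commented out. As a rough sketch of what such a method might look like (my assumption, not the original implementation; it reuses the mBgr field and the Imgcodecs, Imgproc, Environment and File imports already present, and the output path is illustrative only):
private void takePhoto(final Mat rgba) {
    // Imgcodecs.imwrite expects BGR channel order, so convert from RGBA first.
    Imgproc.cvtColor(rgba, mBgr, Imgproc.COLOR_RGBA2BGR, 3);
    // Illustrative path; a real app should use MediaStore and check storage state.
    String photoPath = Environment.getExternalStorageDirectory() + File.separator + "photo.png";
    if (!Imgcodecs.imwrite(photoPath, mBgr)) {
        Log.e(TAG, "Failed to save photo to " + photoPath);
    }
}
This method would go inside CameraActivity, and mIsPhotoPending would be set from a menu handler.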
Disclaimer: check my blog post for more details:
http://www.techamongus.com/2017/03/android-camera-in-opencv.html
I'm working on writing a JWindow-type UI with OpenGL using libgdx. For some reason my app still has a black background, even after enabling GL20.GL_BLEND.
Here is my code:
package com.mygdx.game;
import com.badlogic.gdx.ApplicationAdapter;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.utils.viewport.FitViewport;
public class MyGdxGame extends ApplicationAdapter {
Stage stage;
SpriteBatch batch;
ShapeRenderer shapeRenderer;
@Override
public void create() {
stage = new Stage(new FitViewport(2560, 1440));
batch = new SpriteBatch();
shapeRenderer = new ShapeRenderer();
shapeRenderer.setAutoShapeType(true);
}
@Override
public void render() {
Gdx.gl.glEnable(GL20.GL_BLEND);
Gdx.gl.glBlendFunc(GL20.GL_SRC_ALPHA, GL20.GL_ONE_MINUS_SRC_ALPHA);
Gdx.gl.glClearColor(0, 0, 0, .25f);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
batch.begin();
shapeRenderer.begin(ShapeRenderer.ShapeType.Line);
shapeRenderer.setColor(Color.RED);
shapeRenderer.rect(100, 25, 200, 450);
shapeRenderer.set(ShapeRenderer.ShapeType.Filled);
shapeRenderer.setColor(Color.GREEN);
shapeRenderer.rect(303, 24, 10, 451);
shapeRenderer.end();
batch.end();
Gdx.gl.glDisable(GL20.GL_BLEND);
}
@Override
public void resize(int width, int height) {
stage.getViewport().update(width, height, true);
}
@Override
public void dispose() {
stage.dispose();
batch.dispose();
shapeRenderer.dispose();
}
}
I've found a thread of someone asking the same thing, but in C (How to make an OpenGL rendering context with transparent background?). I even tried adding an LwjglAWTCanvas to a JWindow, but I'm still unable to make the background transparent.
I'd like to have a clear background so I can draw rects/shapes over the user's screen.
Thanks!
I was able to make this work on Windows using JNA.
import com.badlogic.gdx.backends.lwjgl.LwjglApplication;
import com.badlogic.gdx.backends.lwjgl.LwjglApplicationConfiguration;
import com.badlogic.gdx.graphics.Color;
import com.mygdx.game.CharlatanoOverlay;
import com.sun.jna.Pointer;
import com.sun.jna.platform.win32.User32;
import com.sun.jna.platform.win32.WinDef;
import com.sun.jna.platform.win32.WinUser;
import static com.sun.jna.platform.win32.WinUser.*;
public class DesktopLauncher {
public static final WinDef.DWORD DWM_BB_ENABLE = new WinDef.DWORD(0x00000001);
public static final WinDef.DWORD DWM_BB_BLURREGION = new WinDef.DWORD(0x00000002);
public static final WinDef.DWORD DWM_BB_TRANSITIONONMAXIMIZED = new WinDef.DWORD(0x00000004);
public static final WinDef.HWND HWND_TOPPOS = new WinDef.HWND(new Pointer(-1));
private static final int SWP_NOSIZE = 0x0001;
private static final int SWP_NOMOVE = 0x0002;
private static final int WS_EX_TOOLWINDOW = 0x00000080;
private static final int WS_EX_APPWINDOW = 0x00040000;
public static void main(String[] arg) {
LwjglApplicationConfiguration cfg = new LwjglApplicationConfiguration();
System.setProperty("org.lwjgl.opengl.Window.undecorated", "true");
cfg.width = LwjglApplicationConfiguration.getDesktopDisplayMode().width-1;
cfg.height = LwjglApplicationConfiguration.getDesktopDisplayMode().height-1;
cfg.resizable = false;
cfg.fullscreen = false;
cfg.initialBackgroundColor = new Color(0, 0, 0, 0);
new LwjglApplication(new CharlatanoOverlay(), cfg);
WinDef.HWND hwnd;
while ((hwnd = User32.INSTANCE.FindWindow(null, "CharlatanoOverlay")) == null) {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
System.out.println(hwnd);
System.out.println(transparentWindow(hwnd));
}
public static boolean transparentWindow(WinDef.HWND hwnd) {
DWM_BLURBEHIND bb = new DWM_BLURBEHIND();
bb.dwFlags = DWM_BB_ENABLE;
bb.fEnable = true;
bb.hRgnBlur = null;
DWM.DwmEnableBlurBehindWindow(hwnd, bb);
int wl = User32.INSTANCE.GetWindowLong(hwnd, WinUser.GWL_EXSTYLE);
wl = wl | WinUser.WS_EX_LAYERED | WinUser.WS_EX_TRANSPARENT;
User32.INSTANCE.SetWindowLong(hwnd, WinUser.GWL_EXSTYLE, wl);
wl &= ~(WS_VISIBLE);
wl |= WS_EX_TOOLWINDOW; // flags don't work - windows remains in taskbar
wl &= ~(WS_EX_APPWINDOW);
User32.INSTANCE.ShowWindow(hwnd, SW_HIDE); // hide the window
User32.INSTANCE.SetWindowLong(hwnd, GWL_STYLE, wl); // set the style
User32.INSTANCE.ShowWindow(hwnd, SW_SHOW); // show the window for the new style to come into effect
User32.INSTANCE.SetWindowLong(hwnd, WinUser.GWL_EXSTYLE, wl);
return User32.INSTANCE.SetWindowPos(hwnd, HWND_TOPPOS, 0, 0, 2560, 1440, SWP_NOMOVE | SWP_NOSIZE);
}
}
DWM:
import com.sun.jna.Native;
import com.sun.jna.platform.win32.WinDef;
import com.sun.jna.platform.win32.WinNT;
public class DWM {
static {
Native.register("Dwmapi");
}
public static native WinNT.HRESULT DwmEnableBlurBehindWindow(WinDef.HWND hWnd, DWM_BLURBEHIND pBlurBehind);
}
DWM_BLURBEHIND:
import com.sun.jna.Structure;
import com.sun.jna.platform.win32.WinDef;
import java.util.Arrays;
import java.util.List;
public class DWM_BLURBEHIND extends Structure {
public WinDef.DWORD dwFlags;
public boolean fEnable;
public WinDef.HRGN hRgnBlur;
public boolean fTransitionOnMaximized;
@Override
protected List<String> getFieldOrder() {
return Arrays.asList("dwFlags", "fEnable", "hRgnBlur", "fTransitionOnMaximized");
}
}
I got the video stream from a CCTV camera using OpenCV. Now I want to detect simple motion / objects in this video stream. For example, if any person comes into a selected zone, a rectangular border should be drawn around them. I searched some OpenCV tutorials, but I didn't find a proper solution or idea. I used the following code to display the video stream.
import java.awt.Graphics;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JButton;
import javax.swing.JOptionPane;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
public class VideoStream
{
static BufferedImage tmpImg=null;
public static void main(String args[]) throws InterruptedException
{
System.out.println("opencv start..");
// Load native library
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
VideoCapture camView=new VideoCapture();
camView.open("http://192.168.1.7:80/cgi-bin/view.cgi?chn=6&u=admin&p=");
if(!camView.isOpened())
{
System.out.println("Camera Error..");
}
else
{
System.out.println("Camera successfully opened");
}
videoCamera cam=new videoCamera(camView);
//Initialize swing components
JFrame frame = new JFrame();
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
frame.add(cam);
frame.setSize(1024,768);
frame.setVisible(true);
while(camView.isOpened())
{
cam.repaint();
}
}
}
@SuppressWarnings("serial")
class videoCamera extends JPanel
{
VideoCapture camera;
Mat mat=new Mat();
public videoCamera(VideoCapture cam)
{
camera = cam;
}
public BufferedImage Mat2BufferedImage(Mat m)
{
int type = BufferedImage.TYPE_BYTE_GRAY;
if (m.channels() > 1)
{
type = BufferedImage.TYPE_3BYTE_BGR;
}
int bufferSize = m.channels() * m.cols() * m.rows();
byte[] b = new byte[bufferSize];
m.get(0, 0, b); // get all the pixels
BufferedImage img = new BufferedImage(m.cols(), m.rows(), type);
final byte[] targetPixels = ((DataBufferByte) img.getRaster().getDataBuffer()).getData();
System.arraycopy(b, 0, targetPixels, 0, b.length);
return img;
}
@Override
public void paintComponent(Graphics g)
{
super.paintComponent(g);
Mat mat = new Mat();
camera.read(mat);
BufferedImage image = Mat2BufferedImage(mat);
g.drawImage(image,0,0,image.getWidth(),image.getHeight(), null);
}
}
Does anyone know how we could do that?
Please check this code; however, I implemented it in Python.
import cv2, time, pandas
from datetime import datetime
first_frame = None
status_list = [None,None]
times = []
df=pandas.DataFrame(columns=["Start","End"])
video = cv2.VideoCapture('rtsp://admin:Paxton10#10.199.27.128:554')
while True:
check, frame = video.read()
status = 0
gray = cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray,(21,21),0)
if first_frame is None:
first_frame=gray
continue
delta_frame=cv2.absdiff(first_frame,gray)
thresh_frame=cv2.threshold(delta_frame, 30, 255, cv2.THRESH_BINARY)[1]
thresh_frame=cv2.dilate(thresh_frame, None, iterations=2)
(cnts,_)=cv2.findContours(thresh_frame.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
for contour in cnts:
if cv2.contourArea(contour) < 200000:
continue
status=1
(x, y, w, h)=cv2.boundingRect(contour)
cv2.rectangle(frame, (x, y), (x+w, y+h), (0,255,0), 3)
status_list.append(status)
status_list=status_list[-2:]
if status_list[-1]==1 and status_list[-2]==0:
times.append(datetime.now())
if status_list[-1]==0 and status_list[-2]==1:
times.append(datetime.now())
#cv2.imshow("Gray Frame",gray)
#cv2.imshow("Delta Frame",delta_frame)
imS = cv2.resize(thresh_frame, (640, 480))
cv2.imshow("Threshold Frame",imS)
imS = cv2.resize(frame, (640, 480))
cv2.imshow("Color Frame",imS)
#cv2.imshow("Color Frame",frame)
key=cv2.waitKey(1)
if key == ord('q'):
if status == 1:
times.append(datetime.now())
break
print(status_list)
print(times)
for i in range(0, len(times), 2):
df = df.append({"Start": times[i],"End": times[i+1]}, ignore_index=True)
df.to_csv("Times.csv")
video.release()
cv2.destroyAllWindows()
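Since the question uses the OpenCV Java bindings, here is a rough Java sketch of the same background-difference idea (my translation, assuming the 2.4-era API the question imports, plus MatOfPoint, Rect, Scalar, Size and java.util.ArrayList; frame is the current capture and firstFrame the grayscale reference):
// Convert to grayscale and blur to suppress sensor noise.
Mat gray = new Mat();
Imgproc.cvtColor(frame, gray, Imgproc.COLOR_BGR2GRAY);
Imgproc.GaussianBlur(gray, gray, new Size(21, 21), 0);
// Difference against the reference frame, then threshold and dilate.
Mat delta = new Mat();
Core.absdiff(firstFrame, gray, delta);
Imgproc.threshold(delta, delta, 30, 255, Imgproc.THRESH_BINARY);
Imgproc.dilate(delta, delta, new Mat());
// Box every sufficiently large contour; tune the area threshold to your scene.
List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
Imgproc.findContours(delta, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
for (MatOfPoint contour : contours) {
    if (Imgproc.contourArea(contour) < 5000) {
        continue;
    }
    Rect box = Imgproc.boundingRect(contour);
    Core.rectangle(frame, box.tl(), box.br(), new Scalar(0, 255, 0), 3);
}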
I am making a game which requires the sprites to move towards the bottom of the screen. You can imagine it a bit like a rhythm game (Guitar Hero). However, I am having problems: the sprites are not moving at all, even though the loop seems to work fine.
The GamePanel:
package com.jollygent.tapthepointer;
import java.util.ArrayList;
import java.util.Random;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class GamePanel extends SurfaceView{
SurfaceHolder holder;
GameLoopThread loop;
GamePanel game = this;
Random rand;
int y;
//ArrayList<Pointers> pointers = new ArrayList<Pointers>();
Pointers pointers;
public GamePanel(Context context) {
super(context);
holder = getHolder();
loop = new GameLoopThread(this);
rand = new Random();
pointers = new Pointers(rand.nextInt(4)+1,getWidth()/2,0,this);
//placeholder
this.setBackgroundColor(Color.WHITE);
holder.addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceDestroyed(SurfaceHolder arg0) {
// TODO Auto-generated method stub
loop.setRunning(false);
}
@Override
public void surfaceCreated(SurfaceHolder arg0) {
loop = new GameLoopThread(game);
if(!loop.isRunning()){
loop.setRunning(true);
loop.start();
}
else{
loop.setPause(false);
}
}
@Override
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
// TODO Auto-generated method stub
}
});
}
public GameLoopThread getLoop(){
return loop;
}
public void update(){
}
public void draw(Canvas canvas){
pointers.draw(canvas);
Paint paint = new Paint();
paint.setColor(Color.BLACK);
paint.setStrokeWidth(20);
paint.setStyle(Paint.Style.STROKE);
canvas.drawLine(0,getHeight()/2 + 50,getWidth(),getHeight()/2 + 50, paint);
}
}
The Pointer class
package com.jollygent.tapthepointer;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Matrix;
public class Pointers {
GamePanel game;
private int x,y;
Bitmap pointer;
Bitmap rotated_pointer;
Matrix matrix;
public Pointers(int type,int x,int y,GamePanel game){
this.game = game;
this.x = x;
this.y = y;
pointer = BitmapFactory.decodeResource(game.getResources(),R.drawable.pointer_xx);
matrix = new Matrix();
switch(type){
//points right
case 1:
matrix.postRotate(90);
break;
//points left
case 2:
matrix.postRotate(-90);
break;
//points up
case 3:
rotated_pointer = pointer;
break;
//points down
case 4:
matrix.postRotate(180);
break;
}
rotated_pointer = Bitmap.createBitmap(pointer,0,0,pointer.getHeight(),pointer.getWidth(), matrix,true);
}
public void update(){
/*if(y < game.getHeight()){
y += pointer.getHeight();
}*/
}
public int getHeight(){
return pointer.getHeight();
}
public void draw(Canvas canvas){
y++;//placeholder movement to see if the sprite actually moves. It doesn't.
canvas.drawBitmap(rotated_pointer,x,y,null);
}
}
Finally, I think it's necessary to post the GameLoopThread:
package com.jollygent.tapthepointer;
import android.graphics.Canvas;
public class GameLoopThread extends Thread{
GamePanel game;
boolean running = false;
boolean paused = false;
Canvas canvas;
static final int FPS = 10;
public GameLoopThread(GamePanel game){
this.game = game;
}
public boolean isRunning(){
return running;
}
public void setRunning(boolean b){
this.running = b;
}
public void setPause(boolean b){
this.paused = b;
}
@Override
public void run() {
// TODO Auto-generated method stub
long beginTime;
long ticks = 1000/FPS;
long sleepTime;
while(running){
if(!paused){
beginTime = System.currentTimeMillis();
canvas = null;
try{
canvas = game.holder.lockCanvas();
synchronized(game.getHolder()){
game.draw(canvas);
}
}finally{
if(canvas != null){
game.getHolder().unlockCanvasAndPost(canvas);
}
}
sleepTime = (System.currentTimeMillis() - beginTime)/ticks;
try{
if(sleepTime > 0)
Thread.sleep(sleepTime);
else
Thread.sleep(10);
}catch(Exception e){
e.printStackTrace();
}
}
}
}
}
I'm probably making a very stupid mistake, but no matter how much I scan the code and change a few bits around, I can't seem to figure out how exactly to move the sprite. Also, how can I make it so the sprite is centered exactly on any device?
Thank you.
EDIT:
package com.jollygent.tapthepointer;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
public class GameActivity extends Activity {
GamePanel game;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
game = new GamePanel(this);
setContentView(game);
}
@Override
public void onPause(){
super.onPause();
game.getLoop().setPause(true);
}
@Override
public void onResume(){
super.onResume();
game.getLoop().setPause(false);
}
}
The problem is that the loop runs once, and once only. Perhaps the GameActivity is the problem?
I have a Mat difference which has some black pixels (or really nearly black pixels: if an earthquake occurs, a building will move, etc.) in it, and a Mat current which contains the real image with natural colors. I would like to replace pixels in Mat current with these black pixels from Mat difference. If I do it manually like this:
for(int i = 0; i < difference.rows(); i++){
for(int j = 0; j < difference.cols(); j++){
subtractedPixel = difference.get(i, j);
if(subtractedPixel[0] < 10 && subtractedPixel[1] < 10 && subtractedPixel[2] < 10){
originalPixel = current.get(i, j);
difference.put(i, j, originalPixel);
}
}
}
it is extremely slow.
Full code (running):
package com.example.szpieg2;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Range;
import org.opencv.core.Size;
import android.app.Activity;
import android.content.pm.ActivityInfo;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.Window;
import android.view.WindowManager;
public class TrackActivity extends Activity implements CvCameraViewListener {
private Mat current;
private CameraBridgeViewBase cameraView;
private Mat previous;
private Mat difference;//difference between previous and current
private boolean first = true;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS: {
Log.i("Co sie dzieje?", "OpenCV loaded successfully");
cameraView.enableView();
// cameraView.setOnTouchListener(ColorBlobDetectionActivity.this);
}
break;
default: {
super.onManagerConnected(status);
}
break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
setContentView(R.layout.activity_track);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
cameraView = (CameraBridgeViewBase) findViewById(R.id.surface_view);
cameraView.setCvCameraViewListener(this);
}
// --------Activity Actions---------
@Override
public void onPause() {
if (cameraView != null)
cameraView.disableView();
super.onPause();
}
@Override
public void onResume() {
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this,
mLoaderCallback);
}
public void onDestroy() {
super.onDestroy();
if (cameraView != null)
cameraView.disableView();
}
// --------/Activity Actions/---------
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.activity_track, menu);
return true;
}
// --------listener method implementation-------------
@Override
public void onCameraViewStarted(int width, int height) {
/*current = new Mat(width, height, CvType.CV_64FC4);
previous = new Mat(width, height, CvType.CV_64FC4);
difference = new Mat(width, height, CvType.CV_64FC4);*/
current = new Mat(width, height, CvType.CV_8UC4);
previous = new Mat(width, height, CvType.CV_8UC4);
difference = new Mat(width, height, CvType.CV_8UC4);//RGBA 0..255
}
@Override
public void onCameraViewStopped() {
current.release();
}
@Override
public Mat onCameraFrame(Mat inputFrame) {
inputFrame.copyTo(current);
if(first){//first is true at the first time
inputFrame.copyTo(previous);
first = false;
Log.i("First processing", "Pierwszy przebieg");
}
Core.absdiff(current, previous, difference);
// Core.absdiff( previous,current, difference);
//I leave black pixels and load original colors
double[] subtractedPixel, originalPixel;
String s = "";
for(int i = 0; i < difference.rows(); i++){
for(int j = 0; j < difference.cols(); j++){
subtractedPixel = difference.get(i, j);
if(subtractedPixel[0] < 10 && subtractedPixel[1] < 10 && subtractedPixel[2] < 10){
originalPixel = previous.get(i, j);
difference.put(i, j, originalPixel);
}
}
// s+="\n";
}
// Log.i("mat ", s);
// Log.i("mat ", difference.get(44,444)[0] + "");
//---------------------------------------------
inputFrame.copyTo(previous);
return difference;//UNREAL COLORS
}
}
You can do the following.
Threshold your image using inRange():
Mat mask = new Mat();
Core.inRange(difference, new Scalar(0, 0, 0), new Scalar(9, 9, 9), mask);
The output mask will be a binary mask where a pixel is set to 255 if it falls within the range, i.e. all its channels are at or below the threshold Scalar(9, 9, 9).
Copy the difference image into the current image through the mask using copyTo():
difference.copyTo(current, mask);
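One detail worth noting (my addition): the Mats in the question are CV_8UC4 (RGBA), so the alpha channel, which is typically 255, would keep every pixel out of a three-component range and leave the mask empty. A sketch with a four-component bound:
// Mask pixels whose R, G and B are all <= 9; alpha may be anything.
Mat mask = new Mat();
Core.inRange(difference, new Scalar(0, 0, 0, 0), new Scalar(9, 9, 9, 255), mask);
// Copy only the masked pixels from difference into current in one native call.
difference.copyTo(current, mask);
mask.release();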
First of all, I'm doing this in Eclipse.
1. I wish to draw a rectangle on reception of a touch event.
2. That rectangle should be persistent, and another touch event should draw another rectangle.
3. I have managed to get it persistent for a single touch event, after which it shifts according to the coordinates.
4. So basically I should have multiple rectangles due to the different touch events.
I am thinking of iterating through arrays, but I'm still confused. Please help!
This one does not work; I'm asking for improvements.
Thanks! Also, the manifest and permissions are set up properly.
The code is somewhat like:
package code.e14.balldetector;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MotionEvent;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.WindowManager;
public class MainActivity extends Activity implements CvCameraViewListener2, OnTouchListener {
private static final String TAG = "OCVBallTracker";
private CameraBridgeViewBase mOpenCvCameraView;
private boolean mIsJavaCamera = true;
private Mat mRgba;
int i=0;
private Double[] h=new Double[20];
private Double[] k=new Double[20];
private double x=0;
private double y=0;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(MainActivity.this);
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public MainActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.activity_main);
if (mIsJavaCamera)
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.java_surface_view);
else
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.native_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_5, this, mLoaderCallback);
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "called onCreateOptionsMenu");
return true;
}
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat(height, width, CvType.CV_8UC4);
}
public void onCameraViewStopped() {
mRgba.release();
}
@Override
public boolean onTouch(View arg0,MotionEvent event) {
double cols = mRgba.cols();
double rows = mRgba.rows();
double xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
double yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;
h[i] = (double)(event).getX() - xOffset;
k[i] = (double)(event).getY() - yOffset;
h[i]=x;
k[i]=y;
Log.i(TAG, "Touch image coordinates: (" + h[i] + ", " + k[i] + ")");
i++;
return false;// don't need subsequent touch events
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba=inputFrame.rgba();
Core.rectangle(mRgba, new Point(x-100,y-100),new Point(x+100,y+100),new Scalar( 0, 0, 255 ),0,8, 0 );
return mRgba;
}
}
It is quite easy to store the rectangles in a List. I have made some small adjustments that should work.
private List<Rect> ListOfRect = new ArrayList<Rect>();
@Override
public boolean onTouch(View arg0,MotionEvent event) {
double cols = mRgba.cols();
double rows = mRgba.rows();
double xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
double yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;
h[i] = (double) event.getX() - xOffset;
k[i] = (double) event.getY() - yOffset;
x = h[i];
y = k[i];
listOfRect.add(new Rect(new Point(x - 100, y - 100), new Point(x + 100, y + 100)));
Log.i(TAG, "Touch image coordinates: (" + h[i] + ", " + k[i] + ")");
i++;
return false;// don't need subsequent touch events
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
for (int i = 0; i < listOfRect.size(); i++) {
Core.rectangle(mRgba, listOfRect.get(i).tl(), listOfRect.get(i).br(), new Scalar(0, 0, 255), 0, 8, 0);
}
return mRgba;
}
However, keep in mind that you need to clear the list to free memory when you don't need the rectangles anymore. I hope it helps.
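One more point (my addition, not in the original answer): onTouch runs on the UI thread while onCameraFrame runs on the camera thread, so the plain ArrayList can be modified while the frame loop iterates over it. A minimal thread-safe sketch, assuming the same fields as above plus an import of java.util.concurrent.CopyOnWriteArrayList:
// CopyOnWriteArrayList allows safe iteration while the UI thread adds rectangles.
private final List<Rect> listOfRect = new CopyOnWriteArrayList<Rect>();

public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    // Each iteration sees a consistent snapshot even if onTouch adds concurrently.
    for (Rect r : listOfRect) {
        Core.rectangle(mRgba, r.tl(), r.br(), new Scalar(0, 0, 255), 2, 8, 0);
    }
    return mRgba;
}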