OpenGL bitmap-font color blending, antialiased - Java

I wrote myself a program that generates bitmap fonts from any installed font and exports them to PNG.
I use PNG for two reasons:
1. It preserves alpha values
2. It's an open format
It also generates an XML file so that I can read single chars.
The exported version of this bitmap font uses a white font color to allow simple color blending.
However, when I use antialiasing while exporting the bitmap font, my color blending gets ugly,
because the antialiased border preserves the original white of the font, causing half-colored edges.
Does anyone know how to avoid this effect without using a realtime font renderer or having to forgo antialiasing?
Edit1:
As you can see, when I blend with orange, I get a white border.
With black, this border is not visible, because black is 0f 0f 0f 1f (RGBA).
The moment the blending color has some similarity to white, it leaves a "whitened" border caused by the half-transparent pixels from the antialiasing.
Edit2:
Here is some code showing how I load the image into OpenGL and how I initialize GL:
initGL:
protected void initGL() {
    glEnable(GL_BLEND);
    glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
    glClearColor(0f, 0f, 0f, 0f); // Black background
    glDisable(GL_DEPTH_TEST); // Disable depth testing
    glDepthMask(false);
    glMatrixMode(GL_PROJECTION); // Select the projection matrix
    glLoadIdentity(); // Reset the projection matrix
    doResize();
    glMatrixMode(GL_MODELVIEW);
}
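A side note on the blend setup above: glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA) is the premultiplied-alpha form, i.e. it assumes the texture's RGB has already been multiplied by its alpha. If the PNG is uploaded with straight (non-premultiplied) alpha, the usual pairing would instead be the following (a sketch, not taken from the original code):
glEnable(GL_BLEND);
// straight-alpha blending: the source color is scaled by its own alpha at blend time
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
Alternatively, keep GL_ONE / GL_ONE_MINUS_SRC_ALPHA and premultiply the texel colors by alpha when the image is converted; a sketch of that is shown after loadTexture below.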
Loading the image into GL:
public static Texture loadTexture(String resName, BufferedImage image) {
    int textureID = glGenTextures(); // Generate texture ID
    Texture texture = new Texture(resName, textureID);
    int texWidth = image.getWidth();
    int texHeight = image.getHeight();
    texture.setWidth(texWidth);
    texture.setHeight(texHeight);
    ByteBuffer buffer = convertImageData(image, texture);
    texture.bind();
    // Setup wrap mode
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL12.GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL12.GL_CLAMP_TO_EDGE);
    // Setup texture scaling filtering
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    // Send texel data to OpenGL
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, texWidth, texHeight, 0, (image.getColorModel().hasAlpha() ? GL_RGBA : GL_RGB), GL_UNSIGNED_BYTE, buffer);
    // Return the texture object so it can be bound again later
    return texture;
}
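Since the blend function in initGL expects premultiplied alpha, one common remedy for bright or white fringes on antialiased glyph edges is to premultiply the RGB channels by alpha before uploading the texture. Below is a minimal sketch, assuming convertImageData currently passes the pixel data through with straight alpha; the helper name and its use are illustrative, not part of the original code:
private static BufferedImage premultiplyAlpha(BufferedImage image) {
    BufferedImage result = new BufferedImage(image.getWidth(), image.getHeight(), BufferedImage.TYPE_INT_ARGB);
    for (int y = 0; y < image.getHeight(); y++) {
        for (int x = 0; x < image.getWidth(); x++) {
            int argb = image.getRGB(x, y);
            int a = (argb >>> 24) & 0xff;
            // scale each color channel by the pixel's coverage (alpha)
            int r = ((argb >> 16) & 0xff) * a / 255;
            int g = ((argb >> 8) & 0xff) * a / 255;
            int b = (argb & 0xff) * a / 255;
            result.setRGB(x, y, (a << 24) | (r << 16) | (g << 8) | b);
        }
    }
    return result;
}
Calling image = premultiplyAlpha(image) before convertImageData(image, texture) keeps the half-transparent edge texels consistent with the GL_ONE source factor, so tinted glyphs should no longer show a pale rim.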

Related

OpenGL rendering to texture produces empty Texture

I am trying to render an ortho projection of my scene's depth values to a texture, in order to use that texture in a later render cycle to determine which fragments are in shadow. Basically a shadow map.
However, the texture that I am rendering to ends up being uniformly empty. Given that I can only really test it in a shader, I am limited in what output I can generate. However, it seems that all the z values in the texture are 0.
Here is the code that generates the texture (width and height are 1024 and pixelFormat is GL_DEPTH_COMPONENT):
this.id = glGenTextures();
glBindTexture(GL_TEXTURE_2D, id);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, width, height, 0, pixelFormat, GL_FLOAT, (ByteBuffer) null);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
return id;
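Since the question mentions being limited to shader output for debugging, one way to inspect the depth texture directly on the CPU is glGetTexImage. A rough sketch, assuming LWJGL-style static imports; the variable names are taken from the snippet above:
// read the depth texture back into client memory for inspection
FloatBuffer depthData = BufferUtils.createFloatBuffer(width * height);
glBindTexture(GL_TEXTURE_2D, id);
glGetTexImage(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, GL_FLOAT, depthData);
glBindTexture(GL_TEXTURE_2D, 0);
// a cleared but never-rendered-to depth attachment should read back as 1.0 everywhere
System.out.println("depth[0] = " + depthData.get(0));
Reading the texture back right after the shadow pass makes it easier to tell whether the problem is in the depth pass itself or in how the texture is sampled later.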
Here I create the FrameBuffer and attach the Texture:
// Create a FBO to render the depth
this.depthMapFBO = glGenFramebuffers();
// Create the depth map texture
this.depthMap = new Texture(SHADOW_MAP_WIDTH, SHADOW_MAP_HEIGHT, GL_DEPTH_COMPONENT);
// Attach the depth map texture to the FBO
glBindFramebuffer(GL_FRAMEBUFFER, depthMapFBO);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, this.depthMap.getId(), 0);
// Set only depth
glDrawBuffer(GL_NONE);
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
    throw new Exception("Could not create FrameBuffer" + glCheckFramebufferStatus(GL_FRAMEBUFFER));
}
// Unbind
glBindFramebuffer(GL_FRAMEBUFFER, 0);
Before I render my Scene I call this function to render the depth to the texture:
if (shaderMap.containsKey("shadow")) {
    shaderprogram = shaderMap.get("shadow");
}
shaderprogram.bind();
Sun sun = resourceManager.getSun();
Matrix4f LightViewMatrix = transformation.getLightViewMatrix(sun);
Matrix4f modelLightViewMatrix = transformation.getModelViewMatrix(object, LightViewMatrix);
shaderprogram.setUniform("modelLightViewMatrix",modelLightViewMatrix);
glBindFramebuffer(GL_FRAMEBUFFER,this.shadowmap.getDepthMapFBO());
glViewport(0, 0, 1024, 1024);
glClear(GL_DEPTH_BUFFER_BIT);
glEnable(GL_TEXTURE_2D);
this.shadowmap.getDepthMapTexture().bind();
glPolygonMode( GL_FRONT_AND_BACK, GL_FILL );
glBindVertexArray(object.getMesh().getVaoId());
glEnableVertexAttribArray(0);//Vertex positions
glEnableVertexAttribArray(1);//Color Positions
glEnableVertexAttribArray(2);//Normals
glDrawElements(GL_TRIANGLES, object.getMesh().getVertexcount(),GL_UNSIGNED_INT ,0);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glDisableVertexAttribArray(2);
glBindVertexArray(0);
glBindTexture(GL_TEXTURE_2D,0);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
shaderprogram.unbind();
I can post the matrices for OrthogonalViewMatrix and LightViewMatrix if needed, but I did test them and rendered my scene with them, and it gives the desired effect of the camera flying over the terrain and looking at the center of the map. Basically how you would imagine the scene to look if the camera were the sun. So I don't think there is anything wrong with them.
This is my second render with normal projections. Basically the normal Camera:
shaderprogram.createUniform("shadowMap");
glActiveTexture(GL_TEXTURE4);
this.shadowmap.getDepthMapTexture().bind();
shaderprogram.setUniform("shadowMap", 4);
glPolygonMode( GL_FRONT_AND_BACK, GL_FILL );
glBindVertexArray(object.getMesh().getVaoId());
glEnableVertexAttribArray(0);//Vertex positions
glEnableVertexAttribArray(1);//Color Positions
glEnableVertexAttribArray(2);//Normals
glDrawElements(GL_TRIANGLES, object.getMesh().getVertexcount(),GL_UNSIGNED_INT ,0);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glDisableVertexAttribArray(2);
glBindVertexArray(0);
glBindTexture(GL_TEXTURE_2D,0);
shaderprogram.unbind();
Some parts are left out, but I think these are the most important parts of the code where the error might be.
Here are the vertex and fragment shaders used in the first render cycle for the shadow map:
#version 330
layout (location=0) in vec3 position;
layout (location=1) in vec2 texCoord;
layout (location=2) in vec3 vertexNormal;
uniform mat4 modelLightViewMatrix;
uniform mat4 orthoProjectionMatrix;
void main()
{
gl_Position = orthoProjectionMatrix * modelLightViewMatrix * vec4(position, 1.0f);
}
I know I am not using the texCoords and the vertexNormal here.
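One thing worth double-checking (it may be handled in the parts that were left out): the vertex shader above also reads an orthoProjectionMatrix uniform, while the shadow-pass code shown earlier only uploads modelLightViewMatrix. A sketch of what setting it could look like; the getter name on the Transformation helper is hypothetical:
// hypothetical getter; the real source of the ortho matrix may differ
Matrix4f orthoProjectionMatrix = transformation.getOrthoProjectionMatrix(sun);
shaderprogram.setUniform("orthoProjectionMatrix", orthoProjectionMatrix);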
Here is the fragment shader:
#version 330
void main()
{
gl_FragDepth = gl_FragCoord.z;
}
It should just write the fragment's depth value.
And here is the relevant part of the normal scene's fragment shader:
float shadowfactor = 0;
vec3 projCoords = mlightviewVertexPos.xyz;
projCoords = projCoords * 0.5 + 0.5;
if (projCoords.z < texture(shadowMap, projCoords.xy).r) {
    // Current fragment is not in shade
    shadowfactor = 1;
} else {
    shadowfactor = 0.5;
}
color = color * (vec4(1,1,1,1)* shadowfactor);
fragColor = color;
I'm passing in the orthoMatrix and the LightViewMatrix to determine where the fragment would be from the sun's point of view, and checking the Z value at that position in the texture.
The problem is that the shadow map sample appears to be uniformly black. I tried assigning texture(shadowMap, projCoords.xy).r directly to the fragment color to see if there are any differences anywhere, but it is all the same black color, i.e. 0.
I also tried using the shadow map texture directly on the terrain to see if there is anything in it, but again I only get a black texture.
I am aware that this is a very long question, but I have been debugging it for the last two days and can't find the error. My guess is that I'm either not binding the texture right or that the wrong FrameBuffer is used in the render cycle.
Hopefully someone wants to help and can find the error.
Thank you for your time in advance,
Alex

OpenGL - Why does my fbo/texture remain black?

I've been trying my very best to implement renderable-texture functionality using OpenGL framebuffers together with the LWJGL library in Java. However, the result I always get is a 100% black texture.
I'm simply asking for some advice about what the problem might be. I'm not rendering any specific shapes. I bind my generated framebuffer, call glClearColor(1, 0, 0, 1) and then glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT), and then unbind the framebuffer. But when I try to render the texture attached to the framebuffer, it only shows black, where it should actually be red, right?
Also, glCheckFramebufferStatus() returns GL_FRAMEBUFFER_COMPLETE, so I suppose the error lies in the rendering part rather than the initialization phase. But I'll show the initialization code anyway.
The initialization code:
public RenderableTexture initialize(int width, int height, int internalFormat, int[] attachments, boolean useDepthBuffer) {
    if (!GLContext.getCapabilities().GL_EXT_framebuffer_object) {
        System.err.println("FrameBuffers not supported on your graphics card!");
    }
    this.width = width;
    this.height = height;
    hasDepthBuffer = useDepthBuffer;
    fbo = glGenFramebuffers();
    glBindFramebuffer(GL_FRAMEBUFFER, fbo);
    id = glGenTextures();
    glBindTexture(GL_TEXTURE_2D, id);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
    glTexImage2D(GL_TEXTURE_2D, 0, internalFormat, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, (ByteBuffer) null);
    if (useDepthBuffer) {
        rbo = glGenRenderbuffers();
        glBindRenderbuffer(GL_RENDERBUFFER, rbo);
        glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT24, width, height);
        glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, rbo);
    }
    glFramebufferTexture2D(GL_FRAMEBUFFER, attachments[0], GL_TEXTURE_2D, id, 0);
    int[] drawBuffers = new int[attachments.length];
    for (int i = 0; i < attachments.length; i++)
        if (attachments[i] == GL_DEPTH_ATTACHMENT)
            drawBuffers[i] = GL_NONE;
        else
            drawBuffers[i] = attachments[i];
    glDrawBuffers(Util.toIntBuffer(drawBuffers));
    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
        System.err.println("Warning! Incomplete Framebuffer");
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    return this;
}
internalFormat is GL_RGBA8, and width and height are both 512. attachments[] contains only one value, GL_COLOR_ATTACHMENT0. useDepthBuffer is set to true.
The code above is only called once.
This is the rendering code:
public RenderManager draw() {
    glClearColor(bg.x, bg.y, bg.z, bg.w);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    texture.bindAsRenderTarget(true);
    texture.releaseRenderTarget();
    quad.draw();
    return this;
}
I set the clear color to black (0, 0, 0, 1) and then clear the screen. I then call texture.bindAsRenderTarget(true);. The texture object is the one that contains the initialize method from above, so some variables are shared between that method and bindAsRenderTarget().
This method looks like this:
public RenderableTexture bindAsRenderTarget(boolean clear) {
    glViewport(0, 0, width, height);
    glBindFramebuffer(GL_DRAW_FRAMEBUFFER, fbo);
    glClearColor(1, 0, 0, 1f);
    if (clear)
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    return this;
}
As you can see, I adjust the viewport to the size of the texture / framebuffer. I then bind the framebuffer and set the clear color to red. Then, since I passed true in the rendering code, it (as I believe) clears the currently bound framebuffer to red.
texture.releaseRenderTarget(); adjusts the viewport to fit the display and then calls glBindFramebuffer(GL_FRAMEBUFFER, 0);
The final line of code quad.draw(); simply binds the textureID of the texture bound to the framebuffer and then draws a simple quad with it.
That's pretty much all there is.
I can assure you that I'm rendering the quad correctly, since I can bind textures loaded from PNG files to it and the texture is successfully shown.
So to make things clear, the main question is pretty much:
Why on earth is the texture black after the clear, when it should be red? Where and what am I doing wrong?
EDIT: I have a feeling that it might have to do with the binding of the different GL objects. Does the renderbuffer have to be bound at the point of rendering to its framebuffer? Does it not? Does it matter? How about the texture? At what points should they be bound?
I did something very stupid. The class that I initialized the FBO texture within (RenderableTexture.class) was a subclass of Texture.class. The binding method, including the textureID, was supposed to be inherited from the Texture class, as I had declared the id variable as protected. However, I had accidentally created a private field with the same name within the subclass, so when generating the texture I was saving the textureID to the subclass's id field, and when binding I was using the uninitialized id from the superclass. Sorry to anyone trying to solve this without being able to do so :s
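For readers hitting the same symptom, here is a minimal sketch of the field-shadowing pitfall described above (simplified; the class names follow the question, the method bodies are illustrative):
class Texture {
    protected int id; // texture handle used by bind()
    public void bind() {
        GL11.glBindTexture(GL11.GL_TEXTURE_2D, id);
    }
}
class RenderableTexture extends Texture {
    private int id; // BUG: hides Texture.id
    public void initialize() {
        id = GL11.glGenTextures(); // writes the subclass field; the inherited id stays 0
    }
}
Removing the duplicate declaration so that initialize() writes the inherited protected field resolves the issue described.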

How to draw textures and use VBO's?

Currently I have a working system of VBOs where I have two buffers - one for position and one for color. However, I want to draw textures instead of colors. How can I draw textures over my VBO position buffer? This is in Java and LWJGL, but an example in any language helps.
I want to add textures to my VBO. How would I go about doing that?
You don't. At least, you're not adding texture images to the VBO. What you add is a new attribute, called the texture coordinate, that assigns each vertex a location in a texture image.
The texture itself is an independent object, created using glGenTextures, glBindTexture and glTexImage….
Here is a simple code snippet to initialize and render a cube with a texture. It uses the Slick2D library.
int vertexHandle;
int textureHandle;

private void init() throws IOException {
    Camera.init();
    texture = TextureLoader.getTexture("PNG", ResourceLoader.getResourceAsStream("test.png"));
    this.initialize3D();
    vertexHandle = GL15.glGenBuffers();
    FloatBuffer positionData = BufferUtils.createFloatBuffer(72);
    // Initialize position data.
    positionData.flip();
    GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vertexHandle);
    GL15.glBufferData(GL15.GL_ARRAY_BUFFER, positionData, GL15.GL_STATIC_DRAW);
    GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
    FloatBuffer textureData = BufferUtils.createFloatBuffer(72);
    // Initialize texture coordinate data.
    textureData.flip();
    textureHandle = GL15.glGenBuffers();
    GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, textureHandle);
    GL15.glBufferData(GL15.GL_ARRAY_BUFFER, textureData, GL15.GL_STATIC_DRAW);
    GL11.glTexCoordPointer(3, GL11.GL_FLOAT, 0, 0);
    GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
    GL11.glDisableClientState(GL11.GL_TEXTURE_COORD_ARRAY);
}
public void render() {
    GL11.glClear(GL11.GL_COLOR_BUFFER_BIT | GL11.GL_DEPTH_BUFFER_BIT);
    GL11.glLoadIdentity();
    texture.bind();
    GL11.glPushMatrix();
    GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vertexHandle);
    GL11.glVertexPointer(3, GL11.GL_FLOAT, 0, 0L);
    GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, textureHandle);
    GL11.glTexCoordPointer(3, GL11.GL_FLOAT, 0, 0L); // texture coordinates for the bound buffer, not a second vertex pointer
    GL11.glDrawArrays(GL11.GL_QUADS, 0, 24);
    GL11.glPopMatrix();
}
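One more detail worth noting (it may be intentional in the elided parts): init() disables GL_TEXTURE_COORD_ARRAY at the end, after initialize3D enabled it, and render() never re-enables it, so the texture coordinates would not be used while drawing. A sketch of enabling it around the draw call:
GL11.glEnableClientState(GL11.GL_TEXTURE_COORD_ARRAY); // texcoord array must be on while drawing
GL11.glDrawArrays(GL11.GL_QUADS, 0, 24);
GL11.glDisableClientState(GL11.GL_TEXTURE_COORD_ARRAY);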
In addition you may need to integrate parts of this method to initialize 3D rendering:
public void initialize3D() {
    GL11.glEnable(GL11.GL_TEXTURE_2D); // Enable 2D texturing.
    GL11.glShadeModel(GL11.GL_SMOOTH); // Smooth shading.
    //GL11.glClearColor(0.4f,0.6f,1.0f,0.0f); // BG color. 6698FF
    GL11.glClearDepth(1.0); // Depth buffer clear value.
    GL11.glEnable(GL11.GL_DEPTH_TEST); // Depth testing, so nearer objects draw over farther ones.
    GL11.glDepthFunc(GL11.GL_LEQUAL); // Type of depth testing.
    GL11.glEnableClientState(GL11.GL_VERTEX_ARRAY);
    //GL11.glEnableClientState(GL11.GL_COLOR_ARRAY);
    GL11.glEnableClientState(GL11.GL_TEXTURE_COORD_ARRAY);
    GL11.glMatrixMode(GL11.GL_PROJECTION); // Switch to the projection matrix.
    GL11.glLoadIdentity(); // Reset it.
    // Set the default perspective. Render distances: near 0.1, far 300.
    GLU.gluPerspective(45.0f, (float) Display.getWidth() / (float) Display.getHeight(), 0.1f, 300.0f);
    GL11.glMatrixMode(GL11.GL_MODELVIEW); // Back to the modelview matrix for placing objects.
    GL11.glHint(GL11.GL_PERSPECTIVE_CORRECTION_HINT, GL11.GL_NICEST); // Quality hint for perspective correction.
}

Texture appears white

I have a textured skydome. It renders white when an image is attached, but it renders correctly when a color is given instead. I have reason to assume the texture is being overwritten, so some tips on this would be great. It used to work fine, displaying the texture appropriately.
EDIT: If I draw the texture directly to the FBO, it does show the texture. However, when I map it onto the sphere it shows up white. Give the sphere a color, and it shows correctly with that color. Also, for the record, white is not the clear color. And I use an image that's quite large (around 3000x1000).
ADD: No errors are given anywhere.
Changing:
glActiveTextureARB(GL_TEXTURE6_ARB);
glCallList(SkySphere.getDisplayList());
To:
glActiveTextureARB(GL_TEXTURE0_ARB);
glCallList(SkySphere.getDisplayList());
displays the proper image once, on the first cycle, and then white again.
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fboId);
glViewport(0,0,screenWidth,screenHeight);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(90.0f, ((float)screenWidth/(float)screenHeight),0.1f,100.0f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glShadeModel(GL_SMOOTH);
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
glDisable(GL_DEPTH_TEST);
glClearColor(1.0f,1.0f,0.0f,1.0f);
glClear (GL_COLOR_BUFFER_BIT);
glLoadIdentity ();
camera.look();
glEnable(GL_TEXTURE_2D);
glDisable(GL_LIGHTING);
glActiveTextureARB(GL_TEXTURE6_ARB);
glCallList(SkySphere.getDisplayList());
glDisable(GL_TEXTURE_2D);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
This is the skysphere code:
public static int loadTexture(String filename) {
ByteBuffer buf = null;
int tWidth = 0;
int tHeight = 0;
.. load png into buffer..
// Create a new texture object in memory and bind it
textureId = GL11.glGenTextures();
GL11.glBindTexture(GL11.GL_TEXTURE_2D, textureId);
// All RGB bytes are aligned to each other and each component is 1 byte
GL11.glPixelStorei(GL11.GL_UNPACK_ALIGNMENT, 1);
// Upload the texture data
GL11.glTexImage2D(GL11.GL_TEXTURE_2D, 0, GL11.GL_RGB, tWidth, tHeight, 0,
GL11.GL_RGBA, GL11.GL_UNSIGNED_BYTE, buf);
// Setup what to do when the texture has to be scaled
GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MAG_FILTER,
GL11.GL_NEAREST);
GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MIN_FILTER,
GL11.GL_LINEAR);
return textureId;
}
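Given the roughly 3000x1000 panorama mentioned above, one quick thing to rule out (a debugging sketch, not a confirmed cause) is missing non-power-of-two texture support; on older hardware an unsupported NPOT texture commonly renders as plain white:
// LWJGL 2 style capability check; NPOT textures require GL 2.0 or this extension
if (!GLContext.getCapabilities().GL_ARB_texture_non_power_of_two) {
    System.err.println("NPOT textures not supported - consider resizing the panorama to 2048x1024");
}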
public static int getDisplayList() {
    return displayList;
}

public static int makeSphere() {
    Sphere s = new Sphere(); // an LWJGL class for drawing a sphere
    s.setOrientation(GLU.GLU_INSIDE); // normals point inwards
    s.setTextureFlag(true); // generate texture coords
    displayList = GL11.glGenLists(1);
    GL11.glNewList(displayList, GL11.GL_COMPILE);
    {
        GL11.glPushMatrix();
        {
            GL11.glBindTexture(GL11.GL_TEXTURE_2D, getTextureId());
            //GL11.glTranslatef(0,0,0);
            GL11.glRotatef(90f, 1, 0, 0); // rotate the sphere to align its axis vertically
            s.draw(1, 48, 48); // run GL commands to draw the sphere
        }
        GL11.glPopMatrix();
    }
    GL11.glEndList();
    return displayList;
}
In initGL:
SkySphere.createShader();
SkySphere.loadTexture("textures/panorama2.png");
SkySphere.makeSphere();
Also I'm doing most of my work in framebuffers:
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, modelsFboId);
And on one occasion I copy the depth to a texture:
glActiveTextureARB(GL_TEXTURE3_ARB);
glBindTexture(GL_TEXTURE_2D, modelsDepthTextureId);
glCopyTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, 0, 0, screenWidth, screenHeight);
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
I used
glPushAttrib(GL_ALL_ATTRIB_BITS);
at the beginning and
glPopAttrib();
at the end to reset the OpenGL states each frame.

How to scale an image using OpenGL (JGL)?

I want to scale an image using OpenGL. Can anyone provide me with code showing how to do this?
PS: I am using JGL as an OpenGL library for Java.
I will be brief on this, as you can find tutorials for pretty much every part of the solution.
You need to load your image from the disk, and create a texture.
You need to create a framebuffer object (FBO) with the desired target dimensions (in your case, double width, double height). Make the FBO active for rendering.
Render a fullscreen quad with your texture applied.
Read the result using glReadPixels().
And that's it ... the texture-mapping hardware will take care of rescaling it for you. But it will likely be slower than doing it on the CPU, especially for simple cases like "scaling 2x".
EDIT: As requested by the OP, here is some source code.
So ... for loading an image in Java, you would go with this:
BufferedImage img;
try {
img = ImageIO.read(new File("myLargeImage.jpg"));
} catch (IOException e) { /* ... */ }
int w = img.getWidth(), h = img.getHeight();
For JGL, one might want to convert an image to an array:
byte[][][] imageData = new byte[img.getWidth()][img.getHeight()][3]; // alloc buffer for RGB data
for (int y = 0; y < h; ++y) {
    for (int x = 0; x < w; ++x) {
        int RGBA = img.getRGB(x, y);
        imageData[x][y][0] = (byte)((RGBA >> 16) & 0xff); // red
        imageData[x][y][1] = (byte)((RGBA >> 8) & 0xff);  // green
        imageData[x][y][2] = (byte)(RGBA & 0xff);         // blue
    }
}
Note that there could be an alpha channel as well (for transparency) and that this will likely be quite slow. One could also use:
int[] rgbaData = img.getRGB(0, 0, w, h, new int[w * h], 0, w);
But that doesn't return the data in the correct format expected by JGL. Tough luck.
Then you need to fill a texture:
int[] texId = {0};
gl.glGenTextures(1, texId); // generate texture id for your texture (can skip this line)
gl.glEnable(GL.GL_TEXTURE_2D);
gl.glBindTexture(GL.GL_TEXTURE_2D, texId[0]); // bind the texture
gl.glPixelStorei(GL.GL_UNPACK_ALIGNMENT, 1); // set alignment of data in memory (a good thing to do before glTexImage)
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP);
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP); // set clamp (GL_CLAMP_TO_EDGE would be better)
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR);
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR); // set linear filtering (so you can scale your image)
gl.glTexImage2D(GL.GL_TEXTURE_2D, 0, GL.GL_RGB, w, h, 0, GL.GL_RGB, GL.GL_UNSIGNED_BYTE, imageData); // upload image data to the texture
Once you have a texture, you can draw stuff. Let's resample your image:
int newW = ..., newH = ...; // fill-in your values
gl.glViewport(0, 0, newW, newH); // set viewport
gl.glMatrixMode(GL.GL_MODELVIEW);
gl.glLoadIdentity();
gl.glMatrixMode(GL.GL_PROJECTION);
gl.glLoadIdentity();
gl.glColor3f(1.0f, 1.0f, 1.0f); // set "white" color
gl.glDisable(GL.GL_CULL_FACE); // disable backface culling
gl.glDisable(GL.GL_LIGHTING); // disable lighting
gl.glDisable(GL.GL_DEPTH_TEST); // disable depth test
// setup OpenGL so that it renders texture colors without modification (some of that is not required by default)
gl.glBegin(GL.GL_QUADS);
gl.glTexCoord2f(0.0f, 1.0f);
gl.glVertex2f(-1.0f, -1.0f);
gl.glTexCoord2f(1.0f, 1.0f);
gl.glVertex2f(+1.0f, -1.0f);
gl.glTexCoord2f(1.0f, 0.0f);
gl.glVertex2f(+1.0f, +1.0f);
gl.glTexCoord2f(0.0f, 0.0f);
gl.glVertex2f(-1.0f, +1.0f);
gl.glEnd();
// draw a fullscreen quad with your texture on it (scaling is performed here)
Now that the scaled image is rendered, all that remains is to download it.
byte[][][] newImageData = new byte[newW][newH][3];
gl.glPixelStorei(GL.GL_PACK_ALIGNMENT, 1); // set alignment of data in memory (this time pack alignment; a good thing to do before glReadPixels)
gl.glReadPixels(0, 0, newW, newH, GL.GL_RGB, GL.GL_UNSIGNED_BYTE, newImageData);
And then the data can be converted back to a BufferedImage in a similar way to how the input image img was converted to imageData.
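For completeness, that conversion could look roughly like this (a sketch, assuming newImageData is laid out as [x][y][channel] like imageData above; note that glReadPixels returns rows bottom-up, hence the vertical flip; the output path is just an example):
BufferedImage result = new BufferedImage(newW, newH, BufferedImage.TYPE_INT_RGB);
for (int y = 0; y < newH; ++y) {
    for (int x = 0; x < newW; ++x) {
        int r = newImageData[x][y][0] & 0xff;
        int g = newImageData[x][y][1] & 0xff;
        int b = newImageData[x][y][2] & 0xff;
        result.setRGB(x, newH - 1 - y, (r << 16) | (g << 8) | b); // flip vertically
    }
}
ImageIO.write(result, "png", new File("scaledImage.png")); // illustrative output file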
Note that I used the variable gl, which is an instance of the GL class. Put the code into some JGL example and it should work.
One word of caution: JGL doesn't seem to support framebuffer objects, so the output image size is limited by your OpenGL window size (attempting to create larger images will result in black borders). This can be solved with multipass rendering (render your image tile by tile and assemble the full image in memory).
