Combine multiple pixmaps that have transparent pixels - java

I've been trying to combine multiple pixmaps that have transparency. The problem I'm having is that I end up with strange artifacts where they overlap.
I've created an example that should show a red circle crossing over a blue circle.
import com.badlogic.gdx.ApplicationAdapter;
import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.graphics.Pixmap;
import com.badlogic.gdx.graphics.PixmapIO;

public class PixmapExample extends ApplicationAdapter {
    @Override
    public void create() {
        final int width = 128;
        final int height = 128;
        final Pixmap pixmap = new Pixmap(width, height, Pixmap.Format.RGBA8888);
        pixmap.setBlending(Pixmap.Blending.SourceOver);
        pixmap.setColor(1f, 0f, 0f, 0.4f);
        pixmap.fillCircle(64, 64, 32);
        pixmap.drawPixel(45, 45);
        final Pixmap otherPixmap = new Pixmap(width, height, Pixmap.Format.RGBA8888);
        otherPixmap.setBlending(Pixmap.Blending.SourceOver);
        otherPixmap.setColor(0f, 0f, 1f, 0.4f);
        otherPixmap.fillCircle(84, 64, 32);
        pixmap.drawPixmap(otherPixmap, 0, 0);
        // Write the pixmap
        final FileHandle fileHandle = new FileHandle("pixmap_combine_example.png");
        PixmapIO.writePNG(fileHandle, pixmap);
        otherPixmap.dispose(); // pixmaps hold native memory; dispose both
        pixmap.dispose();
    }
}
I've tried using Pixmap.Blending.None but this makes the second pixmap overwrite the first.
What could be causing this?

Related

How does a FrameBuffer interact with multiple cameras and viewports

My goal is to draw all my visuals to a framebuffer whose size matches my pixel art assets 1:1, and then output the framebuffer texture at the current screen size (a common solution for scaling pixel art).
The problem I'm having while trying to achieve this is two-fold:
1. The framebuffer texture is drawn too small. I expect the texture to fill the whole window.
2. Resizing the screen further distorts the proportions and position. I expect the framebuffer texture to keep filling the screen after resizing it.
This is my trimmed-down code.
public class BattleScreen extends GameScreen {
    public static final int VISIBLE_WIDTH = 21;
    public static final int VISIBLE_HEIGHT = 21;

    private static OrthographicCamera mapCamera = null;
    private SpriteBatch spriteBatch;
    private OrthographicCamera hudCamera;
    private Hud hud;
    private EntityStage entityStage;
    private FrameBuffer fbo;

    private void initializeVariables(UnitOwner enemy, MapManager mapMgr) {
        mapCamera = new OrthographicCamera();
        mapCamera.setToOrtho(false, VISIBLE_WIDTH, VISIBLE_HEIGHT);
        fbo = new FrameBuffer(Pixmap.Format.RGBA8888, 672, 672, false);
        spriteBatch = new SpriteBatch(900);
        entityStage = new EntityStage();
        hudCamera = new OrthographicCamera();
        hudCamera.setToOrtho(false, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
    }

    @Override
    public void show() {
        resize(Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
        mapCamera.position.set(currentMap.getMapWidth() * 0.5f, currentMap.getMapHeight() * 0.5f, 0f);
        mapRenderer = new OrthogonalTiledMapRenderer(mapMgr.getCurrentTiledMap(), Map.UNIT_SCALE, spriteBatch);
        mapRenderer.setView(mapCamera);
        final FitViewport vp = new FitViewport(VISIBLE_WIDTH, VISIBLE_HEIGHT, mapCamera);
        currentMap.getTiledMapStage().setViewport(vp);
        entityStage.setViewport(vp);
    }

    private void update(final float delta) {
        mapCamera.position.x = Utility.clamp(mapCamera.position.x, currentMap.getTilemapWidthInTiles() - (mapCamera.viewportWidth * 0.5f), 0 + (mapCamera.viewportWidth * 0.5f));
        mapCamera.position.y = Utility.clamp(mapCamera.position.y, currentMap.getTilemapHeightInTiles() - (mapCamera.viewportHeight * 0.5f), 0 + (mapCamera.viewportHeight * 0.5f));
        mapCamera.update();
        hudCamera.update();
    }

    private void renderElements(final float delta) {
        Gdx.gl.glClearColor(0, 0, 0, 0);
        Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
        fbo.begin();
        Gdx.gl.glClearColor(0, 0, 0, 0);
        Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
        renderMap();
        renderUnits();
        renderHUD(delta);
        fbo.end();
        spriteBatch.begin();
        Texture texture = fbo.getColorBufferTexture();
        TextureRegion textureRegion = new TextureRegion(texture);
        textureRegion.flip(false, true);
        spriteBatch.draw(textureRegion, 0, 0, fbo.getWidth(), fbo.getHeight());
        spriteBatch.end();
    }

    private void renderMap() {
        spriteBatch.setProjectionMatrix(mapCamera.combined);
        mapRenderer.setView(mapCamera);
        currentMap.getTiledMapStage().getViewport().apply();
        mapRenderer.render();
    }

    private void renderUnits() {
        spriteBatch.begin();
        entityStage.getViewport().apply();
        for (Entity unit : units) {
            final Color temp = spriteBatch.getColor();
            spriteBatch.setColor(new Color(temp.r, temp.g, temp.b, unit.getEntityactor().getColor().a));
            spriteBatch.draw(unit.getFrame(), unit.getEntityactor().getX(), unit.getEntityactor().getY(), 1.0f, 1.0f);
            spriteBatch.setColor(temp);
        }
        spriteBatch.end(); // matches the begin() above
    }

    private void renderHUD(final float delta) {
        spriteBatch.setProjectionMatrix(hud.getStage().getCamera().combined);
        hud.getStage().getViewport().apply();
        hud.render(delta);
    }

    @Override
    public void resize(final int width, final int height) {
        currentMap.getTiledMapStage().getViewport().update(width, height, false);
        entityStage.getViewport().update(width, height, false);
        hud.resize(width, height);
        pauseMenu.resize(width, height);
    }
}
What I tried is changing the width and height on this line in the render method:
spriteBatch.draw(textureRegion, 0, 0, 1000, 1000);
And then it does fill up the whole screen nicely, which is weird because the screen is 672 x 672 pixels in size.
I fixed problem 1 with the following adaptation:
mapCamera.setToOrtho(false, fbo.getWidth(), fbo.getHeight()); //adapt camera to fbo
mapCamera.update();
spriteBatch.setProjectionMatrix(mapCamera.combined);
spriteBatch.draw(textureRegion, 0, 0, fbo.getWidth(), fbo.getHeight());
spriteBatch.end();
mapCamera.setToOrtho(false, VISIBLE_WIDTH, VISIBLE_HEIGHT); //reset camera
mapCamera.update();
The spriteBatch is reused for the map, the entities, and the UI for efficiency, so I had to reconfigure it to project the buffer texture onto the screen.
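An alternative to reconfiguring and resetting mapCamera every frame is to keep a dedicated matrix just for blitting the buffer to the screen. A minimal sketch (the screenProjection field and the helper method are illustrative, not part of the original code):

private final Matrix4 screenProjection = new Matrix4();

private void drawFrameBufferToScreen() {
    // Ortho projection in framebuffer pixels: (0,0) bottom-left to (672,672) top-right.
    screenProjection.setToOrtho2D(0, 0, fbo.getWidth(), fbo.getHeight());
    spriteBatch.setProjectionMatrix(screenProjection);
    TextureRegion region = new TextureRegion(fbo.getColorBufferTexture());
    region.flip(false, true); // framebuffer textures come out upside down
    spriteBatch.begin();
    spriteBatch.draw(region, 0, 0, fbo.getWidth(), fbo.getHeight());
    spriteBatch.end();
}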

libgdx sprite-sheet based animation scaling

I am quite new to libgdx and android programming in general. I am having problems rendering a sprite-sheet-based animation, and getting it to be the same size on different screen sizes.
If I run the following code on my Note 4, the animation is quite small; on the Zenfone 2 it's quite big; and on my laptop it is just so small it can barely be seen.
I really don't understand why this happens, or how to make it the same on the two phones. I thought that using an orthographic camera with in-game units and a viewport would do the job, but I must be doing something wrong because it doesn't.
I am following the book "Libgdx Cross-platform Game Development Cookbook".
I would hugely appreciate any help on how to properly use in-game units to get the game to look the same on different screen sizes, so that a 512x512 px image isn't tiny on the Note 4 and huge on the Zenfone (each frame of my animation is 512 px squared).
As far as the PC goes, I just have no clue what is going on; I would really appreciate any explanation of why that happens!
Thank you all!
package com.mygdxGame;

import com.badlogic.gdx.ApplicationAdapter;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.Animation;
import com.badlogic.gdx.graphics.g2d.Animation.PlayMode;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.g2d.TextureAtlas;
import com.badlogic.gdx.graphics.g2d.TextureAtlas.AtlasRegion;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.viewport.FitViewport;
import com.badlogic.gdx.utils.viewport.Viewport;
import java.util.Comparator;

public class MyGdxGame extends ApplicationAdapter {
    private static final float WORLD_TO_SCREEN = 1.0f / 100.0f;
    private static final float SCENE_WIDTH = 12.80f;
    private static final float SCENE_HEIGHT = 7.20f;
    private static final float FRAME_DURATION = 1.0f / 20.0f;

    private TextureAtlas techmanAtlas;
    private Animation techmanRun;
    private float animationTime;
    private OrthographicCamera camera;
    public Viewport viewport;
    public SpriteBatch batch;

    @Override
    public void create() {
        camera = new OrthographicCamera();
        viewport = new FitViewport(Gdx.graphics.getWidth(), Gdx.graphics.getHeight(), camera);
        batch = new SpriteBatch();
        animationTime = 0.0f;
        techmanAtlas = new TextureAtlas(Gdx.files.internal("TechMan.atlas"));
        Array<TextureAtlas.AtlasRegion> techmanRegions = new Array<TextureAtlas.AtlasRegion>(techmanAtlas.getRegions());
        techmanRegions.sort(new RegionComparator());
        techmanRun = new Animation(FRAME_DURATION, techmanRegions, PlayMode.LOOP);
        camera.position.set(SCENE_WIDTH * 0.5f, SCENE_HEIGHT * 0.5f, 0.0f);
    }

    @Override
    public void dispose() {
        batch.dispose();
        techmanAtlas.dispose();
    }

    @Override
    public void render() {
        Gdx.gl.glClearColor(1, 0, 0, 1);
        Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
        animationTime += Gdx.graphics.getDeltaTime();
        camera.update();
        batch.setProjectionMatrix(camera.combined);
        batch.begin();
        TextureRegion techmanFrame = techmanRun.getKeyFrame(animationTime);
        int width = techmanFrame.getRegionWidth();
        int height = techmanFrame.getRegionHeight(); // was getRegionWidth() twice
        float originX = width * 0.5f;
        float originY = height * 0.5f;
        batch.draw(techmanFrame,
                1.0f - originX, 3.70f - originY,
                originX, originY,
                width, height,
                WORLD_TO_SCREEN, WORLD_TO_SCREEN,
                0.0f);
        batch.draw(techmanRun.getKeyFrame(animationTime), 100.0f, 275.0f);
        batch.end();
    }

    @Override
    public void resize(int width, int height) {
        viewport.update(width, height, false);
    }

    private static class RegionComparator implements Comparator<AtlasRegion> {
        @Override
        public int compare(AtlasRegion region1, AtlasRegion region2) {
            return region1.name.compareTo(region2.name);
        }
    }
}
That's not the proper way to use a viewport:
viewport = new FitViewport(Gdx.graphics.getWidth(), Gdx.graphics.getHeight(), camera);
You should initialize the viewport with constant values (800x600, for example) and use those when writing code:
viewport = new FitViewport(WORLD_WIDTH, WORLD_HEIGHT, camera);
The image will be stretched/resized automatically when rendering; you don't need to do that yourself:
batch.draw(techmanFrame, x, y, width, height);
PS: Do not call batch.setProjectionMatrix(camera.combined); inside the render() method. You only need to do that once.
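A minimal sketch of that setup (WORLD_WIDTH and WORLD_HEIGHT are assumed constants, and the class name is illustrative):

public class ViewportExample extends ApplicationAdapter {
    private static final float WORLD_WIDTH = 800f;  // world size in in-game units
    private static final float WORLD_HEIGHT = 600f;

    private OrthographicCamera camera;
    private Viewport viewport;
    private SpriteBatch batch;

    @Override
    public void create() {
        camera = new OrthographicCamera();
        viewport = new FitViewport(WORLD_WIDTH, WORLD_HEIGHT, camera);
        batch = new SpriteBatch();
    }

    @Override
    public void resize(int width, int height) {
        viewport.update(width, height, true); // true re-centers the camera
        batch.setProjectionMatrix(camera.combined); // once per resize, not every frame
    }

    @Override
    public void render() {
        Gdx.gl.glClearColor(0, 0, 0, 1);
        Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
        batch.begin();
        // draw here using world-unit coordinates (0..800 x 0..600); the
        // FitViewport scales and letterboxes them to the physical screen
        batch.end();
    }
}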
Documentation:
https://github.com/libgdx/libgdx/wiki/Viewports
https://github.com/libgdx/libgdx/wiki/2D-Animation

LibGDX - modify pixels after rendering a 3D scene

I'm writing a 3D game that is drawn with ASCII art just to achieve a special look. I render my models to a ModelBatch and now I would like to convert every pixel to an ASCII symbol before drawing the final result to the screen. I already have the code to convert pixels to ASCII, but I have no idea how to get the pixels of the render result.
Code so far (not including the ASCII conversion):
import com.badlogic.gdx.ApplicationListener;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.GL10;
import com.badlogic.gdx.graphics.PerspectiveCamera;
import com.badlogic.gdx.graphics.VertexAttributes.Usage;
import com.badlogic.gdx.graphics.g3d.Model;
import com.badlogic.gdx.graphics.g3d.ModelBatch;
import com.badlogic.gdx.graphics.g3d.ModelInstance;
import com.badlogic.gdx.graphics.g3d.materials.ColorAttribute;
import com.badlogic.gdx.graphics.g3d.materials.Material;
import com.badlogic.gdx.graphics.g3d.utils.ModelBuilder;

public class MyGame implements ApplicationListener {
    private PerspectiveCamera cam;
    private ModelBatch batch;
    public Model model;
    public ModelInstance instance;

    @Override
    public void create() {
        //float w = Gdx.graphics.getWidth();
        //float h = Gdx.graphics.getHeight();
        cam = new PerspectiveCamera(67, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
        cam.position.set(10f, 10f, 10f);
        cam.lookAt(0, 0, 0);
        cam.near = 0.1f;
        cam.far = 300f;
        cam.update();
        batch = new ModelBatch();
        ModelBuilder modelBuilder = new ModelBuilder();
        model = modelBuilder.createBox(5f, 5f, 5f,
                new Material(ColorAttribute.createDiffuse(Color.GREEN)),
                Usage.Position | Usage.Normal);
        instance = new ModelInstance(model);
    }

    @Override
    public void dispose() {
        batch.dispose();
        model.dispose();
    }

    @Override
    public void render() {
        Gdx.gl.glViewport(0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
        Gdx.gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
        batch.begin(cam);
        batch.render(instance);
        batch.end();
    }

    @Override
    public void resize(int width, int height) {
    }

    @Override
    public void pause() {
    }

    @Override
    public void resume() {
    }
}
I think you want to render to a texture, convert the texture to a Pixmap, and then parse the pixmap to draw your ASCII characters to the real screen.
First, to render to a texture (offscreen buffer) in LibGDX, use a FrameBufferObject:
fbo = new FrameBuffer(Pixmap.Format.RGBA8888, Gdx.graphics.getWidth(), Gdx.graphics.getHeight(), true); // format, size, and depth buffer
fbo.begin();
// draw your stuff
// ...
fbo.end();
Second, to get bytes that the CPU can use, you can use the ScreenUtils class (http://libgdx.badlogicgames.com/nightlies/docs/api/com/badlogic/gdx/utils/ScreenUtils.html). You can either get a Pixmap or a raw byte[]. For example, replace the // ... above with:
Pixmap p = ScreenUtils.getFrameBufferPixmap(0, 0, width, height);
Now you can wander over the pixmap checking out pixels, using Pixmap.getPixel (http://libgdx.badlogicgames.com/nightlies/docs/api/com/badlogic/gdx/graphics/Pixmap.html#getPixel(int, int)).
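Putting both steps together, the render pass might look roughly like this (a sketch reusing cam, batch, and instance from the question; the pixel-to-ASCII conversion itself is left as a comment):

// Offscreen pass: render the scene into the fbo, then read its pixels back.
fbo.begin();
Gdx.gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
batch.begin(cam);
batch.render(instance);
batch.end();
// Read while the fbo is still bound, so we get its contents rather than the screen's.
Pixmap pixels = ScreenUtils.getFrameBufferPixmap(0, 0, fbo.getWidth(), fbo.getHeight());
fbo.end();

for (int y = 0; y < pixels.getHeight(); y++) {
    for (int x = 0; x < pixels.getWidth(); x++) {
        int rgba8888 = pixels.getPixel(x, y);
        // feed rgba8888 into your pixel-to-ASCII conversion here
    }
}
pixels.dispose(); // the Pixmap is a CPU-side copy and must be freed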

How to draw a BitmapFont in LibGDX?

I'm seriously betting that I did something effing stupid and just can't seem to notice it.
package com.me.mygdxgame;

import com.badlogic.gdx.ApplicationListener;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.GL10;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.Texture.TextureFilter;
import com.badlogic.gdx.graphics.g2d.BitmapFont;
import com.badlogic.gdx.graphics.g2d.Sprite;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.g2d.TextureRegion;

public class Locked implements ApplicationListener {
    private OrthographicCamera camera;
    private SpriteBatch batch;
    private Texture texture;
    private Sprite sprite;
    private BitmapFont font;
    private CharSequence str = "Hello World!";
    private float width;
    private float height;

    @Override
    public void create() {
        width = Gdx.graphics.getWidth();
        height = Gdx.graphics.getHeight();
        camera = new OrthographicCamera(1, height / width);
        batch = new SpriteBatch();
        texture = new Texture(Gdx.files.internal("data/libgdx.png"));
        texture.setFilter(TextureFilter.Linear, TextureFilter.Linear);
        TextureRegion region = new TextureRegion(texture, 0, 0, 512, 275);
        sprite = new Sprite(region);
        sprite.setSize(0.9f, 0.9f * sprite.getHeight() / sprite.getWidth());
        sprite.setOrigin(sprite.getWidth() / 2, sprite.getHeight() / 2);
        sprite.setPosition(-sprite.getWidth() / 2, -sprite.getHeight() / 2);
        font = new BitmapFont(Gdx.files.internal("data/digib.fnt"),
                Gdx.files.internal("data/digib.png"), false);
    }

    @Override
    public void dispose() {
        batch.dispose();
        texture.dispose();
        font.dispose(); // fonts own a texture, so they need disposing too
    }

    @Override
    public void render() {
        Gdx.gl.glClearColor(1, 1, 1, 1);
        Gdx.gl.glClear(GL10.GL_COLOR_BUFFER_BIT);
        batch.setProjectionMatrix(camera.combined);
        batch.begin();
        font.setColor(0.0f, 0.0f, 0.0f, 1.0f);
        //sprite.draw(batch);
        font.draw(batch, str, width * 0.5f, height * 0.5f);
        batch.end();
    }

    @Override
    public void resize(int width, int height) {
    }

    @Override
    public void pause() {
    }

    @Override
    public void resume() {
    }
}
The project was generated with the template tool they provide, gdx-setup-ui.jar.
As you can see in the code, I didn't bother to get rid of the default code (just some simple draw calls to render the LibGDX logo).
So, with the cleanly generated project, I followed this guide here:
http://code.google.com/p/libgdx-users/wiki/addingText2D
finally arriving at the code provided above.
The problem is, why won't the !##$ing text show!? I changed the position so many times and still no luck :\
Did I miss something?
FYI: The fonts are fine, I dropped them into another game and it works.
Try changing the projection matrix like this:
Matrix4 normalProjection = new Matrix4().setToOrtho2D(0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
batch.setProjectionMatrix(normalProjection);
All I do is:
spriteBatch = new SpriteBatch();
font = new BitmapFont(Gdx.files.internal("data/nameOfFont.fnt"),
        Gdx.files.internal("data/nameOfFont.png"), false);
and in the render() method:
spriteBatch.begin();
font.setColor(1.0f, 1.0f, 1.0f, 1.0f);
font.draw(spriteBatch, "some string", 25, 160);
spriteBatch.end();
You can read something more about it on my blog: http://algorhymes.wordpress.com/2012/11/17/javalibgdx-fonts/
Personally I'm not a big fan of converting all the fonts to the .fnt format. If you need different sizes of a certain font, you have to spend a lot of time (and app space) making all the conversions.
You can just use the FreeType extension and load straight from a .ttf:
FreeTypeFontGenerator generator = new FreeTypeFontGenerator(fontFile);
BitmapFont font15 = generator.generateFont(15);
BitmapFont font22 = generator.generateFont(22);
generator.dispose();
More info here
Rendering is done in the same way as explained by watis.
Create a .fnt file using Hiero, which is provided on the libGDX website.
Set the font size to 150; it will create a .fnt file and a PNG file.
Copy both files into your assets folder.
Now declare the font:
BitmapFont font;
Now in the create() method:
font = new BitmapFont(Gdx.files.internal("data/100.fnt"), false); // 100 is the font name; you can give your font any name
In render():
font.setScale(.2f);
font.draw(batch, "whatever you want to write", x, y);
This will work smoothly.
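Put together, a compact sketch of those steps (assuming the exported files are data/100.fnt and data/100.png, as above):

private SpriteBatch batch;
private BitmapFont font;

// in create():
batch = new SpriteBatch();
font = new BitmapFont(Gdx.files.internal("data/100.fnt"), false);
font.setScale(0.2f); // exported at size 150, so scale down to taste

// in render():
batch.begin();
font.draw(batch, "whatever you want to write", 100, 100);
batch.end();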
The main problem with your code is that you have created the camera with
viewportWidth = 1 &
viewportHeight = height / width
and you are drawing the font at width*0.5f & height*0.5f, which is outside the camera's view.
Either change the camera initialization to
camera = new OrthographicCamera(width, height);
...
or change the draw-font statement to
font.setScale(1, height / width);
font.draw(batch, str, 0.5f, height / width * 0.5f);
Did you try giving the position manually, like this? I hope this will work:
batch.setProjectionMatrix(camera.combined);
batch.enableBlending();
batch.begin();
font.draw(batch, yourString, 100,100);
batch.end();

Transparent PNG isn't transparent in LWJGL

I have a PNG with a transparent background, and I'm trying to display it using LWJGL. But instead of a transparent background, it appears with a black opaque background. I'm following the code from the "space invaders" example.
Here's my code. I apologise for its length, but I couldn't cut it down any further and still show the graphic. "ball.png" is a 256x256 image with a transparent background.
package com.ziroby.kata.bouncingBalls;

import static org.lwjgl.opengl.GL11.*;

import org.lwjgl.opengl.Display;

import java.awt.*;
import java.awt.color.ColorSpace;
import java.awt.image.*;
import java.io.IOException;
import java.net.URL;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Hashtable;
import javax.swing.ImageIcon;

public class Game {
    public void start() throws Exception {
        Display.setInitialBackground(0.5f, 0.5f, 0.5f);
        Display.create();

        // enable textures since we're going to use these for our sprites
        glEnable(GL_TEXTURE_2D);

        glMatrixMode(GL_PROJECTION);
        glOrtho(0, 800, 600, 0, -1, 1);
        glMatrixMode(GL_MODELVIEW);

        getTexture("ball.png");

        // clear screen
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        // translate to the right location and prepare to draw
        glTranslatef(300, 200, 0);

        // draw a quad textured to match the sprite
        glBegin(GL_QUADS);
        {
            glTexCoord2f(0, 0);
            glVertex2f(0, 0);
            glTexCoord2f(0, 1);
            glVertex2f(0, 100);
            glTexCoord2f(1, 1);
            glVertex2f(100, 100);
            glTexCoord2f(1, 0);
            glVertex2f(100, 0);
        }
        glEnd();

        // update window contents
        Display.update();
        Thread.sleep(1000);
        Display.destroy();
    }

    public void getTexture(String resourceName) throws IOException {
        glBindTexture(GL_TEXTURE_2D, 1);
        BufferedImage bufferedImage = loadImage(resourceName);
        ByteBuffer textureBuffer = convertImageData(bufferedImage);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        // produce a texture from the byte buffer
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferedImage.getWidth(),
                bufferedImage.getHeight(), 0, GL_RGBA, GL_UNSIGNED_BYTE,
                textureBuffer);
    }

    /**
     * Convert the buffered image to a texture.
     */
    private ByteBuffer convertImageData(BufferedImage bufferedImage) {
        ByteBuffer imageBuffer;
        WritableRaster raster;
        BufferedImage texImage;

        ColorModel glAlphaColorModel = new ComponentColorModel(ColorSpace
                .getInstance(ColorSpace.CS_sRGB), new int[] { 8, 8, 8, 8 },
                true, false, Transparency.TRANSLUCENT, DataBuffer.TYPE_BYTE);
        raster = Raster.createInterleavedRaster(DataBuffer.TYPE_BYTE,
                bufferedImage.getWidth(), bufferedImage.getHeight(), 4, null);
        texImage = new BufferedImage(glAlphaColorModel, raster, true,
                new Hashtable());

        // copy the source image into the produced image
        Graphics g = texImage.getGraphics();
        g.setColor(new Color(0f, 0f, 0f, 0f));
        g.fillRect(0, 0, 256, 256);
        g.drawImage(bufferedImage, 0, 0, null);

        // build a byte buffer from the temporary image
        // that can be used by OpenGL to produce a texture.
        byte[] data = ((DataBufferByte) texImage.getRaster().getDataBuffer())
                .getData();
        imageBuffer = ByteBuffer.allocateDirect(data.length);
        imageBuffer.order(ByteOrder.nativeOrder());
        imageBuffer.put(data, 0, data.length);
        imageBuffer.flip();
        return imageBuffer;
    }

    /**
     * Load a given resource as a buffered image.
     */
    private BufferedImage loadImage(String ref) throws IOException {
        URL url = getClass().getClassLoader().getResource(ref);
        // due to an issue with ImageIO and mixed signed code
        // we are now using good old-fashioned ImageIcon to load
        // images and then paint them on top of a new BufferedImage
        Image img = new ImageIcon(url).getImage();
        BufferedImage bufferedImage = new BufferedImage(img.getWidth(null), img
                .getHeight(null), BufferedImage.TYPE_INT_ARGB);
        Graphics g = bufferedImage.getGraphics();
        g.drawImage(img, 0, 0, null);
        g.dispose();
        return bufferedImage;
    }
}
You need to enable blending to allow transparent bits on an image when using OpenGL.
You'll need to add something like the following to your code:
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
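In the start() method from the question, a natural place for these calls is right after Display.create(), next to the existing texture setup:

Display.create();

// enable textures since we're going to use these for our sprites
glEnable(GL_TEXTURE_2D);

// enable alpha blending so the PNG's transparent pixels stay transparent
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);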
