I am trying to draw a texture on a triangle mesh. The mesh renders, but the texture does not appear on it. See the picture below: it's a plain triangle with no texture on it.
String vertexShader = "attribute vec4 a_position; \n" +
"attribute vec4 a_color;\n" +
"attribute vec2 a_texCoord0;\n" +
"uniform mat4 u_worldView;\n" +
"varying vec4 v_color;" +
"varying vec2 v_texCoords;" +
"void main() \n" +
"{ \n" +
" v_color = vec4(1, 1, 1, 1); \n" +
" v_texCoords = a_texCoord0; \n" +
" gl_Position = u_worldView * a_position; \n" +
"} \n" ;
String fragmentShader = "#ifdef GL_ES\n" +
"precision mediump float;\n" +
"#endif\n" +
"varying vec4 v_color;\n" +
"varying vec2 v_texCoords;\n" +
"uniform sampler2D u_texture;\n" +
"void main() \n" +
"{ \n" +
" gl_FragColor = v_color * texture2D(u_texture, v_texCoords);\n" +
"}";
shader = new ShaderProgram(vertexShader, fragmentShader);
if (shader.isCompiled() == false) {
Gdx.app.log("ShaderError", shader.getLog());
System.exit(0);
}
mesh = new Mesh(true, 3, 3,
new VertexAttribute(Usage.Position, 3, ShaderProgram.POSITION_ATTRIBUTE),
new VertexAttribute(Usage.Color, 4, ShaderProgram.COLOR_ATTRIBUTE),
new VertexAttribute(Usage.TextureCoordinates, 2, ShaderProgram.TEXCOORD_ATTRIBUTE));
mesh.setVertices(new float[] { -0.5f, -0.5f, 0, 0.2f, 0.3f, 0.4f, 1f, 0, 1,
0.5f, -0.5f, 0, 0.1f, 0.2f, 0.1f, 1f, 1, 1,
0, 0.5f, 0, 0, 0.4f, 0.5f, 0.5f, 1f, 0 });
mesh.setIndices(new short[] { 0, 1, 2 });
texture = new Texture(Gdx.files.internal("data/caveman.png"));
And in my render method:
@Override
public void render () {
Gdx.gl.glClearColor(0, 0, 0, 1);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
Gdx.gl.glEnable(GL20.GL_TEXTURE_2D);
texture.bind();
shader.begin();
shader.setUniformMatrix("u_worldView", viewport.getCamera().combined);
shader.setUniformi("u_texture", 0);
mesh.render(shader, GL20.GL_TRIANGLES);
shader.end();
}
OK, I fixed it by changing my shader to:
String vertexShader =
"attribute vec4 a_position;\n" +
"attribute vec4 a_color;\n" +
"attribute vec2 a_texCoord;\n" +
"uniform mat4 u_worldView;\n" +
"varying vec4 v_color;\n" +
"varying vec2 v_texCoords;\n" +
"void main()\n" +
"{\n" +
" v_color = vec4(1, 1, 1, 1);\n" +
" v_texCoords = a_texCoord;\n" +
" gl_Position = u_worldView * a_position;\n" +
"}\n" ;
String fragmentShader =
"#ifdef GL_ES\n" +
"precision mediump float;\n" +
"#endif\n" +
"varying vec4 v_color;\n" +
"varying vec2 v_texCoords;\n" +
"uniform sampler2D u_texture;\n" +
"void main()\n" +
"{\n" +
"vec4 texColor = texture2D(u_texture, v_texCoords);\n" +
" gl_FragColor = texColor;\n" +
"}";
Now it draws the texture inside the mesh.
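For reference, the root cause appears to be an attribute-name mismatch: the mesh declares its texture coordinates under the alias ShaderProgram.TEXCOORD_ATTRIBUTE ("a_texCoord"), while the original vertex shader read a_texCoord0, so the coordinates were never bound. An alternative fix, if you would rather keep the original shader, is to give the mesh attribute the explicit "a_texCoord0" alias instead. A minimal sketch (only the texture-coordinate alias changes from the mesh setup above):
// Match the mesh attribute alias to the shader's a_texCoord0 instead of renaming it in the shader.
mesh = new Mesh(true, 3, 3,
        new VertexAttribute(Usage.Position, 3, ShaderProgram.POSITION_ATTRIBUTE),
        new VertexAttribute(Usage.Color, 4, ShaderProgram.COLOR_ATTRIBUTE),
        new VertexAttribute(Usage.TextureCoordinates, 2, ShaderProgram.TEXCOORD_ATTRIBUTE + "0"));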
I have a shader program that draws a border line depending on the alpha value around each pixel. I want to add a yellow border line along an image, just like this:
However, it does not give me the expected result. What I can't understand most is why there is always a border line along one side of the image.
My fragment shader code:
varying vec4 v_color;
varying vec2 v_texCoords;
uniform sampler2D u_texture;
uniform vec2 u_imageSize;
uniform vec4 u_borderColor;
uniform float u_borderSize;
void main() {
vec4 color = texture2D(u_texture, v_texCoords);
vec2 pixelToTextureCoords = 1. / u_imageSize;
bool isInteriorPoint = true;
bool isExteriorPoint = true;
for (float dx = -u_borderSize; dx < u_borderSize; dx++)
{
for (float dy = -u_borderSize; dy < u_borderSize; dy++){
vec2 point = v_texCoords + vec2(dx,dy) * pixelToTextureCoords;
float alpha = texture2D(u_texture, point).a;
if ( alpha < 0.5 )
isInteriorPoint = false;
if ( alpha > 0.5 )
isExteriorPoint = false;
}
}
if (!isInteriorPoint && !isExteriorPoint && color.a < 0.5)
gl_FragColor = u_borderColor;
else
gl_FragColor = v_color * color;
}
My vertex shader code:
attribute vec4 a_position;
attribute vec4 a_color;
attribute vec2 a_texCoord0;
varying vec4 v_color;
varying vec2 v_texCoords;
uniform mat4 u_projTrans;
uniform vec2 u_viewportInverse;
void main() {
v_color = a_color;
v_texCoords = a_texCoord0;
gl_Position = u_projTrans * a_position;
}
My setup code:
shaderProgram.setUniformf( "u_imageSize", new Vector2(getWidth(), getHeight()) );
shaderProgram.setUniformf( "u_borderColor", Color.YELLOW );
shaderProgram.setUniformf( "u_borderSize", 1 );
Outcome image (the shape above is without the shader, the shape below is with the shader):
Please give me any kind of guidance.
The issue is caused by the texture coordinates in the loop becoming < 0.0 or > 1.0, so the texture is looked up "out of bounds". What happens in this case depends on the wrap parameters (see glTexParameter). To solve the issue, add a range check to the loop and skip the lookup when the coordinates are not in the range [0.0, 1.0]:
for (float dx = -u_borderSize; dx < u_borderSize; dx++)
{
for (float dy = -u_borderSize; dy < u_borderSize; dy++){
vec2 point = v_texCoords + vec2(dx,dy) * pixelToTextureCoords;
// range check
if (point.x < 0.0 || point.x > 1.0 || point.y < 0.0 || point.y > 1.0)
continue;
float alpha = texture2D(u_texture, point).a;
if ( alpha < 0.5 )
isInteriorPoint = false;
if ( alpha > 0.5 )
isExteriorPoint = false;
}
}
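If you additionally want lookups near the edges to behave predictably regardless of the wrap mode configured elsewhere, you can also clamp the texture on the Java side. A small sketch, assuming texture is the Texture bound to u_texture:
// Out-of-range coordinates then repeat the edge texel instead of wrapping to the opposite side.
texture.setWrap(Texture.TextureWrap.ClampToEdge, Texture.TextureWrap.ClampToEdge);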
I have a new class MyActor that extends Actor, with a shader applied to it.
However, the shader fills up the transparent background unexpectedly.
The draw() method inside MyActor is as follows:
@Override
public void draw(Batch batch, float parentAlpha) {
if(shaderProgram!=null)
{
batch.setShader(shaderProgram);
}
if(!drawParentAtBack)super.draw(batch, parentAlpha); // by default is false
Color c = getColor(); // used to apply tint color effect
batch.setColor(c.r, c.g, c.b, c.a * parentAlpha);
if ( isVisible() )
{
if(displayFrame !=null) // a textureregion
{
batch.draw(displayFrame,
getX(),getY(),
getOriginX(),getOriginY(),
getWidth(),getHeight(),
getScaleX(),getScaleY(),
getRotation());
}
}
if(drawParentAtBack)super.draw(batch, parentAlpha);
if(shaderProgram!=null)
{
batch.setShader(null);
}
}
public void setShader(String vs, String fs){
vertexShaderCode = Gdx.files.internal("shaders/" + vs + ".vs").readString();
fragmentShaderCode = Gdx.files.internal("shaders/" + fs + ".fs").readString();
shaderProgram = new ShaderProgram(vertexShaderCode, fragmentShaderCode);
if (!shaderProgram.isCompiled())
{
d( "Shader compile error: " + shaderProgram.getLog() );
}
}
My setup goes like this:
MyActor myActor1, myActor2;
..... // setting up myActor1 & myActor2
myActor1.setShader("default","greyScale");
myActor2.setShader("default","greyScale");
My simple fragment shader code, from this tutorial:
#ifdef GL_ES
precision mediump float;
#endif
varying vec4 v_color;
varying vec2 v_texCoords;
uniform sampler2D u_texture;
uniform mat4 u_projTrans;
void main() {
vec3 color = texture2D(u_texture, v_texCoords).rgb;
float gray = (color.r + color.g + color.b) / 3.0;
vec3 grayscale = vec3(gray);
gl_FragColor = vec4(grayscale, 1.0);
}
My expected result is gray shapes with a transparent background, but it turns out like this:
Sample shape image without the shader:
Any help, please.
In general, transparency is achieved with alpha blending. The alpha channel of the fragment controls the transparency and has to be set.
In the fragment shader the alpha channel of the texture is omitted:
gl_FragColor = vec4(grayscale, 1.0);
Write the alpha channel of the texture (u_texture) to the alpha channel of the output (gl_FragColor.a):
#ifdef GL_ES
precision mediump float;
#endif
varying vec4 v_color;
varying vec2 v_texCoords;
uniform sampler2D u_texture;
uniform mat4 u_projTrans;
void main() {
// read RGB color channels and alpha channel
vec4 color = texture2D(u_texture, v_texCoords);
float gray = (color.r + color.g + color.b) / 3.0;
vec3 grayscale = vec3(gray);
// write gray scale and alpha channel
gl_FragColor = vec4(grayscale.rgb, color.a);
}
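On the Java side, it is also worth making sure the batch actually blends with that alpha when the actor is drawn; SpriteBatch has blending on by default, but if it was disabled or changed elsewhere the opaque background will come back. A minimal sketch:
// Ensure standard alpha blending so the alpha written by the shader is respected.
batch.enableBlending();
batch.setBlendFunction(GL20.GL_SRC_ALPHA, GL20.GL_ONE_MINUS_SRC_ALPHA);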
I use an FBO to render an image through a blur shader, and the FPS drops to 20. Why? It was fine when I blurred it along the x-axis directly without an FBO. It was also fine when I just used the FBO to store an image and draw it to the screen. Is there anything I should pay attention to? Any help would be great! Thanks.
Here is my code:
private static final int FB_SIZE = 200;
public SpriteBatch batch;
protected ShaderProgram mShaderA;
protected ShaderProgram mShaderB;
protected Mesh mMeshA;
protected Mesh mMeshB;
RenderSurface blurTargetB;
RenderSurface blurTargetC;
Texture texture;
public void init() {
texture = new Texture(
Gdx.files.internal("mainMenuBack.png"));
texture.setFilter(TextureFilter.Linear, TextureFilter.Linear);
texture.setWrap(TextureWrap.ClampToEdge, TextureWrap.ClampToEdge);
blurTargetB = new RenderSurface(Format.RGBA4444, 1080, 1920, true);
blurTargetC = new RenderSurface(Format.RGBA4444, 1080, 1920, true);
if (mMeshA != null)
mMeshA.dispose();
mMeshA = new Mesh(true, 4, 4, new VertexAttribute(Usage.Position, 2,
ShaderProgram.POSITION_ATTRIBUTE), new VertexAttribute(
Usage.TextureCoordinates, 2, ShaderProgram.TEXCOORD_ATTRIBUTE
+ "0"));
mMeshA.setVertices(new float[] { -1f, -1f, 0, 1, 1f, -1f, 1, 1, 1f, 1f,
1, 0, -1f, 1f, 0, 0 });
if (mMeshB != null)
mMeshB.dispose();
mMeshB = new Mesh(true, 4, 4, new VertexAttribute(Usage.Position, 2,
ShaderProgram.POSITION_ATTRIBUTE), new VertexAttribute(
Usage.TextureCoordinates, 2, ShaderProgram.TEXCOORD_ATTRIBUTE
+ "1"));
mMeshB.setVertices(new float[] { -1f, -1f, 0, 1, 1f, -1f, 1, 1, 1f, 1f,
1, 0, -1f, 1f, 0, 0 });
}
public BlurRenderer() {
mShaderA = createXBlurShader();
mShaderB = createYBlurShader();
init();
batch=new SpriteBatch();
}
public void render() {
drawAToB();
drawBToC();
drawCToScreen();
}
public void drawAToB() {
blurTargetB.begin(GL20.GL_COLOR_BUFFER_BIT);
batch.setShader(mShaderA);
batch.begin();
batch.draw(texture, 0,0,1080,1920);
batch.flush();
blurTargetB.end();
}
public void drawBToC() {
blurTargetC.begin(GL20.GL_COLOR_BUFFER_BIT);
batch.draw( blurTargetB.getTexture(), 0, 0);
batch.flush();
blurTargetC.end();
}
public void dispose() {
mMeshA.dispose();
mMeshB.dispose();
}
public void drawCToScreen() {
batch.draw( blurTargetC.getTexture(), 0, 0);
batch.end();
}
public ShaderProgram createXBlurShader() {
String vertexShader = "attribute vec4 " + ShaderProgram.POSITION_ATTRIBUTE + ";\n" //
+ "attribute vec4 " + ShaderProgram.COLOR_ATTRIBUTE + ";\n" //
+ "attribute vec2 " + ShaderProgram.TEXCOORD_ATTRIBUTE + "0;\n" //
+ "uniform float uBlurBufferSize;\n" // 1 / Size of the blur
+ "uniform mat4 u_projTrans;\n" //
+ "varying vec4 v_color;\n" //
+ "varying vec2 v_texCoords;\n" //
+ "varying vec2 vBlurTexCoords[5];\n" // output texture
+ "\n" //
+ "void main()\n" //
+ "{\n" //
+ " v_color = " + ShaderProgram.COLOR_ATTRIBUTE + ";\n" //
+ " v_texCoords = " + ShaderProgram.TEXCOORD_ATTRIBUTE + "0;\n" //
+ " vBlurTexCoords[0] = v_texCoords + vec2(-2.0 * uBlurBufferSize, 0.0);\n"
+ " vBlurTexCoords[1] = v_texCoords + vec2(-1.0 * uBlurBufferSize, 0.0);\n"
+ " vBlurTexCoords[2] = v_texCoords;\n"
+ " vBlurTexCoords[3] = v_texCoords + vec2( 1.0 * uBlurBufferSize, 0.0);\n"
+ " vBlurTexCoords[4] = v_texCoords + vec2( 2.0 * uBlurBufferSize, 0.0);\n"
+ " gl_Position = u_projTrans * " + ShaderProgram.POSITION_ATTRIBUTE + ";\n" //
+ "}\n";
String fragmentShader = "#ifdef GL_ES\n" //
+ "#define LOWP lowp\n" //
+ "precision mediump float;\n" //
+ "#else\n" //
+ "#define LOWP \n" //
+ "#endif\n" //
+ "varying LOWP vec4 v_color;\n" //
+ "varying vec2 v_texCoords;\n" //
+ "varying vec2 vBlurTexCoords[5];\n" // input texture coords
+ "uniform sampler2D u_texture;\n" //
+ "void main()\n"//
+ "{\n" //
+ " vec4 sum = vec4(0.0);\n"
+ " sum += texture2D(u_texture, vBlurTexCoords[0]) * 0.164074;\n"
+ " sum += texture2D(u_texture, vBlurTexCoords[1]) * 0.216901;\n"
+ " sum += texture2D(u_texture, vBlurTexCoords[2]) * 0.23805;\n"
+ " sum += texture2D(u_texture, vBlurTexCoords[3]) * 0.216901;\n"
+ " sum += texture2D(u_texture, vBlurTexCoords[4]) * 0.164074;\n"
+ " gl_FragColor = sum;\n"
+ "}";
ShaderProgram shader = new ShaderProgram(vertexShader, fragmentShader);
if (shader.isCompiled() == false) {
Gdx.app.log("ERROR", shader.getLog());
}
return shader;
}
It's expensive to blur a 1080x1920 image, especially on Android! Usually, when doing a Gaussian blur, you can downsample the screen to half or a quarter of the width and height without much quality loss. That also lets you get away with a smaller sampling radius to achieve the same appearance.
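As a rough sketch of that idea, using libGDX's standard FrameBuffer instead of the custom RenderSurface class from the question (the quarter-resolution factor and field names are assumptions):
// Blur at quarter resolution, then stretch the result back up when drawing to screen.
int blurWidth = 1080 / 4;
int blurHeight = 1920 / 4;
FrameBuffer blurTargetB = new FrameBuffer(Pixmap.Format.RGBA8888, blurWidth, blurHeight, false);
FrameBuffer blurTargetC = new FrameBuffer(Pixmap.Format.RGBA8888, blurWidth, blurHeight, false);

blurTargetB.begin();
batch.getProjectionMatrix().setToOrtho2D(0, 0, blurWidth, blurHeight);
batch.setShader(mShaderA);                        // horizontal pass
batch.begin();
batch.draw(texture, 0, 0, blurWidth, blurHeight); // downsample while drawing
batch.end();
blurTargetB.end();
// ... run the vertical pass from blurTargetB into blurTargetC the same way,
// then draw blurTargetC.getColorBufferTexture() stretched to the full 1080x1920 screen.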
I have two sprites and I want to fade from one into the other like:
Sprite sprite1, sprite2;
float alpha = 1;
...
public void render(float delta) {
alpha -= 0.01f;
if (alpha<0) alpha = 0;
sprite1.setAlpha(alpha);
sprite2.setAlpha(1-alpha);
sprite1.draw(batch);
sprite2.draw(batch);
}
But when I do this the blending is not right, the combined image becomes almost completely transparent around where alpha is 0.5. I've made a video of the problem here: http://vimeo.com/100472883 - sprite1 is the sharp cloud image, sprite2 is the blurred cloud image and I'm fading from one to the other and back.
If I use batch.setBlendFunction(GL20.GL_SRC_ALPHA, GL20.GL_ONE_MINUS_CONSTANT_ALPHA); before drawing sprites, the alpha of the images looks good, but it messes up the colors of both sprites.
I don't think they're going "almost completely" transparent but rather just partially transparent. If you have two 50% transparent objects in front of each other, you can still see through the pair of them. If you were doing a full-screen crossfade, the way you would correctly do it is draw one of the two sprites at 100% alpha behind the other sprite, and only modify the alpha of the sprite in front. However, it is not so easy when both sprites have an alpha channel.
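As a minimal sketch of that full-screen case (assuming both sprites cover the screen and alpha still runs from 1 down to 0 as in the question):
// Keep the back sprite fully opaque and only fade the front sprite in over it.
sprite1.setAlpha(1f);
sprite2.setAlpha(1f - alpha);
sprite1.draw(batch);
sprite2.draw(batch);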
One way would be with multi-texturing, which is not really how SpriteBatch is designed. If you did multi-texturing with SpriteBatch, you would need to make sure both sprites were aligned to the same TextureRegion of their respective Textures, or you could put them in the same texture and use a known offset in your shader to separate them. Then you could use a custom shader to blend the two sprites in the shader before drawing them.
If your sprites are going to remain monochrome, this becomes much easier. You can just combine the two sprites into one sprite by putting one sprite's color and alpha in the R and G channels of the image and the other sprite's color and alpha in the B and A channels. Then use a custom shader with a fragment shader like this:
String fragmentShader = "#ifdef GL_ES\n" //
+ "#define LOWP lowp\n" //
+ "precision mediump float;\n" //
+ "#else\n" //
+ "#define LOWP \n" //
+ "#endif\n" //
+ "varying LOWP vec4 v_color;\n" //
+ "varying vec2 v_texCoords;\n" //
+ "uniform sampler2D u_texture;\n" //
+ "uniform float u_crossfade;\n" //
+ "void main()\n"//
+ "{\n" //
+ " vec4 texture = texture2D(u_texture, v_texCoords);\n" //
+ " float sprite1Alpha = (1 - u_crossfade) * texture.g * v_color.a;\n" //
+ " float sprite2Alpha = u_crossfade * texture.a * v_color.a;\n" //
+ " gl_FragColor = v_color * vec4(vec3(sprite1Alpha*texture.r + sprite2Alpha*texture.b), sprite1Alpha + sprite2Alpha);\n" //
+ "}";
This should be drawn with the blend function GL_ONE, GL_ONE_MINUS_SRC_ALPHA, since the shader pre-multiplies the alpha. You also need to call customShader.setUniformf("u_crossfade", crossfadeBlendValue); between spriteBatch.begin() and end(), which means you can only batch one sprite at a time if you need different crossfade values per sprite.
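In code, the draw call might look roughly like this (a sketch; customShader, crossfadeBlendValue and combinedSprite are placeholder names):
// Pre-multiplied alpha blending plus the per-draw crossfade uniform.
batch.setBlendFunction(GL20.GL_ONE, GL20.GL_ONE_MINUS_SRC_ALPHA);
batch.setShader(customShader);
batch.begin();
customShader.setUniformf("u_crossfade", crossfadeBlendValue);
combinedSprite.draw(batch);   // the single sprite holding both images in its channels
batch.end();
batch.setShader(null);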
You could get around this if you aren't planning to color tint your sprites, by using the R component of the sprite's tint to represent the blend value. Then the shader would become
String fragmentShader = "#ifdef GL_ES\n" //
+ "#define LOWP lowp\n" //
+ "precision mediump float;\n" //
+ "#else\n" //
+ "#define LOWP \n" //
+ "#endif\n" //
+ "varying LOWP vec4 v_color;\n" //
+ "varying vec2 v_texCoords;\n" //
+ "uniform sampler2D u_texture;\n" //
+ "void main()\n"//
+ "{\n" //
+ " vec4 texture = texture2D(u_texture, v_texCoords);\n" //
+ " float sprite1Alpha = (1 - v_color.r) * texture.g * v_color.a;\n" //
+ " float sprite2Alpha = v_color.r * texture.a * v_color.a;\n" //
+ " gl_FragColor = vec4(vec3(sprite1Alpha*texture.r + sprite2Alpha*texture.b), sprite1Alpha + sprite2Alpha);\n" //
+ "}";
How can I draw triangles that are independent of each other? I want to make a ship game, and when the ship collides with a triangle I want that triangle to disappear, but I can't find a good way to do this. Creating a Mesh isn't the best solution because the triangles end up hardcoded, and using a ShapeRenderer doesn't seem good either because I can't control them separately. So I'm stuck; does anyone have any ideas?
I ended up solving my problem this way:
I created a Triangle object that creates a Mesh. In that object I had another problem: the bind wasn't working. I figured out how to solve it, and although I'm not sure exactly how, it works. Here's the final code that worked for me:
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.Mesh;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.VertexAttribute;
import com.badlogic.gdx.graphics.VertexAttributes.Usage;
import com.badlogic.gdx.graphics.glutils.ShaderProgram;
public class Triangle {
ShaderProgram shader;
Mesh mesh;
Texture texture;
float[] attributes = new float[15];
static int screenWidth;
static int screenHeight;
public Triangle(float[] vertices, float[] color, float[] textureVertices, Texture texture){
this.texture = texture;
createShader();
int j = 0;
int k = 0;
int l = 0;
if ((screenWidth <= 0) || (screenHeight <= 0))
{
throw new NullPointerException("Invalid screen dimensions for triangles.");
}
for (int i = 0; i < vertices.length; i++) {
vertices[i] = (vertices[i]/screenWidth - 1); // (vertices[i] - width/2)/(width/2)
vertices[++i] = (vertices[i]/screenHeight - 1); // (vertices[i] - height/2)/(height/2)
}
for (int i = 0; i < attributes.length;) {
attributes[i++] = vertices[j++];
attributes[i++] = vertices[j++];
attributes[i++] = color[k++];
attributes[i++] = textureVertices[l++];
attributes[i++] = textureVertices[l++];
}
mesh = new Mesh(false, attributes.length, 0, new VertexAttribute(
Usage.Position, 2, "a_position"), new VertexAttribute(
Usage.ColorPacked, 4, "a_color"), new VertexAttribute(
Usage.TextureCoordinates, 2, "a_texCoords"));
mesh.setVertices(attributes);
}
public static void setDimensions(int paramWidth, int paramHeight)
{
screenWidth = paramWidth;
screenHeight = paramHeight;
}
public void createShader()
{
// this shader tells opengl where to put things
String vertexShader = "attribute vec4 a_position; \n"
+ "attribute vec4 a_color; \n"
+ "attribute vec2 a_texCoords; \n"
+ "varying vec4 v_color; \n"
+ "varying vec2 v_texCoords; \n"
+ "void main() \n"
+ "{ \n"
+ " v_color = a_color; \n"
+ " v_texCoords = a_texCoords; \n"
+ " gl_Position = a_position; \n"
+ "} \n";
// this one tells it what goes in between the points (i.e
// colour/texture)
String fragmentShader = "#ifdef GL_ES \n"
+ "precision mediump float; \n"
+ "#endif \n"
+ "varying vec4 v_color; \n"
+ "varying vec2 v_texCoords; \n"
+ "uniform sampler2D u_texture;\n"
+ "void main() \n"
+ "{ \n"
+ " gl_FragColor = v_color * texture2D(u_texture, v_texCoords); \n"
+ "} \n";
shader = new ShaderProgram(vertexShader, fragmentShader);
}
public void render() {
Gdx.gl20.glViewport(0, 0, Gdx.graphics.getWidth(),
Gdx.graphics.getHeight());
Gdx.gl20.glEnable(GL20.GL_TEXTURE_2D);
Gdx.gl20.glActiveTexture(GL20.GL_TEXTURE0);
shader.begin();
texture.bind(0);
shader.setUniformi("u_texture", 0);
mesh.render(shader, GL20.GL_TRIANGLES);
shader.end();
}
public void dispose() {
texture.dispose();
mesh.dispose();
shader.dispose();
}
}
I hope it helps someone!
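For completeness, here is a usage sketch of the class above (the vertex, color and texture-coordinate values, and the image file name, are made up for illustration):
// Screen-space pixel coordinates are converted to clip space inside the constructor.
Triangle.setDimensions(Gdx.graphics.getWidth(), Gdx.graphics.getHeight());

float[] vertices = { 100, 100, 300, 100, 200, 300 };   // three (x, y) points in pixels
float[] colors = { Color.WHITE.toFloatBits(),
                   Color.WHITE.toFloatBits(),
                   Color.WHITE.toFloatBits() };         // one packed color per vertex
float[] texCoords = { 0, 1, 1, 1, 0.5f, 0 };            // (u, v) per vertex
Texture texture = new Texture(Gdx.files.internal("triangle.png"));

Triangle triangle = new Triangle(vertices, colors, texCoords, texture);

// in the render loop:
triangle.render();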