一、 常用的坐标系
1. Android View 坐标系
2. opengl坐标系中采用的是3维坐标:
static final float COORD[] = {
-1.0f, -1.0f, //1
1.0f, -1.0f, //2
-1.0f, 1.0f, //3
1.0f, 1.0f, //4
};
坐标原点默认在屏幕的中间,即(width/2,height/2)位置上,z轴是从屏幕"内"指向屏幕外,而且还要注意原点和x,y轴平面是在屏幕的"表面",有引号,这个"表面"刚好是人看不到的面,所以如果你要画一条线,线端点(1,0,0),(0,1,0),直接画到屏幕上,将看不到显示,怎么办?很简单,这个坐标系是三维空间的,那么就将坐标系沿Z轴的负轴方向移动一点,将X,Y平面稍微向屏幕内部移动一点,就能够看到直线了,想象一下在三维空间中,将坐标系往屏幕里面推一下,那么x,y轴形成的平面就在屏幕里面了,在它上面的直线就可以看见了.后面有例子.
3. opengl 纹理的坐标系:
一般来说,坐标系可以自由地进行平移或旋转等变换操作.
怎样验证 OpenGL 的坐标系是什么样子的?
在 OpenGL 的坐标系中画出 X、Y、Z 轴的直线,根据显示的现象就可以确定
/**
 * Entry activity: hosts a single GLSurfaceView whose renderer draws the
 * coordinate-axis demo. Lifecycle callbacks are forwarded to the view so
 * the GL rendering thread pauses and resumes together with the activity.
 */
public class MainActivity extends Activity {

    // Created in code rather than inflated from R.layout.activity_main.
    private GLSurfaceView mGlView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mGlView = new GLSurfaceView(this);
        // Swap in PumpkinDotRenderer / PumpKinLineRenderer / PumpKinTriangleRenderer
        // / PumpKinPyramidRenderer here to try the other demos.
        mGlView.setRenderer(new PumpKinRenderer());
        setContentView(mGlView);
    }

    @Override
    protected void onResume() {
        super.onResume();
        mGlView.onResume(); // resume the GL rendering thread
    }

    @Override
    protected void onPause() {
        super.onPause();
        mGlView.onPause(); // pause the GL rendering thread
    }
}
/**
 * Renderer that clears the screen every frame and draws the PumpKin
 * axis model with a standard perspective projection.
 */
public class PumpKinRenderer implements GLSurfaceView.Renderer {

    private final PumpKin mModel;

    public PumpKinRenderer() {
        mModel = new PumpKin();
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        // Black background, fully cleared depth buffer, depth test on.
        gl.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
        gl.glClearDepthf(1.0f);
        gl.glEnable(GL10.GL_DEPTH_TEST);
        gl.glDepthFunc(GL10.GL_LEQUAL);
        gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST);
        gl.glShadeModel(GL10.GL_SMOOTH);
        gl.glDisable(GL10.GL_DITHER);
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        if (height == 0) {
            height = 1; // avoid a divide-by-zero in the aspect-ratio computation
        }
        float aspect = (float) width / height;
        gl.glViewport(0, 0, width, height);
        // 45-degree vertical field of view, near/far planes at 0.1 / 100.
        gl.glMatrixMode(GL10.GL_PROJECTION);
        gl.glLoadIdentity();
        GLU.gluPerspective(gl, 45, aspect, 0.1f, 100.0f);
        gl.glMatrixMode(GL10.GL_MODELVIEW);
        gl.glLoadIdentity();
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
        gl.glLoadIdentity();
        // Push the scene 2 units into the screen; without this translation
        // the axis lines sit on the eye plane and are not visible.
        gl.glTranslatef(0.0f, 0.0f, -2f);
        // gl.glRotatef(45, 1.0f, 1.0f, 1.0f); // optional tilt to see all three axes
        mModel.draw(gl);
    }
}
/**
 * Draws the three coordinate axes as GL_LINES: X in red, Y in green,
 * Z in blue, each from the origin to a unit point on that axis. Used to
 * visualize the orientation of the OpenGL coordinate system.
 */
public class PumpKin {
    private FloatBuffer vertexsBuffer;
    private FloatBuffer colorsBuffer;

    // Three line segments; each pair of rows is (origin, unit point on axis).
    private float vertexs[] = {
        0.0f, 0.0f, 0.0f,
        1.0f, 0.0f, 0.0f, // X axis
        0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, // Y axis
        0.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 1.0f  // Z axis
    };

    // One RGBA color per vertex. Both endpoints of a segment share a color
    // so each line is solid; differing endpoint colors would produce a
    // gradient along the line.
    private float colors[] = {
        // red (X)
        1.0f, 0.0f, 0.0f, 1.0f,
        1.0f, 0.0f, 0.0f, 1.0f,
        // green (Y)
        0.0f, 1.0f, 0.0f, 1.0f,
        0.0f, 1.0f, 0.0f, 1.0f,
        // blue (Z)
        0.0f, 0.0f, 1.0f, 1.0f,
        0.0f, 0.0f, 1.0f, 1.0f
    };

    public PumpKin() {
        // GL requires vertex data in direct, native-byte-order buffers
        // (4 bytes per float).
        ByteBuffer vbb = ByteBuffer.allocateDirect(vertexs.length * 4);
        vbb.order(ByteOrder.nativeOrder());
        vertexsBuffer = vbb.asFloatBuffer();
        vertexsBuffer.put(vertexs);
        vertexsBuffer.position(0);

        ByteBuffer cbb = ByteBuffer.allocateDirect(colors.length * 4);
        cbb.order(ByteOrder.nativeOrder());
        colorsBuffer = cbb.asFloatBuffer();
        colorsBuffer.put(colors);
        colorsBuffer.position(0);
    }

    /**
     * Issues the draw call for the three axis lines.
     *
     * @param gl the GL 1.x context supplied by the renderer
     */
    public void draw(GL10 gl) {
        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
        gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexsBuffer);
        gl.glColorPointer(4, GL10.GL_FLOAT, 0, colorsBuffer);
        gl.glDrawArrays(GL10.GL_LINES, 0, vertexs.length / 3);
        gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
        // Fix: the color array was enabled above but never disabled,
        // leaking client state into any subsequent draw call.
        gl.glDisableClientState(GL10.GL_COLOR_ARRAY);
    }
}
红色、绿色、蓝色三条线分别为 X、Y、Z 轴,构成坐标系
贴图规则
a .顶点数组 (2 open gl 的顶点数组)
static final float COORD1[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
b. 提供纹理坐标,它称为纹理坐标数组 (需要纹理如何展示)
b1 "整个纹理图片"的纹理坐标
float texCoords[] = new float[] {
0.0f, 0.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
};
b2 . 右上角(整个纹理1/4)
float texCoords[] = new float[] {
0.5f, 0.5f,
1f, 0.5f,
0.5f, 1f,
1f, 1f
};
b3. 截取中间的一个三角形
float texCoords[] = new float[] {
0.0f, 0.0f,
1.0f, 0.0f,
0.5f, 1.0f,
};
用上面的顶点数组 和 纹理数组挨个替换,可以查看到效果
float[] coord = COORD_REVERSE;
float[] texture_coord = TEXTURE_COORD_REVERSE;
b4 顶点坐标和 纹理坐标是 1 1 对应的 ,
corrd1[0] --> TEXTURE_COORD1[0]
corrd1[1] --> TEXTURE_COORD1[1]
OpenGL 会把纹理坐标处采样到的颜色绘制到对应的世界坐标顶点上
所以可以任意改变顺序的,只要对应上就可以了,只要世界坐标和纹理坐标数组里的点能够对的上,顺序不是问题
如下4组显示效果一样
static final float COORD1[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
static final float TEXTURE_COORD1[] = {
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
};
static final float COORD2[] = {
-1.0f, 1.0f,
-1.0f, -1.0f,
1.0f, 1.0f,
1.0f, -1.0f,
};
static final float TEXTURE_COORD2[] = {
0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 0.0f,
1.0f, 1.0f,
};
static final float COORD3[] = {
1.0f, -1.0f,
1.0f, 1.0f,
-1.0f, -1.0f,
-1.0f, 1.0f,
};
static final float TEXTURE_COORD3[] = {
1.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
0.0f, 0.0f,
};
static final float COORD4[] = {
1.0f, -1.0f,
1.0f, 1.0f,
-1.0f, -1.0f,
-1.0f, 1.0f,
};
static final float TEXTURE_COORD4[] = {
1.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
0.0f, 0.0f,
};
-1,-1 对应 0,1
1,-1 对应 1,1
static final float COORD1[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
static final float TEXTURE_COORD1[] = {
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
};
翻转效果
static final float COORD_REVERSE[] = {
1.0f, -1.0f,
1.0f, 1.0f,
-1.0f, -1.0f,
-1.0f, 1.0f,
};
static final float TEXTURE_COORD_REVERSE[] = {
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 0.0f,
0.0f, 1.0f,
};
二 、 翻转的问题
1、OpenGL纹理的原点是左下角
camera、mediaplayer 输出的 OES 纹理或者 FBO 的纹理,使用以左下角为原点的坐标系
public static final float RECTANGLE_2D_TEX_COORDS[] = {
0, 0, // 0 bottom left
1, 0, // 1 bottom right
0, 1, // 2 top left
1, 1 // 3 top right
};
2、在Android平台中,Bitmap绑定的2D纹理,是上下颠倒的, 可以按照在左上角处理
所以在 绑定2D纹理时, 需要修改纹理坐标系为左上角
public static final float RECTANGLE_2D_TEX_COORDS[] = {
0, 1, // 0 bottom left
1, 1, // 1 bottom right
0, 0, // 2 top left
1, 0 // 3 top right
};
三、demo
/**
 * Minimal GLES 2.0 pass-through filter: draws a 2D texture onto a
 * full-viewport quad (triangle strip) using a trivial vertex/fragment
 * shader pair. The various COORD/TEXTURE_COORD constant pairs demonstrate
 * that vertex order is irrelevant as long as each vertex is matched with
 * its corresponding texture coordinate.
 */
public class Filter {
// Pass-through vertex shader: forwards position and texture coordinate.
protected static final String VERTEX_SHADER = "" +
"attribute vec4 position;\n" +
"attribute vec4 inputTextureCoordinate;\n" +
" \n" +
"varying vec2 textureCoordinate;\n" +
" \n" +
"void main()\n" +
"{\n" +
" gl_Position = position;\n" +
" textureCoordinate = inputTextureCoordinate.xy;\n" +
"}";
// Pass-through fragment shader: samples the input texture unchanged.
protected static final String FRAGMENT_SHADER = "" +
"varying highp vec2 textureCoordinate;\n" +
" \n" +
"uniform sampler2D inputImageTexture;\n" +
" \n" +
"void main()\n" +
"{\n" +
" gl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n" +
"}";
// Quad vertices in NDC, ordered for GL_TRIANGLE_STRIP:
// bottom-left, bottom-right, top-left, top-right.
static final float COORD1[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
// Texture coordinates matched 1:1 with COORD1 (V flipped, i.e. a
// top-left-origin image such as an Android Bitmap displays upright).
static final float TEXTURE_COORD1[] = {
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
};
// Same quad as COORD1 but with the vertices listed in a different order;
// paired with TEXTURE_COORD2 the on-screen result is identical.
static final float COORD2[] = {
-1.0f, 1.0f,
-1.0f, -1.0f,
1.0f, 1.0f,
1.0f, -1.0f,
};
static final float TEXTURE_COORD2[] = {
0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 0.0f,
1.0f, 1.0f,
};
// Yet another vertex ordering of the same quad / texture mapping.
static final float COORD3[] = {
1.0f, -1.0f,
1.0f, 1.0f,
-1.0f, -1.0f,
-1.0f, 1.0f,
};
static final float TEXTURE_COORD3[] = {
1.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
0.0f, 0.0f,
};
// NOTE(review): COORD4/TEXTURE_COORD4 are byte-for-byte duplicates of
// COORD3/TEXTURE_COORD3 — likely a copy-paste leftover; kept because the
// fields are package-visible and may be referenced elsewhere.
static final float COORD4[] = {
1.0f, -1.0f,
1.0f, 1.0f,
-1.0f, -1.0f,
-1.0f, 1.0f,
};
static final float TEXTURE_COORD4[] = {
1.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
0.0f, 0.0f,
};
// Mirrored variant: vertex X order is reversed relative to COORD1 while
// the texture U order is not flipped the same way.
static final float COORD_REVERSE[] = {
1.0f, -1.0f,
1.0f, 1.0f,
-1.0f, -1.0f,
-1.0f, 1.0f,
};
static final float TEXTURE_COORD_REVERSE[] = {
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 0.0f,
0.0f, 1.0f,
};
// Vertically flipped variant of the mapping.
static final float COORD_FLIP[] = {
1.0f, -1.0f,
1.0f, 1.0f,
-1.0f, -1.0f,
-1.0f, 1.0f,
};
static final float TEXTURE_COORD_FLIP[] = {
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 1.0f,
1.0f, 0.0f,
};
private String mVertexShader;
private String mFragmentShader;
private FloatBuffer mCubeBuffer; // vertex positions (2 floats per vertex)
private FloatBuffer mTextureCubeBuffer; // texture coordinates (2 floats per vertex)
protected int mProgId; // linked GL program handle
protected int mAttribPosition;
protected int mAttribTexCoord;
protected int mUniformTexture;
public Filter() {
this(VERTEX_SHADER, FRAGMENT_SHADER);
}
public Filter(String vertexShader, String fragmentShader) {
mVertexShader = vertexShader;
mFragmentShader = fragmentShader;
}
/**
 * One-time setup: uploads vertex/texture buffers and compiles the shaders.
 * Must be called on the GL thread with a current context.
 */
public void init() {
loadVertex();
initShader();
// NOTE(review): sets the blend function but GL_BLEND is never enabled in
// this class (drawFrame only disables it) — presumably enabled by a
// caller; confirm before relying on blending here.
GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);
}
// Copies the chosen COORD/TEXTURE_COORD pair into direct native-order
// float buffers as required by glVertexAttribPointer.
public void loadVertex() {
float[] coord = COORD1;
float[] texture_coord = TEXTURE_COORD1;
mCubeBuffer = ByteBuffer.allocateDirect(coord.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mCubeBuffer.put(coord).position(0);
mTextureCubeBuffer = ByteBuffer.allocateDirect(texture_coord.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mTextureCubeBuffer.put(texture_coord).position(0);
}
// Compiles/links the program and caches attribute/uniform locations.
public void initShader() {
mProgId = GLHelper.loadProgram(mVertexShader, mFragmentShader);
mAttribPosition = GLES20.glGetAttribLocation(mProgId, "position");
mUniformTexture = GLES20.glGetUniformLocation(mProgId, "inputImageTexture");
mAttribTexCoord = GLES20.glGetAttribLocation(mProgId,
"inputTextureCoordinate");
}
/**
 * Renders the given 2D texture onto the full-viewport quad.
 *
 * @param glTextureId GL texture name to sample, or GLHelper.NO_TEXTURE to
 *                    draw with whatever texture is currently bound
 */
public void drawFrame(int glTextureId) {
// Re-create the program if the GL context was lost since init().
if (!GLES20.glIsProgram(mProgId)) {
initShader();
}
GLES20.glUseProgram(mProgId);
mCubeBuffer.position(0);
GLES20.glVertexAttribPointer(mAttribPosition, 2, GLES20.GL_FLOAT, false, 0, mCubeBuffer);
GLES20.glEnableVertexAttribArray(mAttribPosition);
mTextureCubeBuffer.position(0);
GLES20.glVertexAttribPointer(mAttribTexCoord, 2, GLES20.GL_FLOAT, false, 0,
mTextureCubeBuffer);
GLES20.glEnableVertexAttribArray(mAttribTexCoord);
if (glTextureId != GLHelper.NO_TEXTURE) {
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, glTextureId);
GLES20.glUniform1i(mUniformTexture, 0);
}
// 4 vertices as a triangle strip = the full quad.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
// Restore GL state so this draw does not leak into the next filter.
GLES20.glDisableVertexAttribArray(mAttribPosition);
GLES20.glDisableVertexAttribArray(mAttribTexCoord);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glDisable(GLES20.GL_BLEND);
}
}