java – Android OpenGL ES 2: picking a ray from touch coordinates, unprojection calculation slightly off


Tags: java, android, 3d, matrix, opengl-es-2-0


I'm trying to implement object picking from touch coordinates via a ray intersection test.
I'm having trouble finding information on converting the touch coordinates into the coordinate system used in the world so that I can construct this ray.

My understanding so far is that the matrix applied to each vertex in the scene is:

projectionMatrix * viewMatrix * modelMatrix
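
With android.opengl.Matrix, which stores 4×4 matrices as 16-element column-major float arrays, that composition would look roughly like the sketch below (using the variable names from the code that follows; this is an illustration, not part of the original program):

    // Sketch: build the combined matrix in the order projection * view * model.
    // Matrix.multiplyMM(result, resultOffset, lhs, lhsOffset, rhs, rhsOffset) computes lhs * rhs.
    Matrix.multiplyMM(mMVMatrix, 0, viewMatrix, 0, mModelMatrix, 0);        // view * model
    Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVMatrix, 0);   // projection * (view * model)
    GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);   // upload to the vertex shader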

Here is my process for reversing that, trying to find the ray's endpoint in the scene, along with my drawing loop in case I'm simply applying the different matrices incorrectly:

    public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] modelView, float[] projection)
        {
        float[] rayDirection = new float[4];

        float normalizedX = 2 * touchX/windowWidth - 1;
        float normalizedY = 1 - 2*touchY/windowHeight;

        float[] unviewMatrix = new float[16];
        float[] viewMatrix = new float[16];
        Matrix.multiplyMM(viewMatrix, 0, projection, 0, modelView, 0);
        Matrix.invertM(unviewMatrix, 0, viewMatrix, 0);

        float[] nearPoint = multiplyMat4ByVec4(unviewMatrix, new float[]{normalizedX, normalizedY, 0, 1});
        float[] modelviewInverse = new float[16];
        Matrix.invertM(modelviewInverse, 0, modelView, 0);

        float[] cameraPos = new float[4];
        cameraPos[0] = modelviewInverse[12];
        cameraPos[1] = modelviewInverse[13];
        cameraPos[2] = modelviewInverse[14];
        cameraPos[3] = modelviewInverse[15];

        rayDirection[0] = nearPoint[0] - cameraPos[0];
        rayDirection[1] = nearPoint[1] - cameraPos[1];
        rayDirection[2] = nearPoint[2] - cameraPos[2];
        rayDirection[3] = nearPoint[3] - cameraPos[3];

        return rayDirection;
        }

    public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4)
        {
        float[] returnMatrix = new float[4];

        returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]);
        returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]);
        returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]);
        returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]);

        return returnMatrix;
        }

    @Override
    public void onDrawFrame(GL10 gl10) {
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

        long time = SystemClock.uptimeMillis() % 10000L;
        float angleInDegrees = (360.0f / 10000.0f) * ((int) time);

        GLES20.glViewport(0, 0, (int)(width/2), (int)(height/2));
        Matrix.setIdentityM(mModelMatrix, 0);
        Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, 1.5f, 0f, 0f, -5f, 0f, 1f, 0f);
        //Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
        drawTriangle(triangleVertices);

        //Matrix.translateM(mModelMatrix, 0, 1.5f, 0, -1f);
        //Matrix.frustumM(mProjectionMatrix, 0, left, right, -1.0f, 1.0f, 1.0f, 10.0f);
        Matrix.setIdentityM(mModelMatrix, 0);
        Matrix.setLookAtM(viewMatrix, 0, 1.5f, 0.8f, 0.5f, 0f, 0f, 0f, 0f, 1f, 0f);
        GLES20.glViewport((int)(width/2), (int)(height/2), (int)(width/2), (int)(height/2));
        drawTriangle(triangleVertices);
        drawIntersectionLine();

        /*
        Matrix.setLookAtM(viewMatrix, 0, 0, 1.5f, 0.5f, 0, 0, 0, 0, 0, -1f);
        GLES20.glViewport((int)(width/2), (int)height, (int)(width/2), (int)(height/2));
        drawTriangle(triangleVertices);
        drawIntersectionLine();
        */
        }

    private void drawTriangle(final FloatBuffer triangleBuffer)
    {
        triangleBuffer.position(positionOffset);
        GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
        GLES20.glEnableVertexAttribArray(mPositionHandle);

        triangleBuffer.position(colorOffset);
        GLES20.glVertexAttribPointer(mColorHandle, colorDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
        GLES20.glEnableVertexAttribArray(mColorHandle);

        Matrix.multiplyMM(mMVPMatrix, 0, viewMatrix, 0, mModelMatrix, 0);

        mMVMatrix = mMVPMatrix;

        Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);

        GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);

        //Log.d("OpenGLES2Test", "The intersection ray is: " + floatArrayAsString(getCameraPos(mMVMatrix)) + " + " + floatArrayAsString(getMouseRayProjection((int)(width / 2), (int)(height / 2), 1.0f, (int)width, (int)height, mMVMatrix, mProjectionMatrix)));
    }

    private void drawIntersectionLine()
    {
        lineVertices.position(0);
        GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, lineStrideBytes, lineVertices);
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLES20.glDrawArrays(GLES20.GL_LINES, 0, 2);
    }

    private void moveIntersectionLineEndPoint(float[] lineEndPoint)
    {
        this.lineEndPoint = lineEndPoint;

        float[] lineVerticesData = {
            lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],
            lineEndPoint[0], lineEndPoint[1], lineEndPoint[2]
        };
        lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
        lineVertices.put(lineVerticesData).position(0);
    }

Although I'm fairly sure my 4×4-matrix-by-4D-vector multiplication method is correct, here it is as well just in case:

public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4)
    {
        float[] returnMatrix = new float[4];

        returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]);
        returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]);
        returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]);
        returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]);

        return returnMatrix;
    }

The goal of this test app is to show the scene from a couple of different angles so that I can see how the intersection line looks based on my code. I wanted to draw the line starting at the camera's origin and ending at the intersection point, but it's behaving oddly. The endpoint seems to be pushed farther along the positive x-axis than it should be, and in some spots it seems to sort of... skip, as if there were a hole there or something. Although I still remember some linear algebra from calculus, I don't remember enough to know exactly what I'm doing here, and I've combed through many resources online with no luck. I'm hoping someone reading this has more experience with this than I do and will be kind enough to help me, or to give me any tips if there's something else I might be doing wrong or inefficiently.

Variable reference:
All matrices are float arrays of length 16.

mProjectionMatrix = projection matrix

mModelMatrix = model matrix

mMVPMatrix = projection * modelview matrix

mMVMatrix = modelview matrix


    private final FloatBuffer triangleVertices;

    private FloatBuffer lineVertices;

    private final int bytesPerFloat = 4;

    private float[] viewMatrix = new float[16];

    private static Context context;

    private int mMVPMatrixHandle;

    private int mPositionHandle;

    private int mColorHandle;

    private float[] mProjectionMatrix = new float[16];

    private float[] mModelMatrix = new float[16];

    private float[] mMVPMatrix = new float[16];

    private float[] mMVMatrix = new float[16];

    private final int strideBytes = 7 * bytesPerFloat;
    private final int lineStrideBytes = 3 * bytesPerFloat;

    private final int positionOffset = 0;

    private final int positionDataSize = 3;

    private final int colorOffset = 3;

    private final int colorDataSize = 4;

    private float width, height;

    private float[] lineStartPoint = new float[]{0, 0, 1.5f};

    private float[] lineEndPoint = new float[]{0, 0, 0};

Solution:

After some more searching I found a page that details the process in a different way. Now I no longer have the problem of the ray's end jumping to unexpected positions at random times, and the endpoint points exactly where it should!
Here is the page I used to fix my process:
http://www.antongerdelan.net/opengl/raycasting.html
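
Condensed, the recipe from that page is: convert the touch point to normalized device coordinates, treat it as a point on the near plane in clip space, unproject it into eye space with the inverse projection matrix, turn it into a direction (w = 0), transform it into world space with the inverse view matrix, and normalize. A rough sketch of those steps using the helpers from the source below (an outline, not a verbatim copy of the final method):

    float ndcX = 2f * touchX / windowWidth - 1f;            // screen x mapped to [-1, 1]
    float ndcY = 1f - 2f * touchY / windowHeight;           // screen y mapped to [-1, 1], flipped
    float[] rayClip = { ndcX, ndcY, -1f, 1f };              // point on the near plane, clip space

    float[] invProjection = new float[16];
    Matrix.invertM(invProjection, 0, mProjectionMatrix, 0);
    float[] rayEye = multiplyMat4ByVec4(invProjection, rayClip);
    rayEye = new float[]{ rayEye[0], rayEye[1], -1f, 0f };  // make it a direction, not a point

    float[] invView = new float[16];
    Matrix.invertM(invView, 0, viewMatrix, 0);
    float[] rayWorld = multiplyMat4ByVec4(invView, rayEye);
    float[] rayDirection = normalizeVector3(new float[]{ rayWorld[0], rayWorld[1], rayWorld[2] });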

Here is the final source code for my whole intersection-test app. Most of the relevant code is in the OpenGLRenderer class, in the getMouseRayProjection method.

MainActivity.java:

import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.app.Activity;
import android.content.Context;
import android.view.Menu;
import android.view.MotionEvent;

public class MainActivity extends Activity {

    private MyGLSurfaceView mGLSurfaceView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mGLSurfaceView = new MyGLSurfaceView(this);

        mGLSurfaceView.setEGLContextClientVersion(2);
        mGLSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
        OpenGLRenderer renderer = new OpenGLRenderer(this);
        mGLSurfaceView.setRenderer(renderer);
        mGLSurfaceView.renderer = renderer;

        setContentView(mGLSurfaceView);
    }


    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    protected void onResume() {
        super.onResume();
        mGLSurfaceView.onResume();
    }

    @Override
    protected void onPause() {
        super.onPause();
        mGLSurfaceView.onPause();
    }

}

class MyGLSurfaceView extends GLSurfaceView {

    public OpenGLRenderer renderer;

    public float previousX, previousY;

    public MyGLSurfaceView(Context context)
    {
        super(context);
    }

    @Override
    public boolean onTouchEvent(MotionEvent e)
    {
        float x = e.getX();
        float y = e.getY();

        switch(e.getAction()) {
        case MotionEvent.ACTION_MOVE:
            float dx = x - previousX;
            float dy = y - previousY;

            renderer.onTouch(x, y);
        }

        previousX = x;
        previousY = y;
        return true;
    }
}

OpenGLRenderer.java:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.content.Context;
import android.opengl.GLES20;
import android.opengl.GLU;
import android.opengl.Matrix;
import android.opengl.GLSurfaceView;
import android.os.SystemClock;
import android.util.Log;

public class OpenGLRenderer implements GLSurfaceView.Renderer {

    private final FloatBuffer triangleVertices;

    private FloatBuffer lineVertices;

    private final int bytesPerFloat = 4;

    private float[] viewMatrix = new float[16];

    private static Context context;

    private int mMVPMatrixHandle;

    private int mPositionHandle;

    private int mColorHandle;

    private float[] mProjectionMatrix = new float[16];

    private float[] mModelMatrix = new float[16];

    private float[] mMVPMatrix = new float[16];

    private float[] mMVMatrix = new float[16];

    private int[] viewport = new int[4];

    private final int strideBytes = 7 * bytesPerFloat;
    private final int lineStrideBytes = 3 * bytesPerFloat;

    private final int positionOffset = 0;

    private final int positionDataSize = 3;

    private final int colorOffset = 3;

    private final int colorDataSize = 4;

    private float width, height;

    private float[] lineStartPoint = new float[]{0, 0, 1f};

    private float[] lineEndPoint = new float[]{0, 0, 0};

    private float[] cameraPos = new float[]{0f, 0f, 2.5f};
    private float[] cameraLook = new float[]{0f, 0f, -5f};
    private float[] cameraUp = new float[]{0f, 1f, 0f};

    public OpenGLRenderer(Context context) {
        this.context = context;

        final float[] triangleVerticesData = {
                -0.5f, -0.25f, 0.0f,
                1.0f, 0.0f, 0.0f, 1.0f,

                0.5f, -0.25f, 0.0f,
                0.0f, 0.0f, 1.0f, 1.0f,

                0.0f, 0.559016994f, 0.0f,
                0.0f, 1.0f, 0.0f, 1.0f
        };

        triangleVertices = ByteBuffer.allocateDirect(triangleVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
        triangleVertices.put(triangleVerticesData).position(0);

        float[] lineVerticesData = {
            lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],
            lineEndPoint[0], lineEndPoint[1], lineEndPoint[2]
        };
        lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
        lineVertices.put(lineVerticesData).position(0);
    }

    @Override
    public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
        GLES20.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);

        Matrix.setLookAtM(viewMatrix, 0, cameraPos[0], cameraPos[1], cameraPos[2], cameraLook[0], cameraLook[1], cameraLook[2], cameraUp[0], cameraUp[1], cameraUp[2]);

        try {
            int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);

            if (vertexShaderHandle != 0)
            {
                GLES20.glShaderSource(vertexShaderHandle, readShader("vertexShader"));

                GLES20.glCompileShader(vertexShaderHandle);

                final int[] compileStatus = new int[1];
                GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

                if (compileStatus[0] == 0)
                {
                    GLES20.glDeleteShader(vertexShaderHandle);
                    vertexShaderHandle = 0;
                }
            }

            if (vertexShaderHandle == 0)
            {
                throw new RuntimeException("Error creating vertex shader");
            }

            int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);

            if (fragmentShaderHandle != 0)
            {
                GLES20.glShaderSource(fragmentShaderHandle, readShader("fragmentShader"));

                GLES20.glCompileShader(fragmentShaderHandle);

                final int[] compileStatus = new int[1];
                GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

                if (compileStatus[0] == 0)
                {
                    GLES20.glDeleteShader(fragmentShaderHandle);
                    fragmentShaderHandle = 0;
                }
            }
            if (fragmentShaderHandle == 0)
            {
                throw new RuntimeException("Error creating fragment shader.");
            }

            int programHandle = GLES20.glCreateProgram();

            if (programHandle != 0)
            {
                GLES20.glAttachShader(programHandle, vertexShaderHandle);
                GLES20.glAttachShader(programHandle, fragmentShaderHandle);

                GLES20.glBindAttribLocation(programHandle, 0, "a_Position");
                GLES20.glBindAttribLocation(programHandle, 1, "a_Color");

                GLES20.glLinkProgram(programHandle);

                final int[] linkStatus = new int[1];
                GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);

                if (linkStatus[0] == 0)
                {
                    GLES20.glDeleteProgram(programHandle);
                    programHandle = 0;
                }
            }

            if (programHandle == 0)
            {
                throw new RuntimeException("Error creating program.");
            }

            mMVPMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");
            mPositionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");
            mColorHandle = GLES20.glGetAttribLocation(programHandle, "a_Color");

            GLES20.glUseProgram(programHandle);
        } catch (IOException e)
        {
            Log.d("OpenGLES2Test", "The shader could not be read: " + e.getMessage());
        } catch (RuntimeException e)
        {
            Log.d("OpenGLES2Test", e.getMessage());
        }

        GLES20.glEnable(GLES20.GL_DEPTH_TEST);
        GLES20.glDepthFunc(GLES20.GL_LEQUAL);
        GLES20.glDepthMask(true);
    }

    @Override
    public void onSurfaceChanged(GL10 gl10, int width, int height) {
        GLES20.glViewport(0, 0, width/2, height/2);

        this.width = width;
        this.height = height;

        final float ratio = (float) width / height;
        final float left = -ratio;
        final float right = ratio;
        final float bottom = -1.0f;
        final float top = 1.0f;
        final float near = 1.0f;
        final float far = 10.0f;

        GLES20.glGetIntegerv(GLES20.GL_VIEWPORT, viewport, 0);

        Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
    }

    @Override
    public void onDrawFrame(GL10 gl10) {
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

        long time = SystemClock.uptimeMillis() % 10000L;

        GLES20.glViewport(0, 0, (int)(width), (int)(height));
        Matrix.setIdentityM(mModelMatrix, 0);
        Matrix.setLookAtM(viewMatrix, 0, cameraPos[0], cameraPos[1], cameraPos[2], cameraLook[0], cameraLook[1], cameraLook[2], cameraUp[0], cameraUp[1], cameraUp[2]);

        Matrix.multiplyMM(mMVMatrix, 0, viewMatrix, 0, mModelMatrix, 0);

        Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVMatrix, 0);

        GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);

        drawTriangle(triangleVertices);
        drawIntersectionLine();
    }

    private void drawTriangle(final FloatBuffer triangleBuffer)
    {
        triangleBuffer.position(positionOffset);
        GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
        GLES20.glEnableVertexAttribArray(mPositionHandle);

        triangleBuffer.position(colorOffset);
        GLES20.glVertexAttribPointer(mColorHandle, colorDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
        GLES20.glEnableVertexAttribArray(mColorHandle);

        GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);
    }

    private void drawIntersectionLine()
    {
        lineVertices.position(0);
        GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, lineStrideBytes, lineVertices);
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLES20.glDrawArrays(GLES20.GL_LINES, 0, 2);
    }

    private void moveIntersectionLineEndPoint(float[] lineEndPoint)
    {
        this.lineEndPoint = lineEndPoint;

        float[] lineVerticesData = {
            lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],
            lineEndPoint[0], lineEndPoint[1], lineEndPoint[2]
        };
        lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
        lineVertices.put(lineVerticesData).position(0);
    }

    public static String readShader(String filePath) throws IOException {
        BufferedReader reader = new BufferedReader(new InputStreamReader(context.getAssets().open(filePath)));
        StringBuilder sb = new StringBuilder();
        String line;
        while( ( line = reader.readLine() ) != null)
        {
            sb.append(line + "\n");
        }
        reader.close();
        return sb.toString();
    }

    public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] view, float[] projection)
    {
        float[] rayDirection = new float[4];

        float normalizedX = 2f * touchX/windowWidth - 1f;
        float normalizedY = 1f - 2f*touchY/windowHeight;
        float normalizedZ = 1.0f;

        float[] rayNDC = new float[]{normalizedX, normalizedY, normalizedZ};

        float[] rayClip = new float[]{rayNDC[0], rayNDC[1], -1f, 1f};

        float[] inverseProjection = new float[16];
        Matrix.invertM(inverseProjection, 0, projection, 0);
        float[] rayEye = multiplyMat4ByVec4(inverseProjection, rayClip);

        rayClip = new float[]{rayClip[0], rayClip[1], -1f, 0f};

        float[] inverseView = new float[16];
        Matrix.invertM(inverseView, 0, view, 0);
        float[] rayWorld4D = multiplyMat4ByVec4(inverseView, rayEye);
        float[] rayWorld = new float[]{rayWorld4D[0], rayWorld4D[1], rayWorld4D[2]};

        rayDirection = normalizeVector3(rayWorld);

        return rayDirection;
    }

    public float[] normalizeVector3(float[] vector3)
    {
        float[] normalizedVector = new float[3];
        float magnitude = (float) Math.sqrt((vector3[0] * vector3[0]) + (vector3[1] * vector3[1]) + (vector3[2] * vector3[2]));
        normalizedVector[0] = vector3[0] / magnitude;
        normalizedVector[1] = vector3[1] / magnitude;
        normalizedVector[2] = vector3[2] / magnitude;
        return normalizedVector;
    }

    /*
        public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] modelView, float[] projection)
        {
            float[] rayDirection = new float[4];

            float normalizedX = 2 * touchX/windowWidth - 1;
            float normalizedY = 1 - 2*touchY/windowHeight;

            float[] unviewMatrix = new float[16];
            float[] viewMatrix = new float[16];
            Matrix.multiplyMM(viewMatrix, 0, projection, 0, modelView, 0);
            Matrix.invertM(unviewMatrix, 0, viewMatrix, 0);

            float[] nearPoint = multiplyMat4ByVec4(unviewMatrix, new float[]{normalizedX, normalizedY, 0, 1});
            float[] modelviewInverse = new float[16];
            Matrix.invertM(modelviewInverse, 0, modelView, 0);

            float[] cameraPos = new float[4];
            cameraPos[0] = modelviewInverse[12];
            cameraPos[1] = modelviewInverse[13];
            cameraPos[2] = modelviewInverse[14];
            cameraPos[3] = modelviewInverse[15];

            rayDirection[0] = (nearPoint[0] - cameraPos[0]);
            rayDirection[1] = (nearPoint[1] - cameraPos[1]);
            rayDirection[2] = (nearPoint[2] - cameraPos[2]);
            rayDirection[3] = (nearPoint[3] - cameraPos[3]);

            return rayDirection;
        }
     */

    /*
    public float[] getOGLPosition(int x, int y)
    {
        GLU.gluUnProject(x, y, 0, , modelOffset, project, projectOffset, view, viewOffset, obj, objOffset)
    }
    */

    public float[] getCameraPos(float[] modelView)
    {
        float[] modelviewInverse = new float[16];
        Matrix.invertM(modelviewInverse, 0, modelView, 0);
        float[] cameraPos = new float[4];
        cameraPos[0] = modelviewInverse[12];
        cameraPos[1] = modelviewInverse[13];
        cameraPos[2] = modelviewInverse[14];
        cameraPos[3] = modelviewInverse[15];
        return cameraPos;
    }

    public String floatArrayAsString(float[] array)
    {
        StringBuilder sb = new StringBuilder();
        sb.append("[");
        for (Float f : array)
        {
            sb.append(f + ", ");
        }
        sb.deleteCharAt(sb.length() - 1);
        sb.deleteCharAt(sb.length() - 1);
        sb.append("]");
        return sb.toString();
    }

    public float[] getInverseMatrix(float[] originalMatrix)
    {
        float[] inverseMatrix = new float[16];
        Matrix.invertM(inverseMatrix, 0, originalMatrix, 0);
        return inverseMatrix;
    }

    public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4)
    {
        float[] returnMatrix = new float[4];

        returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]);
        returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]);
        returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]);
        returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]);

        return returnMatrix;
    }

    public void onTouch(float touchX, float touchY)
    {
        float[] mouseRayProjection = getMouseRayProjection(touchX, touchY, width, height, mMVMatrix, mProjectionMatrix);
        Log.d("OpenGLES2Test", "Mouse Ray: " + floatArrayAsString(mouseRayProjection));
        //Log.d("OpenGLES2Test", "ModelView: " + floatArrayAsString(mMVMatrix));
        //Log.d("OpenGLES2Test", "ModelViewInverse: " + floatArrayAsString(getInverseMatrix(mMVMatrix)));
        //Log.d("OpenGLES2Test", "Mouse Coordinates: " + touchX + ", " + touchY);
        //Log.d("OpenGLES2Test", "Ray Coordinates: " + mouseRayProjection[0] + ", " + mouseRayProjection[1] + ", " + mouseRayProjection[2] + ", " + mouseRayProjection[3]);
        moveIntersectionLineEndPoint(mouseRayProjection);
    }
}

fragmentShader:

precision mediump float;

varying vec4 v_Color;

void main()
{
    gl_FragColor = v_Color;
}

vertexShader:

uniform mat4 u_MVPMatrix;

attribute vec4 a_Position;
attribute vec4 a_Color;

varying vec4 v_Color;

void main()
{
    v_Color = a_Color;
    gl_Position = u_MVPMatrix * a_Position;
}
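
For the actual object picking the question set out to do, the ray from getMouseRayProjection (with the camera position as its origin) still has to be intersected with the scene's triangles; the code above only draws the ray. Below is a minimal sketch of the standard Möller–Trumbore ray–triangle test that could be used for that step. The helper names (sub, dot, cross) and the method itself are not part of the original post:

    // Returns the distance t along the ray at which it hits triangle (v0, v1, v2), or -1 for no hit.
    public static float intersectRayTriangle(float[] origin, float[] dir,
                                             float[] v0, float[] v1, float[] v2) {
        final float EPS = 1e-6f;
        float[] e1 = sub(v1, v0);
        float[] e2 = sub(v2, v0);
        float[] p = cross(dir, e2);
        float det = dot(e1, p);
        if (Math.abs(det) < EPS) return -1f;                // ray parallel to the triangle plane
        float invDet = 1f / det;
        float[] tvec = sub(origin, v0);
        float u = dot(tvec, p) * invDet;
        if (u < 0f || u > 1f) return -1f;                   // outside the triangle
        float[] q = cross(tvec, e1);
        float v = dot(dir, q) * invDet;
        if (v < 0f || u + v > 1f) return -1f;               // outside the triangle
        float t = dot(e2, q) * invDet;
        return t > EPS ? t : -1f;                           // hit only in front of the ray origin
    }

    private static float[] sub(float[] a, float[] b) { return new float[]{ a[0]-b[0], a[1]-b[1], a[2]-b[2] }; }
    private static float dot(float[] a, float[] b) { return a[0]*b[0] + a[1]*b[1] + a[2]*b[2]; }
    private static float[] cross(float[] a, float[] b) {
        return new float[]{ a[1]*b[2]-a[2]*b[1], a[2]*b[0]-a[0]*b[2], a[0]*b[1]-a[1]*b[0] };
    }

Called with cameraPos as the origin, the direction returned by getMouseRayProjection, and the three position vertices from triangleVerticesData, a return value of 0 or greater would mean the touch ray hits the triangle at that distance.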

Source: https://codeday.me/bug/20190529/1178196.html
