
Need To Play Video In ARCore

In ARCore we can place a 3D object when the user taps a detected horizontal plane surface. Instead of a 3D object, I need to show a video when the user taps the plane surface.

Solution 1:

I did this by creating a new class called MovieClipRenderer, which is modeled after the ObjectRenderer class in the HelloAR sample. It creates a quad geometry and renders the texture from the media player onto that quad. The quad is anchored to a plane, so it does not move as the user looks around.

For testing, I used a stock movie from https://www.videvo.net/video/chicken-on-green-screen/3435/ and added it to src/main/assets.

Then I added the member variable for the renderer to HelloArActivity

private final MovieClipRenderer mMovieClipRenderer = new MovieClipRenderer();

In onSurfaceCreated(), I initialized the renderer along with the others:

 mMovieClipRenderer.createOnGlThread();

To try it out, I made the first tap on a plane create the movie anchor by changing the hit-test code slightly:

if (mMovieAnchor == null) {
    mMovieAnchor = hit.createAnchor();
} else {
    mAnchors.add(hit.createAnchor());
}
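
The snippet assumes mMovieAnchor is declared as a new Anchor field in HelloArActivity (the declaration is not shown above); something like:

// Anchor used to position the video quad (assumed declaration, not in the sample).
private Anchor mMovieAnchor;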

Then, at the bottom of onDrawFrame(), I checked for the anchor and started playback:

    if (mMovieAnchor != null) {
        // Draw chickens!
        if (!mMovieClipRenderer.isStarted()) {
            mMovieClipRenderer.play("chicken.mp4", this);
        }
        mMovieAnchor.getPose().toMatrix(mAnchorMatrix, 0);
        mMovieClipRenderer.update(mAnchorMatrix, 0.25f);
        mMovieClipRenderer.draw(mMovieAnchor.getPose(), viewmtx, projmtx);
    }
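
Here mAnchorMatrix, viewmtx and projmtx are the matrices the HelloAR sample already sets up in onDrawFrame(); assuming the stock sample code, the view and projection matrices come from something like:

    float[] projmtx = new float[16];
    camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);
    float[] viewmtx = new float[16];
    camera.getViewMatrix(viewmtx, 0);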

The rendering class is long, but it is mostly standard GLES code: it creates the external (OES) texture, initializes the video player, builds the quad vertices, and loads a fragment shader that samples the OES texture.
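
For completeness, the listing assumes the usual Android, GLES and ARCore imports (plus the sample's ShaderUtil helper), roughly:

import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import android.view.Surface;
import com.google.ar.core.Pose;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.opengles.GL10;
// ShaderUtil comes from the HelloAR sample's rendering package.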

/**
 * Renders a movie clip with a green screen aware shader.
 * <p>
 * Copyright 2018 Google LLC
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
public class MovieClipRenderer implements SurfaceTexture.OnFrameAvailableListener {
  private static final String TAG = MovieClipRenderer.class.getSimpleName();

  // Quad geometry
  private static final int COORDS_PER_VERTEX = 3;
  private static final int TEXCOORDS_PER_VERTEX = 2;
  private static final int FLOAT_SIZE = 4;
  private static final float[] QUAD_COORDS = new float[]{
          -1.0f, -1.0f, 0.0f,
          -1.0f, +1.0f, 0.0f,
          +1.0f, -1.0f, 0.0f,
          +1.0f, +1.0f, 0.0f,
  };

  private static final float[] QUAD_TEXCOORDS = new float[]{
          0.0f, 1.0f,
          0.0f, 0.0f,
          1.0f, 1.0f,
          1.0f, 0.0f,
  };

  // Shader for a flat quad.
  private static final String VERTEX_SHADER =
      "uniform mat4 u_ModelViewProjection;\n\n" +
      "attribute vec4 a_Position;\n" +
      "attribute vec2 a_TexCoord;\n\n" +
      "varying vec2 v_TexCoord;\n\n" +
      "void main() {\n" +
      "   gl_Position = u_ModelViewProjection * vec4(a_Position.xyz, 1.0);\n" +
      "   v_TexCoord = a_TexCoord;\n" +
      "}";

  // The fragment shader samples the video texture, blending to transparent
  // for the green screen color. The color was determined by sampling a
  // screenshot of the video in an image editor.
  // (GLSL ES 1.00 float literals must not carry an 'f' suffix.)
  private static final String FRAGMENT_SHADER =
      "#extension GL_OES_EGL_image_external : require\n" +
      "\n" +
      "precision mediump float;\n" +
      "varying vec2 v_TexCoord;\n" +
      "uniform samplerExternalOES sTexture;\n" +
      "\n" +
      "void main() {\n" +
      "  // TODO: make this a uniform variable - but this is the color of the background. 17ad2b\n" +
      "  vec3 keying_color = vec3(23.0/255.0, 173.0/255.0, 43.0/255.0);\n" +
      "  float thresh = 0.4; // 0 - 1.732\n" +
      "  float slope = 0.2;\n" +
      "  vec3 input_color = texture2D(sTexture, v_TexCoord).rgb;\n" +
      "  float d = abs(length(abs(keying_color.rgb - input_color.rgb)));\n" +
      "  float edge0 = thresh * (1.0 - slope);\n" +
      "  float alpha = smoothstep(edge0, thresh, d);\n" +
      "  gl_FragColor = vec4(input_color, alpha);\n" +
      "}";

  // Geometry data in GLES friendly data structure.
  private FloatBuffer mQuadVertices;
  private FloatBuffer mQuadTexCoord;

  // Shader program id and parameters.
  private int mQuadProgram;
  private int mQuadPositionParam;
  private int mQuadTexCoordParam;
  private int mModelViewProjectionUniform;
  private int mTextureId = -1;

  // Matrix for the location and perspective of the quad.
  private float[] mModelMatrix = new float[16];

  // Media player, texture and other bookkeeping.
  private MediaPlayer player;
  private SurfaceTexture videoTexture;
  private boolean frameAvailable = false;
  private boolean started = false;
  private boolean done;
  private boolean prepared;
  private static Handler handler;

  // Lock used for waiting if the player was not yet created.
  private final Object lock = new Object();

  /**
   * Update the model matrix based on the location and scale to draw the quad.
   */
  public void update(float[] modelMatrix, float scaleFactor) {
    float[] scaleMatrix = new float[16];
    Matrix.setIdentityM(scaleMatrix, 0);
    scaleMatrix[0] = scaleFactor;
    scaleMatrix[5] = scaleFactor;
    scaleMatrix[10] = scaleFactor;
    Matrix.multiplyMM(mModelMatrix, 0, modelMatrix, 0, scaleMatrix, 0);
  }

  /**
   * Initialize the GLES objects.  
   * This is called from the GL render thread to make sure
   * it has access to the EGLContext.
   */
  public void createOnGlThread() {

    // 1 texture to hold the video frame.
    int[] textures = new int[1];
    GLES20.glGenTextures(1, textures, 0);
    mTextureId = textures[0];
    int mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
    GLES20.glBindTexture(mTextureTarget, mTextureId);
    GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_WRAP_S,
       GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_WRAP_T,
       GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_MIN_FILTER,
       GLES20.GL_NEAREST);
    GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_MAG_FILTER,
       GLES20.GL_NEAREST);

    videoTexture = new SurfaceTexture(mTextureId);
    videoTexture.setOnFrameAvailableListener(this);

    // Make a quad to hold the movie.
    ByteBuffer bbVertices = ByteBuffer.allocateDirect(
            QUAD_COORDS.length * FLOAT_SIZE);
    bbVertices.order(ByteOrder.nativeOrder());
    mQuadVertices = bbVertices.asFloatBuffer();
    mQuadVertices.put(QUAD_COORDS);
    mQuadVertices.position(0);

    int numVertices = 4;
    ByteBuffer bbTexCoords = ByteBuffer.allocateDirect(
            numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);
    bbTexCoords.order(ByteOrder.nativeOrder());
    mQuadTexCoord = bbTexCoords.asFloatBuffer();
    mQuadTexCoord.put(QUAD_TEXCOORDS);
    mQuadTexCoord.position(0);

    int vertexShader = loadGLShader(TAG, GLES20.GL_VERTEX_SHADER, VERTEX_SHADER);
    int fragmentShader = loadGLShader(TAG,
            GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER);

    mQuadProgram = GLES20.glCreateProgram();
    GLES20.glAttachShader(mQuadProgram, vertexShader);
    GLES20.glAttachShader(mQuadProgram, fragmentShader);
    GLES20.glLinkProgram(mQuadProgram);
    GLES20.glUseProgram(mQuadProgram);

    ShaderUtil.checkGLError(TAG, "Program creation");

    mQuadPositionParam = GLES20.glGetAttribLocation(mQuadProgram, "a_Position");
    mQuadTexCoordParam = GLES20.glGetAttribLocation(mQuadProgram, "a_TexCoord");
    mModelViewProjectionUniform = GLES20.glGetUniformLocation(
            mQuadProgram, "u_ModelViewProjection");

    ShaderUtil.checkGLError(TAG, "Program parameters");

    Matrix.setIdentityM(mModelMatrix, 0);

    initializeMediaPlayer();
  }

  public void draw(Pose pose, float[] cameraView, float[] cameraPerspective) {
    if (done || !prepared) {
      return;
    }
    synchronized (this) {
      if (frameAvailable) {
        videoTexture.updateTexImage();
        frameAvailable = false;
      }
    }

    // mModelMatrix already holds the anchor pose and the scale factor (set in update()),
    // so the pose parameter is not used again here.
    float[] modelView = new float[16];
    float[] modelViewProjection = new float[16];
    Matrix.multiplyMM(modelView, 0, cameraView, 0, mModelMatrix, 0);
    Matrix.multiplyMM(modelViewProjection, 0, cameraPerspective, 0, modelView, 0);

    ShaderUtil.checkGLError(TAG, "Before draw");

    GLES20.glEnable(GL10.GL_BLEND);
    GLES20.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
    GLES20.glUseProgram(mQuadProgram);

    // Set the vertex positions.
    GLES20.glVertexAttribPointer(
            mQuadPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
            false, 0, mQuadVertices);
    // Set the texture coordinates.
    GLES20.glVertexAttribPointer(mQuadTexCoordParam, TEXCOORDS_PER_VERTEX,
            GLES20.GL_FLOAT, false, 0, mQuadTexCoord);

    // Enable vertex arrays
    GLES20.glEnableVertexAttribArray(mQuadPositionParam);
    GLES20.glEnableVertexAttribArray(mQuadTexCoordParam);
    GLES20.glUniformMatrix4fv(mModelViewProjectionUniform, 1, false,
                              modelViewProjection, 0);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

    // Disable vertex arrays
    GLES20.glDisableVertexAttribArray(mQuadPositionParam);
    GLES20.glDisableVertexAttribArray(mQuadTexCoordParam);

    ShaderUtil.checkGLError(TAG, "Draw");
  }

  private void initializeMediaPlayer() {
    if (handler == null) {
      handler = new Handler(Looper.getMainLooper());
    }

    handler.post(new Runnable() {
      @Override
      public void run() {
        synchronized (lock) {
          player = new MediaPlayer();
          lock.notify();
        }
      }
    });
  }

  @Override
  public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    synchronized (this) {
      frameAvailable = true;
    }
  }

  public boolean play(final String filename, Context context) throws FileNotFoundException {
    // Wait for the player to be created.
    if (player == null) {
      synchronized (lock) {
        while (player == null) {
          try {
            lock.wait();
          } catch (InterruptedException e) {
            return false;
          }
        }
      }
    }

    player.reset();
    done = false;

    player.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
      @Override
      public void onPrepared(MediaPlayer mp) {
        prepared = true;
        mp.start();
      }
    });
    player.setOnErrorListener(new MediaPlayer.OnErrorListener() {
      @Override
      public boolean onError(MediaPlayer mp, int what, int extra) {
        done = true;
        Log.e("VideoPlayer",
            String.format("Error occurred: %d, %d\n", what, extra));
        return false;
      }
    });

    player.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
      @Override
      public void onCompletion(MediaPlayer mp) {
        done = true;
      }
    });

    player.setOnInfoListener(new MediaPlayer.OnInfoListener() {
      @Override
      public boolean onInfo(MediaPlayer mediaPlayer, int i, int i1) {
        return false;
      }
    });

    try {
      AssetManager assets = context.getAssets();
      AssetFileDescriptor descriptor = assets.openFd(filename);
      player.setDataSource(descriptor.getFileDescriptor(),
                           descriptor.getStartOffset(),
                           descriptor.getLength());
      player.setSurface(new Surface(videoTexture));
      player.prepareAsync();
      synchronized (this) {
        started = true;
      }
    } catch (IOException e) {
      Log.e(TAG, "Exception preparing movie", e);
      return false;
    }

    return true;
  }

  public synchronized boolean isStarted() {
    return started;
  }

  static int loadGLShader(String tag, int type, String code) {
    int shader = GLES20.glCreateShader(type);
    GLES20.glShaderSource(shader, code);
    GLES20.glCompileShader(shader);

    // Get the compilation status.
    final int[] compileStatus = new int[1];
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

    // If the compilation failed, delete the shader.
    if (compileStatus[0] == 0) {
      Log.e(tag, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
      GLES20.glDeleteShader(shader);
      shader = 0;
    }

    if (shader == 0) {
      throw new RuntimeException("Error creating shader.");
    }

    return shader;
  }
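
  // Not part of the original answer: a suggested cleanup helper so the hosting
  // activity can release the MediaPlayer when it is no longer needed (e.g. from
  // onPause()). The release is posted to the same handler that created the player,
  // and the renderer is not meant to be reused afterwards.
  public void stop() {
    done = true;
    if (handler != null) {
      handler.post(new Runnable() {
        @Override
        public void run() {
          synchronized (lock) {
            if (player != null) {
              player.release();
            }
          }
        }
      });
    }
  }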
}
