4

我很難弄清楚如何使用Rajawali播放360視頻。爲了達到這個目的,我嘗試了所有可以在互聯網上找到的解決方案,但是我失敗了。如何使用RajawaliVR或Rajawali播放360視頻

首先,我使用Rajawali紙板,讓MainActivity從CardboardActivity延伸。同時,在MyRenderer班上,我讓這個班從RajawaliCardboardRenderer班擴展而來。在MyRenderer類,我overrided的initScene()功能:

protected void initScene() { 
    StreamingTexture mTexture = null; 
    if (externalMemoryAvailable()) 
    { 
     mVideoPath = Environment.getExternalStorageDirectory().getAbsolutePath()+"/testVideo.mp4"; 
     try{ 
      mPlayer = new MediaPlayer(); 
      mPlayer.setDataSource(mVideoPath); 
     }catch(IllegalArgumentException e){ 
     e.printStackTrace(); 
     } catch (SecurityException e) { 
      // TODO Auto-generated catch block 
      e.printStackTrace(); 
     } catch (IllegalStateException e) { 
      // TODO Auto-generated catch block 
      e.printStackTrace(); 
     } catch (IOException e) { 
      // TODO Auto-generated catch block 
      e.printStackTrace(); 
     } 
     try { 
      mPlayer.prepare(); 
     } catch (IOException t) { 
      t.printStackTrace(); 
     } 
     mTexture = new StreamingTexture("video", mPlayer); 
    } 
    Sphere sphere = createPhotoSphereWithTexture(mTexture); 
    getCurrentScene().addChild(sphere); 
    getCurrentCamera().setPosition(Vector3.ZERO); 
    getCurrentCamera().setFieldOfView(75); 
} 

private Sphere createPhotoSphereWithTexture(ATexture texture) { 
    Material material = new Material(); 
    material.setColor(0); 
    try { 
     material.addTexture(texture); 
    } catch (ATexture.TextureException e) { 
     throw new RuntimeException(e); 
    } 
    Sphere sphere = new Sphere(50, 64, 32); 
    sphere.setScaleX(-1); 
    sphere.setMaterial(material); 
    return sphere; 
} 

該程序可以在沒有任何錯誤運行,但屏幕是黑色的,沒有圖像。
我想問我該怎麼做才能改善我的節目,以及爲什麼我應該通過使用Rajawali播放視頻。誰能幫我?

回答

9

我成功播放視頻與拉賈瓦利類。

public class VideoRenderer extends RajawaliCardboardRenderer { 

    Context mContext; 

    private MediaPlayer mMediaPlayer; 
    private StreamingTexture mVideoTexture; 

    public VideoRenderer(Context context) { 
     super(context); 
     mContext = context; 
    } 

    @Override 
    protected void initScene() { 

     mMediaPlayer = MediaPlayer.create(getContext(), 
       R.raw.video); 
     mMediaPlayer.setLooping(true); 

     mVideoTexture = new StreamingTexture("sintelTrailer", mMediaPlayer); 
     Material material = new Material(); 
     material.setColorInfluence(0); 
     try { 
      material.addTexture(mVideoTexture); 
     } catch (ATexture.TextureException e) { 
      e.printStackTrace(); 
     } 

     Sphere sphere = new Sphere(50, 64, 32); 
     sphere.setScaleX(-1); 
     sphere.setMaterial(material); 

     getCurrentScene().addChild(sphere); 

     getCurrentCamera().setPosition(Vector3.ZERO); 

     getCurrentCamera().setFieldOfView(75); 

     mMediaPlayer.start(); 

    } 

    @Override 
    protected void onRender(long ellapsedRealtime, double deltaTime) { 
     super.onRender(ellapsedRealtime, deltaTime); 
     mVideoTexture.update(); 
    } 

    @Override 
    public void onPause() { 
     super.onPause(); 
     if (mMediaPlayer != null) 
      mMediaPlayer.pause(); 
    } 

    @Override 
    public void onResume() { 
     super.onResume(); 
     if (mMediaPlayer != null) 
      mMediaPlayer.start(); 
    } 

    @Override 
    public void onRenderSurfaceDestroyed(SurfaceTexture surfaceTexture) { 
     super.onRenderSurfaceDestroyed(surfaceTexture); 
     mMediaPlayer.stop(); 
     mMediaPlayer.release(); 
    } 
    public void nextVideo(String nextVideoPath){ 
     try{ 
      mMediaPlayer.stop(); 
      mMediaPlayer.reset(); 

      mMediaPlayer.setDataSource(nextVideoPath); 
      mMediaPlayer.prepare(); 
      mMediaPlayer.start(); 

     }catch (Exception e){ 
     e.printStackTrace(); 
     } 
    } 
} 
+0

我已經通過使用Unity + CardBoard + EasyMovieTexture實現了它。我會嘗試你的解決方案,非常感謝你! –

+0

偉大的工作!你如何動態改變視頻紋理? –

+0

你的意思是改變下一個視頻嗎? –

1

我覺得你的主要錯誤是調用媒體播放器MediaPlayer.prepare(),而不是MediaPlayer.prepareAsync()
你必須考慮到不同狀態MediaPlayer在經過時的視頻播放。在這裏您可以鏈接到state diagram。視頻播放器完成一切準備工作後,您只能撥打MediaPlayer.start(),以便視頻開始播放。
我正在與Rajawali一起工作(360視頻的視頻播放器),到目前爲止,我已經實現了在正常的陀螺儀和觸摸模式下重現它們,但是我發現很多問題可以使它工作谷歌紙板集成,所以我現在試圖做我自己的「sideBySide」渲染器。

如果我的評論還不夠,這裏有一個我目前用來在Sphere上以流媒體紋理的形式重現視頻的代碼示例。這是overrided方法initScene()的一部分在延伸RajawaliRenderer

//create a 100 segment sphere 
    earthSphere = new Sphere(1, 100, 100); 
    //try to set the mediaPLayer data source 
    mMediaPlayer = new MediaPlayer(); 
    try{ 
     mMediaPlayer.setDataSource(context, Uri.parse("android.resource://" + context.getPackageName() + "/" + R.raw.pyrex)); 
    }catch(IOException ex){ 
     Log.e("ERROR","couldn attach data source to the media player"); 
    } 
    mMediaPlayer.setLooping(true); //enable video looping 
    video = new StreamingTexture("pyrex",mMediaPlayer); //create video texture 
    mMediaPlayer.prepareAsync(); //prepare the player (asynchronous) 
    mMediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() { 
     @Override 
     public void onPrepared(MediaPlayer mp) { 
      mp.start(); //start the player only when it is prepared 
     } 
    }); 
    //add textture to a new material 
    Material material = new Material(); 
    material.setColorInfluence(0f); 
    try{ 
     material.addTexture(video); 
    }catch(ATexture.TextureException ex){ 
     Log.e("ERROR","texture error when adding video to material"); 
    } 
    //set the material to the sphere 
    earthSphere.setMaterial(material); 
    earthSphere.setPosition(0, 0, 0); 
    //add the sphere to the rendering scene 
    getCurrentScene().addChild(earthSphere); 
+0

非常感謝你!你的回答對我來說非常有用! –

+0

如果答案對您有幫助,您可以對它們進行加註。此外,對於適合您的答案,您可以將其標記爲「已接受」,以便其他人更好地看到它,作者得到一些反饋。歡迎你,順便說一句=)希望你的工作。 – SMalpica

0

既然你想播放360視頻,你需要一些方向跟蹤器。 這裏是紙板活動的例子。

public class CardboardRendererExample extends Renderer implements CardboardView.StereoRenderer { 
public static final int FIELD_OF_VIEW = 90; 
public static final float PLANE_WIDTH = 64.0f; 
public static final float PLANE_HEIGHT = 36.0f; 
public static final float PLANE_DISTANCE = -64.0f; 

private final MediaPlayer mMediaPlayer; 
protected StreamingTexture mStreamingTexture; 

protected Quaternion mOrientation = Quaternion.getIdentity(); 
protected Quaternion mEyeOrientation = Quaternion.getIdentity(); 

protected float[] mHeadView = new float[16]; 
private Matrix4 mEyeMatrix = new Matrix4(); 
private Vector3 mEyePosition = new Vector3(); 
private Matrix4 mHeadViewMatrix4 = new Matrix4(); 

public CardboardRendererExample(Context context, MediaPlayer mediaPlayer) { 
    super(context); 

    mMediaPlayer = mediaPlayer; 
} 

@Override 
protected void initScene() { 
    getCurrentCamera().setPosition(Vector3.ZERO); 
    getCurrentCamera().setFieldOfView(FIELD_OF_VIEW); 

    mStreamingTexture = new StreamingTexture("give_it_some_name", mMediaPlayer); 
    mStreamingTexture.shouldRecycle(true); 
    setSceneCachingEnabled(true); 

    final Plane projectionScreen = new Plane(PLANE_WIDTH, PLANE_HEIGHT, 64, 64); 
    final Material material = new Material(); 
    material.setColor(0); 
    material.setColorInfluence(0f); 
    try { 
     material.addTexture(mStreamingTexture); 
    } catch (ATexture.TextureException e) { 
     e.printStackTrace(); 
     throw new RuntimeException(e); 
    } 

    projectionScreen.setDoubleSided(true); 
    projectionScreen.setMaterial(material); 
    projectionScreen.setTransparent(true); 
    projectionScreen.setPosition(0, 0, PLANE_DISTANCE); 
    getCurrentScene().addChild(projectionScreen); 

    getCurrentScene().addChild(projectionScreen); 
} 

@Override 
public void onNewFrame(HeadTransform headTransform) { 
    headTransform.getHeadView(mHeadView, 0); 

    mHeadViewMatrix4.setAll(mHeadView).inverse(); 
    mOrientation.fromMatrix(mHeadViewMatrix4); 
} 

@Override 
public void onDrawEye(Eye eye) { 
    getCurrentCamera().updatePerspective(
      eye.getFov().getLeft(), 
      eye.getFov().getRight(), 
      eye.getFov().getBottom(), 
      eye.getFov().getTop()); 

    mEyeMatrix.setAll(eye.getEyeView()); 
    mEyeOrientation.fromMatrix(mEyeMatrix); 
    getCurrentCamera().setOrientation(mEyeOrientation); 
    mEyePosition = mEyeMatrix.getTranslation(mEyePosition).inverse(); 
    getCurrentCamera().setPosition(mEyePosition); 

    super.onRenderFrame(null); 
} 

@Override 
public void onFinishFrame(Viewport viewport) { 
} 

@Override 
public void onSurfaceChanged(int width, int height) { 
    super.onRenderSurfaceSizeChanged(null, width, height); 
} 

@Override 
public void onSurfaceCreated(EGLConfig eglConfig) { 
    super.onRenderSurfaceCreated(eglConfig, null, -1, -1); 
} 

@Override 
public void onRenderSurfaceCreated(EGLConfig config, GL10 gl, int width, int height) { 
    super.onRenderSurfaceCreated(config, gl, width, height); 
} 

@Override 
public void onRendererShutdown() { 
} 

@Override 
protected void onRender(long elapsedRealTime, double deltaTime) { 
    super.onRender(elapsedRealTime, deltaTime); 
    if (mStreamingTexture != null) { 
     mStreamingTexture.update(); 
    } 
} 

@Override 
public void onOffsetsChanged(float xOffset, float yOffset, float xOffsetStep, float yOffsetStep, int xPixelOffset, int yPixelOffset) { 
} 

@Override 
public void onTouchEvent(MotionEvent event) { 
} 

}

或者你可以在

com.google.vrtoolkit.cardboard.sensors.HeadTracker 

當然,你可以擺脫所有這些領域,但他們應該讓GC生活更輕鬆基於(例如)實現您的跟蹤器。