com.google.vrtoolkit.cardboard.sensors.HeadTracker Java Examples
The following examples show how to use
com.google.vrtoolkit.cardboard.sensors.HeadTracker.
You can vote up the examples you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example #1
Source File: CardboardView.java From Cardboard with Apache License 2.0 | 5 votes |
/**
 * One-time view initialization: configures the EGL context and creates the
 * head tracker and head-mounted-display model for this Cardboard view.
 *
 * @param context Android context used to reach the window service and sensors
 */
private void init(Context context) {
    // Cardboard rendering requires OpenGL ES 2.0.
    setEGLContextClientVersion(2);
    // Keep the GL context alive across pause/resume so GL resources survive.
    setPreserveEGLContextOnPause(true);
    // Use the framework constant rather than the magic string "window"
    // (Context.WINDOW_SERVICE has the same value, but is typo-safe and greppable).
    WindowManager windowManager =
            (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
    this.mHeadTracker = new HeadTracker(context);
    this.mHmd = new HeadMountedDisplay(windowManager.getDefaultDisplay());
}
Example #2
Source File: CardboardImplementation.java From gdx-vr with Apache License 2.0 | 5 votes |
/**
 * Wires the Cardboard SDK into the gdx-vr abstraction layer: registers this
 * implementation globally, starts head tracking, and installs a lifecycle
 * listener that stops tracking when the application is disposed.
 *
 * @param activity host activity providing the window/display and sensor access
 */
public CardboardImplementation(Activity activity) {
    // Publish this implementation and the shared VR scene objects.
    VirtualReality.implementation = this;
    VirtualReality.head = new Head();
    VirtualReality.body = new Body();
    VirtualReality.renderer = new VirtualRealityRenderer();

    // Sensor-based head tracking begins immediately.
    headTracker = new HeadTracker(activity);
    headTracker.startTracking();

    // Describe the physical display and hand it to the Cardboard wrappers.
    Display defaultDisplay = activity.getWindowManager().getDefaultDisplay();
    HeadMountedDisplay display = new HeadMountedDisplay(defaultDisplay);
    VirtualReality.headMountedDisplay = new CardboardHMD(display);
    VirtualReality.distortionRenderer =
            new CardboardDistortionRenderer(display, new DistortionRenderer());

    // Stop the tracker when libGDX tears the application down; pause/resume
    // need no action here.
    LifecycleListener shutdownHook = new LifecycleListener() {
        @Override
        public void resume() {
        }

        @Override
        public void pause() {
        }

        @Override
        public void dispose() {
            headTracker.stopTracking();
        }
    };
    Gdx.app.addLifecycleListener(shutdownHook);
}
Example #3
Source File: MyGLRenderer.java From myMediaCodecPlayer-for-FPV with MIT License | 4 votes |
/**
 * GL-surface setup: compiles the shader program, allocates the video and OSD
 * textures, starts the UDP video decoder and OSD receiver threads, and
 * configures Cardboard head tracking. Statement order matters — GL handles
 * must exist before the SurfaceTexture/decoder that write into them.
 */
@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
    // Dark blue-ish clear color.
    GLES20.glClearColor(0.0f, 0.0f, 0.07f, 0.0f);
    mProgram = OpenGLHelper.createProgram(MyGLRendererHelper.getVertexShader(),
            MyGLRendererHelper.getFragmentShader());
    // Look up shader attribute/uniform locations; checkGlError throws on failure.
    maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
    OpenGLHelper.checkGlError("glGetAttribLocation aPosition");
    maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
    OpenGLHelper.checkGlError("glGetAttribLocation aTextureCoord");
    maMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
    OpenGLHelper.checkGlError("glGetAttribLocation uMVPMatrix");
    // Two textures: [0] for the external video frame, [1] for the OSD overlay.
    GLES20.glGenTextures(2, textures, 0);
    mTextureID = textures[0];
    // NOTE(original author): when mixing external and normal textures it only
    // worked with the normal texture on the first unit and the external
    // texture on the second — possibly a driver bug; reason unknown.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
    OpenGLHelper.checkGlError("glBindTexture mTextureID");
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
            GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
            GLES20.GL_LINEAR);
    //mSurfaceTexture = new SurfaceTexture(mTextureID);
    // Second argument requests single-buffered mode(false) — intended to help
    // synchronize MediaCodec with OpenGL. NOTE(original author): double
    // buffering appeared to have no observable effect on this device.
    mSurfaceTexture = new SurfaceTexture(mTextureID,false);
    // Video pipeline: MediaCodec decodes UDP stream (port 5000) into the
    // SurfaceTexture-backed Surface.
    mDecoderSurface=new Surface(mSurfaceTexture);
    mDecoder=new UdpReceiverDecoderThread(mDecoderSurface,5000, mContext);
    mDecoder.startDecoding();
    // On-screen-display renderer shares the texture array and eye/projection
    // matrices with this renderer.
    mOSD=new MyOSDReceiverRenderer(mContext,textures,mLeftEyeViewM,mRightEyeViewM,
            mProjM,videoFormat, modelDistance, videoDistance);
    mOSD.startReceiving();
    // Cardboard head tracker; neck model adds a small translation so rotation
    // pivots around the neck instead of the eyes.
    mHeadTracker=HeadTracker.createFromContext(mContext);
    mHeadTracker.setNeckModelEnabled(true);
    // Apply a per-device gyroscope bias calibration if one was saved.
    final Phone.PhoneParams phoneParams = PhoneParams.readFromExternalStorage();
    if (phoneParams != null) {
        this.mHeadTracker.setGyroBias(phoneParams.gyroBias);
    }
    if (headTracking) {
        mHeadTracker.startTracking();
    }
    // Optionally disable vsync (swap interval 0) for lower latency.
    if(swapIntervallZero){
        EGL14.eglSwapInterval(EGL14.eglGetCurrentDisplay(), 0);
    }
}