JPCT-AE integrated with Vuforia 3.0.5

Started by Hadesskywalker, July 16, 2014, 05:44:29 PM


Hadesskywalker

Hi :)

I'm a little frustrated because I can't get jPCT-AE working with Vuforia 3.0.5 and the new Java API.
With Vuforia 2.6 in C++ it worked perfectly after following the steps from the wiki (http://www.jpct.net/wiki/index.php/Integrating_JPCT-AE_with_Vuforia), but with the new Java version the cube rotates all over the place.
Does anyone have a Git project or a tutorial I could look at? I'm quite stuck.

Thank you for your help :)

Hadesskywalker

This is what I have got so far. The cube is not staying in one position; it circles around a center point.

ImageTargetRenderer

public class ImageTargetRenderer implements GLSurfaceView.Renderer
{
    private static final String LOGTAG = "ImageTargetRenderer";

    private SampleApplicationSession vuforiaAppSession;
    private ImageTargets mActivity;

    private Renderer mRenderer;

    boolean mIsActive = false;

    private World world;
    private Light sun;
    private Object3D cube;
    private Camera cam;
    private FrameBuffer fb;
    private float[] modelViewMatrix;
    private float[] invTranspMV;
    private float fovx = 0;
    private float fovy = 0;

    public ImageTargetRenderer(ImageTargets activity,
        SampleApplicationSession session)
    {
        mActivity = activity;
        vuforiaAppSession = session;

        Config.farPlane = 7000;
        Config.maxPolysVisible = 5000;

        world = new World();
        world.setAmbientLight(20, 20, 20);

        sun = new Light(world);
        sun.setIntensity(250, 250, 250);

        cube = Primitives.getCube(20);
        cube.build();
        world.addObject(cube);

        cam = world.getCamera();

        SimpleVector sv = new SimpleVector();
        sv.set(cube.getTransformedCenter());
        sv.y -= 100;
        sv.z -= 100;
        sun.setPosition(sv);

        MemoryHelper.compact();
    }

    // Called to draw the current frame.
    @Override
    public void onDrawFrame(GL10 gl)
    {
        if (!mIsActive)
            return;

        // Call our function to render content
        renderFrame();

        updateCamera(modelViewMatrix);

        world.renderScene(fb);
        world.draw(fb);
        fb.display();

        // GLES20.glDisable(GLES20.GL_DEPTH_TEST);
        mRenderer.end();
    }

    // Called when the surface is created or recreated.
    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config)
    {
        Log.d(LOGTAG, "GLRenderer.onSurfaceCreated");

        initRendering();

        // Call Vuforia function to (re)initialize rendering after first use
        // or after OpenGL ES context was lost (e.g. after onPause/onResume):
        vuforiaAppSession.onSurfaceCreated();
    }

    // Called when the surface changed size.
    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height)
    {
        Log.d(LOGTAG, "GLRenderer.onSurfaceChanged");

        if (fb != null) {
            fb.dispose();
        }
        fb = new FrameBuffer(width, height);
        Config.viewportOffsetAffectsRenderTarget = true;

        // Call Vuforia function to handle render surface size changes:
        vuforiaAppSession.onSurfaceChanged(width, height);

        CameraCalibration cameraCalibration = CameraDevice.getInstance().getCameraCalibration();
        Vec2F size = cameraCalibration.getSize();
        Vec2F focalLength = cameraCalibration.getFocalLength();
        fovy = 2 * (float) Math.atan(0.5f * size.getData()[1] / focalLength.getData()[1]);
        fovx = 2 * (float) Math.atan(0.5f * size.getData()[0] / focalLength.getData()[0]);
    }

    // Function for initializing the renderer.
    private void initRendering()
    {
        mRenderer = Renderer.getInstance();

        GLES20.glClearColor(0.0f, 0.0f, 0.0f, Vuforia.requiresAlpha() ? 0.0f
            : 1.0f);

        // Hide the Loading Dialog
        mActivity.loadingDialogHandler
            .sendEmptyMessage(LoadingDialogHandler.HIDE_LOADING_DIALOG);
    }

    // The render function.
    private void renderFrame()
    {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

        State state = mRenderer.begin();
        mRenderer.drawVideoBackground();

        GLES20.glEnable(GLES20.GL_DEPTH_TEST);

        // handle face culling, we need to detect if we are using reflection
        // to determine the direction of the culling
        GLES20.glEnable(GLES20.GL_CULL_FACE);
        GLES20.glCullFace(GLES20.GL_BACK);
        if (Renderer.getInstance().getVideoBackgroundConfig().getReflection() == VIDEO_BACKGROUND_REFLECTION.VIDEO_BACKGROUND_REFLECTION_ON)
            GLES20.glFrontFace(GLES20.GL_CW);  // Front camera
        else
            GLES20.glFrontFace(GLES20.GL_CCW); // Back camera

        // did we find any trackables this frame?
        for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
        {
            TrackableResult result = state.getTrackableResult(tIdx);
            Trackable trackable = result.getTrackable();

            Matrix34F poseMatrix = result.getPose();

            Matrix44F modelViewMatrix_Vuforia = Tool.convertPose2GLMatrix(poseMatrix);
            modelViewMatrix = modelViewMatrix_Vuforia.getData().clone();
            // if (mActivity.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
            android.opengl.Matrix.rotateM(modelViewMatrix, 0, 180f, 1, 0, 0);
            // } else {
            //     android.opengl.Matrix.rotateM(modelViewMatrix, 0, -180f, 1, 0, 0);
            // }

            Matrix44F inverseMV = SampleMath.Matrix44FInverse(modelViewMatrix_Vuforia);
            Matrix44F invTranspMV = SampleMath.Matrix44FTranspose(inverseMV);
            this.invTranspMV = invTranspMV.getData().clone();
        }
    }

    public void updateCamera(float[] modelViewMatrix) {
        if (modelViewMatrix != null) {
            Matrix m = new Matrix();
            m.setDump(modelViewMatrix);
            cam.setBack(m);
            if (mActivity.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
                cam.setFOV(fovy);
                cam.setYFOV(fovx);
            } else {
                cam.setFOV(fovx);
                cam.setYFOV(fovy);
            }
        }
    }

    // public void updateCameraWithInv(float[] invTranspMV) {
    //     if (invTranspMV != null) {
    //         // Camera orientation axes (camera viewing direction, camera right direction and camera up direction)
    //         float cam_right_x = invTranspMV[0];
    //         float cam_right_y = invTranspMV[1];
    //         float cam_right_z = invTranspMV[2];
    //
    //         float cam_up_x = -invTranspMV[4];
    //         float cam_up_y = -invTranspMV[5];
    //         float cam_up_z = -invTranspMV[6];
    //
    //         float cam_dir_x = invTranspMV[8];
    //         float cam_dir_y = invTranspMV[9];
    //         float cam_dir_z = invTranspMV[10];
    //
    //         // Camera position
    //         float cam_x = invTranspMV[12];
    //         float cam_y = invTranspMV[13];
    //         float cam_z = invTranspMV[14];
    //
    //         SimpleVector mCameraDirection = new SimpleVector(cam_dir_x, cam_dir_y, cam_dir_z);
    //         SimpleVector mCameraUp = new SimpleVector(cam_up_x, cam_up_y, cam_up_z);
    //         SimpleVector mCameraPosition = new SimpleVector(cam_x, cam_y, cam_z);
    //
    //         cam.setOrientation(mCameraDirection, mCameraUp);
    //         cam.setPosition(mCameraPosition);
    //     }
    // }

    public void setTextures(Vector<Texture> textures)
    {
    }

}
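
A small helper I'm using for debugging (just a sketch, not part of the actual rendering): it logs jPCT's camera position next to the camera position taken from the inverted/transposed Vuforia matrix. I don't know yet what the exact relation between the two should be, but watching how both evolve while I move the phone should at least narrow down where the twist comes from.

    // Debug sketch: log jPCT's camera position next to the camera position
    // stored in the inverted/transposed Vuforia matrix (translation part
    // of a GL-style matrix sits at indices 12..14).
    private void logCameraPositions()
    {
        if (invTranspMV == null)
            return;
        SimpleVector p = cam.getPosition();
        Log.d(LOGTAG, "jPCT cam:    " + p.x + " / " + p.y + " / " + p.z);
        Log.d(LOGTAG, "Vuforia cam: " + invTranspMV[12] + " / " + invTranspMV[13] + " / " + invTranspMV[14]);
    }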


And the setVideoSize() method in SampleApplicationSession, which configureVideoBackground uses:
public void setVideoSize(int videoWidth, int videoHeight) {

    DisplayMetrics displaymetrics = new DisplayMetrics();
    mActivity.getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
    int height = displaymetrics.heightPixels;
    int width = displaymetrics.widthPixels;

    int widestVideo = videoWidth > videoHeight ? videoWidth : videoHeight;
    int widestScreen = width > height ? width : height;

    float diff = (widestVideo - widestScreen) / 2;

    Config.viewportOffsetY = diff / widestScreen;
}
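
Just to show what values that produces, here is the arithmetic with made-up numbers (only an example, not from a real device):

    // Made-up example values, only to illustrate the arithmetic above:
    int videoWidth = 1280, videoHeight = 720;   // camera feed (example)
    int width = 1196, height = 720;             // screen in landscape (example)

    int widestVideo = Math.max(videoWidth, videoHeight);   // 1280
    int widestScreen = Math.max(width, height);            // 1196
    float diff = (widestVideo - widestScreen) / 2;          // 42
    float offset = diff / widestScreen;                     // ~0.035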


Does anyone have any suggestions as to what I'm missing?

EgonOlsen

I'm not sure if I understand the exact problem. If the cube moves in the wrong direction when you move the phone, I would expect the matrix conversion to be wrong somehow. If the problem is that it moves even though the phone doesn't, it sounds more like a Vuforia-related issue to me. In that case, I would check the values returned by Vuforia to see if they change when they shouldn't.
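
Something like this (just a sketch using the classes your renderer already has) would show whether the raw pose jitters while the phone is lying still:

    // Minimal sketch: log the raw translation from Vuforia's pose once per
    // frame. If these values jump around while the phone is resting on the
    // table, the problem is on the tracking side rather than in the jPCT
    // conversion.
    private void logPose(TrackableResult result)
    {
        Matrix44F mv = Tool.convertPose2GLMatrix(result.getPose());
        float[] data = mv.getData();
        // In a GL-style (column-major) matrix the translation sits at 12..14.
        Log.d(LOGTAG, "pose translation: x=" + data[12]
            + " y=" + data[13] + " z=" + data[14]);
    }

Call it from the trackable loop in renderFrame(), e.g. logPose(result); if the logged translation stays put while the phone does, the tracking data is fine and the conversion is the suspect.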

Hadesskywalker

The cube moves the wrong way when I move the phone.

If I hold the phone parallel to the marker:
Moving the phone to the RIGHT: moves the cube DOWN and slightly to the RIGHT
Moving the phone to the LEFT: moves the cube UP and slightly to the LEFT
Moving the phone UP: moves the cube UP and ca. 45 deg to the RIGHT
Moving the phone DOWN: moves the cube DOWN and ca. 45 deg to the LEFT

I just can't wrap my head around what transformation I have to apply.
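
The pattern (right becomes down, up becomes roughly right) makes me suspect the pose is twisted by about 90° around the viewing axis, as if there were a screen-orientation mismatch somewhere. The next thing I want to try, purely as an experiment and not as a known fix, is pre-rotating the Vuforia matrix around Z before handing it to jPCT, right after the existing rotateM(..., 180f, 1, 0, 0) call in renderFrame():

    // Experiment only (a guess, not a confirmed fix): pre-multiply a rotation
    // about the camera's Z axis onto the Vuforia model-view matrix to test
    // for a 90-degree screen-orientation mismatch.
    float[] zRot = new float[16];
    float[] rotated = new float[16];
    android.opengl.Matrix.setRotateM(zRot, 0, 90f, 0f, 0f, 1f);               // try 90 / -90 / 180
    android.opengl.Matrix.multiplyMM(rotated, 0, zRot, 0, modelViewMatrix, 0); // rotate in camera space
    modelViewMatrix = rotated;

I'll try 90, -90 and 180 degrees and see whether any of them makes the movement directions match.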

EgonOlsen

Sounds twisted... so I assume the translations are wrong? How about the rotations, i.e. if you turn the phone instead of moving it? And the part that does the transformation between Vuforia and jPCT-AE: in which way does it differ between the native version and the Java one? Maybe you can post some code snippets that show both ways?
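
To make the comparison easy, the Java-side path condensed from the code you posted (no new logic from my side) is basically this:

    // Condensed from the renderer code above: Vuforia pose -> jPCT camera.
    Matrix44F mv = Tool.convertPose2GLMatrix(result.getPose());   // GL-style (column-major) pose
    float[] data = mv.getData().clone();
    android.opengl.Matrix.rotateM(data, 0, 180f, 1f, 0f, 0f);     // 180-degree flip around X

    Matrix m = new Matrix();
    m.setDump(data);                                              // raw 4x4 dump into a jPCT matrix
    cam.setBack(m);
    cam.setFOV(fovx);
    cam.setYFOV(fovy);

If you put the native 2.6 version next to it, it should be easy to spot where they diverge, especially whether the 180° flip and the layout of the array handed to setDump() are the same in both.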