Hi,
How can I rotate a 3D model around the Z axis in Vuforia?
Where do I have to implement the code?
Thanks
How can I rotate a 3D model around the Z axis in Vuforia?
Where do I have to implement the code?
Thanks
This section allows you to view all posts made by this member. Note that you can only see posts made in areas you currently have access to.
Show posts Menu
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_renderFrame(JNIEnv *env, jobject obj)
{
    // Renders one AR frame: clears the buffers, draws the camera background,
    // reports the camera field of view to Java once per frame, and pushes the
    // model-view matrix of every tracked target up to the Java activity.
    //
    // Fixes vs. the pasted version:
    //  - the JNI parameters were unnamed (`JNIEnv *, jobject`) although the
    //    body used `env` and `obj` — this could not compile;
    //  - `state` was read in the trackable loop before it was declared;
    //  - stray forum ''' markers were embedded in the code;
    //  - a second loop that only computed unused locals was removed.

    // Clear color and depth buffer
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Get the state from QCAR and mark the beginning of a rendering section
    QCAR::State state = QCAR::Renderer::getInstance().begin();

    // Explicitly render the Video Background
    QCAR::Renderer::getInstance().drawVideoBackground();

    // Resolve the Java callbacks on the activity that owns this renderer.
    jclass activityClass = env->GetObjectClass(obj);
    jmethodID updateMatrixMethod = env->GetMethodID(activityClass, "updateModelviewMatrix", "([F)V");
    jmethodID fovMethod  = env->GetMethodID(activityClass, "setFov", "(F)V");
    jmethodID fovyMethod = env->GetMethodID(activityClass, "setFovy", "(F)V");

    // Derive horizontal/vertical field of view (radians) from the camera
    // calibration: fov = 2 * atan(0.5 * size / focalLength) per axis.
    const QCAR::CameraCalibration& cameraCalibration = QCAR::CameraDevice::getInstance().getCameraCalibration();
    QCAR::Vec2F size = cameraCalibration.getSize();
    QCAR::Vec2F focalLength = cameraCalibration.getFocalLength();
    float fovyRadians = 2 * atan(0.5f * size.data[1] / focalLength.data[1]);
    float fovRadians  = 2 * atan(0.5f * size.data[0] / focalLength.data[0]);
    env->CallVoidMethod(obj, fovMethod, fovRadians);
    env->CallVoidMethod(obj, fovyMethod, fovyRadians);

    // One reusable 4x4 (16-float) buffer for handing matrices to Java.
    jfloatArray modelviewArray = env->NewFloatArray(16);
    for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
    {
        // Get the trackable and convert its pose into an OpenGL matrix:
        const QCAR::TrackableResult* result = state.getTrackableResult(tIdx);
        QCAR::Matrix44F modelViewMatrix = QCAR::Tool::convertPose2GLMatrix(result->getPose());

        // Flip 180 degrees about X so the model faces the camera.
        SampleUtils::rotatePoseMatrix(180.0f, 1.0f, 0, 0, &modelViewMatrix.data[0]);

        // Pass the model view matrix to Java.
        env->SetFloatArrayRegion(modelviewArray, 0, 16, modelViewMatrix.data);
        env->CallVoidMethod(obj, updateMatrixMethod, modelviewArray);
    }
    env->DeleteLocalRef(modelviewArray);

    QCAR::Renderer::getInstance().end();
}
// NOTE(review): this is a headerless fragment — the enclosing function's
// signature and opening brace are not part of this paste, and the final `}`
// below closes that unseen function. Presumably this is the body of the
// original sample's renderFrame — TODO confirm against the full file.
//LOG("Java_com_qualcomm_QCARSamples_ImageTargets_GLRenderer_renderFrame");
// Clear color and depth buffer
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Get the state from QCAR and mark the beginning of a rendering section
QCAR::State state = QCAR::Renderer::getInstance().begin();
// Explicitly render the Video Background
QCAR::Renderer::getInstance().drawVideoBackground();
#ifdef USE_OPENGL_ES_1_1
// Set GL11 flags: fixed-function pipeline setup (client arrays + texturing).
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_NORMAL_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glEnable(GL_TEXTURE_2D);
glDisable(GL_LIGHTING);
#endif
glEnable(GL_DEPTH_TEST);
// We must detect if background reflection is active and adjust the culling direction.
// If the reflection is active, this means the post matrix has been reflected as well,
// therefore standard counter clockwise face culling will result in "inside out" models.
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
if(QCAR::Renderer::getInstance().getVideoBackgroundConfig().mReflection == QCAR::VIDEO_BACKGROUND_REFLECTION_ON)
glFrontFace(GL_CW); //Front camera
else
glFrontFace(GL_CCW); //Back camera
// Did we find any trackables this frame?
for(int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
{
// Get the trackable:
const QCAR::TrackableResult* result = state.getTrackableResult(tIdx);
const QCAR::Trackable& trackable = result->getTrackable();
QCAR::Matrix44F modelViewMatrix =
QCAR::Tool::convertPose2GLMatrix(result->getPose());
// Choose the texture based on the target name:
int textureIndex;
if (strcmp(trackable.getName(), "chips") == 0)
{
textureIndex = 0;
}
else if (strcmp(trackable.getName(), "stones") == 0)
{
textureIndex = 1;
}
else
{
// Any other target name falls back to the third texture slot.
textureIndex = 2;
}
const Texture* const thisTexture = textures[textureIndex];
#ifdef USE_OPENGL_ES_1_1
// ES 1.1 path: load matrices into the fixed-function pipeline and draw
// the mesh via client-side vertex/normal arrays.
// Load projection matrix:
glMatrixMode(GL_PROJECTION);
glLoadMatrixf(projectionMatrix.data);
// Load model view matrix:
glMatrixMode(GL_MODELVIEW);
glLoadMatrixf(modelViewMatrix.data);
glTranslatef(0.f, 0.f, kObjectScale);
glScalef(kObjectScale, kObjectScale, kObjectScale);
// Draw object (texturing intentionally commented out in this paste):
// glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
// glTexCoordPointer(2, GL_FLOAT, 0, (const GLvoid*) &teapotTexCoords[0]);
glVertexPointer(3, GL_FLOAT, 0, (const GLvoid*) &teschtVerts[0]);
glNormalPointer(GL_FLOAT, 0, (const GLvoid*) &teschtNormals[0]);
// glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEX, GL_UNSIGNED_SHORT,
// (const GLvoid*) &teschtIndices[0]);
glDrawArrays(GL_TRIANGLES, 0, teschtNumVerts);
#else
// ES 2.0 path: compose MVP = projection * (modelView * translate * scale)
// and draw with the shader program set up in initRendering.
QCAR::Matrix44F modelViewProjection;
SampleUtils::translatePoseMatrix(0.0f, 0.0f, kObjectScale,
&modelViewMatrix.data[0]);
SampleUtils::scalePoseMatrix(kObjectScale, kObjectScale, kObjectScale,
&modelViewMatrix.data[0]);
SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
&modelViewMatrix.data[0] ,
&modelViewProjection.data[0]);
glUseProgram(shaderProgramID);
glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
(const GLvoid*) &teschtVerts[0]);
glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0,
(const GLvoid*) &teschtNormals[0]);
glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
(const GLvoid*) &teapotTexCoords[0]);
glEnableVertexAttribArray(vertexHandle);
glEnableVertexAttribArray(normalHandle);
glEnableVertexAttribArray(textureCoordHandle);
// Texturing disabled in this paste (untextured draw):
//glActiveTexture(GL_TEXTURE0);
// glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
// glUniform1i(texSampler2DHandle, 0 /*GL_TEXTURE0*/);
glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
(GLfloat*)&modelViewProjection.data[0] );
// glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEX, GL_UNSIGNED_SHORT,
// (const GLvoid*) &teschtIndices[0]);
glDrawArrays(GL_TRIANGLES, 0, teschtNumVerts);
SampleUtils::checkGlError("ImageTargets renderFrame");
#endif
}
// Restore GL state modified above before ending the render section.
glDisable(GL_DEPTH_TEST);
#ifdef USE_OPENGL_ES_1_1
glDisable(GL_TEXTURE_2D);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_NORMAL_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
#else
glDisableVertexAttribArray(vertexHandle);
glDisableVertexAttribArray(normalHandle);
glDisableVertexAttribArray(textureCoordHandle);
#endif
QCAR::Renderer::getInstance().end();
}
void
configureVideoBackground()
{
// Get the default video mode:
QCAR::CameraDevice& cameraDevice = QCAR::CameraDevice::getInstance();
QCAR::VideoMode videoMode = cameraDevice.
getVideoMode(QCAR::CameraDevice::MODE_DEFAULT);
// Configure the video background
QCAR::VideoBackgroundConfig config;
config.mEnabled = true;
config.mSynchronous = true;
config.mPosition.data[0] = 0.0f;
config.mPosition.data[1] = 0.0f;
if (isActivityInPortraitMode)
{
//LOG("configureVideoBackground PORTRAIT");
config.mSize.data[0] = videoMode.mHeight
* (screenHeight / (float)videoMode.mWidth);
config.mSize.data[1] = screenHeight;
if(config.mSize.data[0] < screenWidth)
{
LOG("Correcting rendering background size to handle missmatch between screen and video aspect ratios.");
config.mSize.data[0] = screenWidth;
config.mSize.data[1] = screenWidth *
(videoMode.mWidth / (float)videoMode.mHeight);
}
}
else
{
//LOG("configureVideoBackground LANDSCAPE");
config.mSize.data[0] = screenWidth;
config.mSize.data[1] = videoMode.mHeight
* (screenWidth / (float)videoMode.mWidth);
if(config.mSize.data[1] < screenHeight)
{
LOG("Correcting rendering background size to handle missmatch between screen and video aspect ratios.");
config.mSize.data[0] = screenHeight
* (videoMode.mWidth / (float)videoMode.mHeight);
config.mSize.data[1] = screenHeight;
}
}
LOG("Configure Video Background : Video (%d,%d), Screen (%d,%d), mSize (%d,%d)", videoMode.mWidth, videoMode.mHeight, screenWidth, screenHeight, config.mSize.data[0], config.mSize.data[1]);
// Set the config:
QCAR::Renderer::getInstance().setVideoBackgroundConfig(config);
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initApplicationNative(
                            JNIEnv* env, jobject obj, jint width, jint height)
{
    // Records the screen dimensions and pulls every texture the Java activity
    // exposes (via getTextureCount()/getTexture(int)) into the native
    // `textures` array. On any failure it logs and returns early.
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initApplicationNative");

    // Store screen dimensions
    screenWidth = width;
    screenHeight = height;

    // Handle to the activity class:
    jclass activityClass = env->GetObjectClass(obj);
    jmethodID getTextureCountMethodID = env->GetMethodID(activityClass,
                                                    "getTextureCount", "()I");
    if (getTextureCountMethodID == 0)
    {
        LOG("Function getTextureCount() not found.");
        return;
    }

    textureCount = env->CallIntMethod(obj, getTextureCountMethodID);
    if (!textureCount)
    {
        LOG("getTextureCount() returned zero.");
        return;
    }

    // BUGFIX: value-initialize the array (trailing `()`) so every slot starts
    // as NULL. Previously the slots were left uninitialized, and an early
    // return below would leave garbage pointers that
    // deinitApplicationNative() later deleted — undefined behavior.
    textures = new Texture*[textureCount]();

    jmethodID getTextureMethodID = env->GetMethodID(activityClass,
                    "getTexture", "(I)Lcom/qualcomm/QCARSamples/ImageTargets/Texture;");
    if (getTextureMethodID == 0)
    {
        LOG("Function getTexture() not found.");
        return;
    }

    // Register the textures; a NULL from Java aborts the load (already-created
    // entries remain valid and the rest stay NULL, which deinit handles).
    for (int i = 0; i < textureCount; ++i)
    {
        jobject textureObject = env->CallObjectMethod(obj, getTextureMethodID, i);
        if (textureObject == NULL)
        {
            LOG("GetTexture() returned zero pointer");
            return;
        }
        textures[i] = Texture::create(env, textureObject);
    }

    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initApplicationNative finished");
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitApplicationNative(
                                                        JNIEnv* env, jobject obj)
{
    // Releases every Texture created during init and drops the array itself,
    // resetting the globals so a later init starts from a clean slate.
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitApplicationNative");

    if (textures != 0)
    {
        for (int idx = 0; idx < textureCount; ++idx)
        {
            delete textures[idx];
            textures[idx] = NULL;
        }
        delete[] textures;
        textures = NULL;
        textureCount = 0;
    }
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_startCamera(JNIEnv *,
                                                                    jobject)
{
    // Opens the camera, configures the video background, starts streaming,
    // and finally starts the image tracker. Each step bails out on failure.
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_startCamera");

    // Select the camera to open, set this to QCAR::CameraDevice::CAMERA_FRONT
    // to activate the front camera instead.
    QCAR::CameraDevice::CAMERA camera = QCAR::CameraDevice::CAMERA_DEFAULT;

    // Initialize the camera:
    if (!QCAR::CameraDevice::getInstance().init(camera))
        return;

    // Configure the video background
    configureVideoBackground();

    // Select the default mode:
    if (!QCAR::CameraDevice::getInstance().selectVideoMode(
                                    QCAR::CameraDevice::MODE_DEFAULT))
        return;

    // Start the camera:
    if (!QCAR::CameraDevice::getInstance().start())
        return;

    // Uncomment to enable flash
    //if(QCAR::CameraDevice::getInstance().setFlashTorchMode(true))
    // LOG("IMAGE TARGETS : enabled torch");

    // Uncomment to enable infinity focus mode, or any other supported focus mode
    // See CameraDevice.h for supported focus modes
    //if(QCAR::CameraDevice::getInstance().setFocusMode(QCAR::CameraDevice::FOCUS_MODE_INFINITY))
    // LOG("IMAGE TARGETS : enabled infinity focus");

    // Start the tracker:
    QCAR::TrackerManager& manager = QCAR::TrackerManager::getInstance();
    QCAR::Tracker* tracker = manager.getTracker(QCAR::Tracker::IMAGE_TRACKER);
    if (tracker != 0)
        tracker->start();
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_stopCamera(JNIEnv *, jobject)
{
    // Stops the image tracker first, then halts and releases the camera.
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_stopCamera");

    QCAR::TrackerManager& manager = QCAR::TrackerManager::getInstance();
    QCAR::Tracker* tracker = manager.getTracker(QCAR::Tracker::IMAGE_TRACKER);
    if (tracker != 0)
        tracker->stop();

    QCAR::CameraDevice::getInstance().stop();
    QCAR::CameraDevice::getInstance().deinit();
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setProjectionMatrix(JNIEnv *, jobject)
{
    // Builds the GL projection matrix from the camera calibration using
    // near/far clip planes of 2.0 and 2500.0, caching it in the global.
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setProjectionMatrix");

    const QCAR::CameraCalibration& calibration =
                        QCAR::CameraDevice::getInstance().getCameraCalibration();
    projectionMatrix = QCAR::Tool::getProjectionGL(calibration, 2.0f, 2500.0f);
}
// ----------------------------------------------------------------------------
// Turns the camera torch/flash on or off; returns whether the request worked.
// ----------------------------------------------------------------------------
JNIEXPORT jboolean JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_activateFlash(JNIEnv*, jobject, jboolean flash)
{
    const bool enable = (flash == JNI_TRUE);
    if (QCAR::CameraDevice::getInstance().setFlashTorchMode(enable))
        return JNI_TRUE;
    return JNI_FALSE;
}
JNIEXPORT jboolean JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_autofocus(JNIEnv*, jobject)
{
    // Triggers a single autofocus cycle and reports success back to Java.
    const bool triggered = QCAR::CameraDevice::getInstance().setFocusMode(QCAR::CameraDevice::FOCUS_MODE_TRIGGERAUTO);
    return triggered ? JNI_TRUE : JNI_FALSE;
}
JNIEXPORT jboolean JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setFocusMode(JNIEnv*, jobject, jint mode)
{
    // Maps the Java-side focus-mode index (0..3) onto the QCAR focus enum and
    // applies it; returns JNI_FALSE for an unknown index or a failed request.
    int focusMode;
    switch ((int)mode)
    {
        case 0:  focusMode = QCAR::CameraDevice::FOCUS_MODE_NORMAL;         break;
        case 1:  focusMode = QCAR::CameraDevice::FOCUS_MODE_CONTINUOUSAUTO; break;
        case 2:  focusMode = QCAR::CameraDevice::FOCUS_MODE_INFINITY;       break;
        case 3:  focusMode = QCAR::CameraDevice::FOCUS_MODE_MACRO;          break;
        default: return JNI_FALSE;  // unrecognized index from Java
    }
    return QCAR::CameraDevice::getInstance().setFocusMode(focusMode) ? JNI_TRUE : JNI_FALSE;
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_initRendering(
                                                    JNIEnv* env, jobject obj)
{
    // One-time GL setup: clear color, one GL texture object per loaded
    // Texture, and (on ES 2.0) the shader program plus its attribute and
    // uniform handles.
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_initRendering");

    // Define clear color; use a transparent clear when QCAR asks for alpha.
    glClearColor(0.0f, 0.0f, 0.0f, QCAR::requiresAlpha() ? 0.0f : 1.0f);

    // Upload each texture's pixel data into its own GL texture object.
    for (int t = 0; t < textureCount; ++t)
    {
        Texture* tex = textures[t];
        glGenTextures(1, &(tex->mTextureID));
        glBindTexture(GL_TEXTURE_2D, tex->mTextureID);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, tex->mWidth, tex->mHeight,
                     0, GL_RGBA, GL_UNSIGNED_BYTE, (GLvoid*) tex->mData);
    }

#ifndef USE_OPENGL_ES_1_1
    // Build the shader program and cache attribute/uniform locations.
    shaderProgramID    = SampleUtils::createProgramFromBuffer(cubeMeshVertexShader,
                                                              cubeFragmentShader);
    vertexHandle       = glGetAttribLocation(shaderProgramID, "vertexPosition");
    normalHandle       = glGetAttribLocation(shaderProgramID, "vertexNormal");
    textureCoordHandle = glGetAttribLocation(shaderProgramID, "vertexTexCoord");
    mvpMatrixHandle    = glGetUniformLocation(shaderProgramID, "modelViewProjectionMatrix");
    texSampler2DHandle = glGetUniformLocation(shaderProgramID, "texSampler2D");
#endif
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_updateRendering(
                        JNIEnv* env, jobject obj, jint width, jint height)
{
    // Called when the GL surface changes size: record the new dimensions and
    // rebuild the video-background configuration to match.
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_updateRendering");

    screenWidth = width;
    screenHeight = height;

    configureVideoBackground();
}
#ifdef __cplusplus
}
#endif
Page created in 0.019 seconds with 13 queries.