What is nftSimple?
nftSimple loads its NFT dataset names from a configuration file.
The example uses the "Pinball.jpg" image supplied in the "Misc/patterns" folder. ARToolKit NFT requires a fast device, preferably dual-core, for good performance, e.g. a Samsung Galaxy SII or similar. Building and deploying for Android API 9 (Android OS v2.3) or later is recommended.
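The format of that configuration file can be inferred from the newMarkers() parser shown later in this post: the first line gives the number of markers; each entry then consists of a dataset pathname (relative to the configuration file), a type token (here NFT), and optional option lines such as FILTER, terminated by a blank line. A minimal sketch (the pinball dataset name and the FILTER cutoff value are illustrative):

1

pinball
NFT
FILTER 15.0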
nftSimple - NFT Example
For developers who are already familiar with the code of simpleLite, a side-by-side comparison with the code of nftSimple is worthwhile.
The basic flow of program operations (grab a frame, track markers, render) is very similar;
however, there are significant changes in how the marker information is handled, as well as in the control flow of the tracking loop.
Choose a camera resolution no greater than 800×600. A resolution of 640×480 is perfectly acceptable for NFT; the greater frame rate achievable at this resolution is of more advantage than a larger frame size.
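On many platforms the capture resolution can be requested through the video configuration string passed to arVideoOpen(); the option syntax is specific to each video module, so treat this line as a sketch rather than a universal recipe:

static char vconf[] = "-width=640 -height=480"; // Passed to arVideoOpen(); option names vary by video module.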
1. simpleLite -- Linux version
/*!
@typedef ARHandle
@abstract (description)
@discussion (description)
@field arDebug (description)
@field arPixelFormat (description)
@field arPixelSize (description)
@field arLabelingMode (description)
@field arLabelingThresh (description)
@field arImageProcMode
To query this value, call arGetImageProcMode(). To set this value, call arSetImageProcMode().
@field arPatternDetectionMode (description)
@field arMarkerExtractionMode (description)
@field arParamLT (description)
@field marker_num (description)
@field markerInfo (description)
@field marker2_num (description)
@field markerInfo2 (description)
@field history_num (description)
@field history (description)
@field labelInfo (description)
@field pattHandle (description)
@field pattRatio A value between 0.0 and 1.0, representing the proportion of the marker width which constitutes the pattern. In earlier versions, this value was fixed at 0.5.
@field matrixCodeType When matrix code pattern detection mode is active, indicates the type of matrix code to detect.
*/
typedef struct {
int arDebug;
AR_PIXEL_FORMAT arPixelFormat;
int arPixelSize;
int arLabelingMode;
int arLabelingThresh;
int arImageProcMode;
int arPatternDetectionMode;
int arMarkerExtractionMode;
ARParamLT *arParamLT;
int xsize;
int ysize;
int marker_num;
ARMarkerInfo markerInfo[AR_SQUARE_MAX];
int marker2_num;
ARMarkerInfo2 markerInfo2[AR_SQUARE_MAX];
int history_num;
ARTrackingHistory history[AR_SQUARE_MAX];
ARLabelInfo labelInfo;
ARPattHandle *pattHandle;
AR_LABELING_THRESH_MODE arLabelingThreshMode;
int arLabelingThreshAutoInterval;
int arLabelingThreshAutoIntervalTTL;
int arLabelingThreshAutoBracketOver;
int arLabelingThreshAutoBracketUnder;
ARImageProcInfo *arImageProcInfo;
ARdouble pattRatio;
AR_MATRIX_CODE_TYPE matrixCodeType;
} ARHandle;
ARHandle
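The fields above are normally read and written through the library's accessor functions rather than poked directly. A short sketch of typical accessor calls (gARHandle stands for any valid handle returned by arCreateHandle(); all four functions are standard ARToolKit accessors):

int mode;
arGetImageProcMode(gARHandle, &mode);                                  // Query the current image processing mode.
arSetImageProcMode(gARHandle, AR_IMAGE_PROC_FRAME_IMAGE);              // Process whole frames rather than single fields.
arSetLabelingThreshMode(gARHandle, AR_LABELING_THRESH_MODE_AUTO_OTSU); // Let the library pick the binarization threshold.
arSetPattRatio(gARHandle, 0.5);                                        // Pattern proportion of marker width; 0.5 was the fixed value in earlier versions.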
/*
* simpleLite.c
*
* Copyright 2015 Daqri LLC. All Rights Reserved.
* Copyright 2002-2015 ARToolworks, Inc. All Rights Reserved.
*
* Author(s): Philip Lamb.
*
 */

// ============================================================================
//  Includes
// ============================================================================

#include <stdio.h>
#include <string.h>
#ifdef _WIN32
# define snprintf _snprintf
#endif
#include <stdlib.h> // malloc(), free()
#ifdef __APPLE__
# include <GLUT/glut.h>
#else
# include <GL/glut.h>
#endif
#include <AR/config.h>
#include <AR/video.h>
#include <AR/param.h> // arParamDisp()
#include <AR/ar.h>
#include <AR/gsub_lite.h>

// ============================================================================
//  Constants
// ============================================================================

#define VIEW_SCALEFACTOR    1.0     // Units received from ARToolKit tracking will be multiplied by this factor before being used in OpenGL drawing.
#define VIEW_DISTANCE_MIN   40.0    // Objects closer to the camera than this will not be displayed. OpenGL units.
#define VIEW_DISTANCE_MAX   10000.0 // Objects further away from the camera than this will not be displayed. OpenGL units.

// ============================================================================
//  Global variables
// ============================================================================

// Preferences.
static int windowed = TRUE;       // Use windowed (TRUE) or fullscreen mode (FALSE) on launch.
static int windowWidth = 640;     // Initial window width, also updated during program execution.
static int windowHeight = 480;    // Initial window height, also updated during program execution.
static int windowDepth = 32;      // Fullscreen mode bit depth.
static int windowRefresh = 0;     // Fullscreen mode refresh rate. Set to 0 to use default rate.

// Image acquisition.
static ARUint8 *gARTImage = NULL;
static int gARTImageSavePlease = FALSE;

// Marker detection.
static ARHandle *gARHandle = NULL;
static ARPattHandle *gARPattHandle = NULL;
static long gCallCountMarkerDetect = 0;

// Transformation matrix retrieval.
static AR3DHandle *gAR3DHandle = NULL;
static ARdouble gPatt_width = 80.0;    // Per-marker, but we are using only 1 marker.
static ARdouble gPatt_trans[3][4];     // Per-marker, but we are using only 1 marker.
static int gPatt_found = FALSE;        // Per-marker, but we are using only 1 marker.
static int gPatt_id;                   // Per-marker, but we are using only 1 marker.

// Drawing.
static ARParamLT *gCparamLT = NULL;
static ARGL_CONTEXT_SETTINGS_REF gArglSettings = NULL;
static int gShowHelp = 1;
static int gShowMode = 1;
static int gDrawRotate = FALSE;
static float gDrawRotateAngle = 0;     // For use in drawing.

// ============================================================================
//  Function prototypes.
// ============================================================================

static void print(const char *text, const float x, const float y, int calculateXFromRightEdge, int calculateYFromTopEdge);
static void drawBackground(const float width, const float height, const float x, const float y);
static void printHelpKeys();
static void printMode();

// ============================================================================
//  Functions
// ============================================================================

// Something to look at, draw a rotating colour cube.
static void DrawCube(void)
{
// Colour cube data.
int i;
float fSize = 40.0f;
    const GLfloat cube_vertices [8][3] = {
        /* +z */ {0.5f, 0.5f, 0.5f}, {0.5f, -0.5f, 0.5f}, {-0.5f, -0.5f, 0.5f}, {-0.5f, 0.5f, 0.5f},
        /* -z */ {0.5f, 0.5f, -0.5f}, {0.5f, -0.5f, -0.5f}, {-0.5f, -0.5f, -0.5f}, {-0.5f, 0.5f, -0.5f} };
    const GLubyte cube_vertex_colors [8][4] = {
        {255, 255, 255, 255}, {255, 255, 0, 255}, {0, 255, 0, 255}, {0, 255, 255, 255},
        {255, 0, 255, 255}, {255, 0, 0, 255}, {0, 0, 0, 255}, {0, 0, 255, 255} };
    const GLubyte cube_faces [6][4] = { /* ccw-winding */
        /* +z */ {3, 2, 1, 0}, /* -y */ {2, 3, 7, 6}, /* +y */ {0, 1, 5, 4},
        /* -x */ {3, 0, 4, 7}, /* +x */ {1, 2, 6, 5}, /* -z */ {4, 5, 6, 7} };

    glPushMatrix(); // Save world coordinate system.
glRotatef(gDrawRotateAngle, 0.0f, 0.0f, 1.0f); // Rotate about z axis.
glScalef(fSize, fSize, fSize);
glTranslatef(0.0f, 0.0f, 0.5f); // Place base of cube on marker surface.
glDisable(GL_LIGHTING);
glDisable(GL_TEXTURE_2D);
glDisable(GL_BLEND);
    glColorPointer(4, GL_UNSIGNED_BYTE, 0, cube_vertex_colors);
    glVertexPointer(3, GL_FLOAT, 0, cube_vertices);
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_COLOR_ARRAY);
    for (i = 0; i < 6; i++) {
        glDrawElements(GL_TRIANGLE_FAN, 4, GL_UNSIGNED_BYTE, &(cube_faces[i][0]));
    }
    glDisableClientState(GL_COLOR_ARRAY);
    glColor4ub(0, 0, 0, 255);
    for (i = 0; i < 6; i++) {
        glDrawElements(GL_LINE_LOOP, 4, GL_UNSIGNED_BYTE, &(cube_faces[i][0]));
    }
    glPopMatrix(); // Restore world coordinate system.
}

static void DrawCubeUpdate(float timeDelta)
{
if (gDrawRotate) {
gDrawRotateAngle += timeDelta * 45.0f; // Rotate cube at 45 degrees per second.
if (gDrawRotateAngle > 360.0f) gDrawRotateAngle -= 360.0f;
}
}

static int setupCamera(const char *cparam_name, char *vconf, ARParamLT **cparamLT_p, ARHandle **arhandle, AR3DHandle **ar3dhandle)
{
    ARParam cparam;
    int xsize, ysize;
    AR_PIXEL_FORMAT pixFormat;

    // Open the video path.
    if (arVideoOpen(vconf) < 0) {
        ARLOGe("setupCamera(): Unable to open connection to camera.\n");
        return (FALSE);
    }

    // Find the size of the window.
    if (arVideoGetSize(&xsize, &ysize) < 0) {
        ARLOGe("setupCamera(): Unable to determine camera frame size.\n");
        arVideoClose();
        return (FALSE);
    }
    ARLOGi("Camera image size (x,y) = (%d,%d)\n", xsize, ysize);

    // Get the format in which the camera is returning pixels.
    pixFormat = arVideoGetPixelFormat();
    if (pixFormat == AR_PIXEL_FORMAT_INVALID) {
        ARLOGe("setupCamera(): Camera is using unsupported pixel format.\n");
        arVideoClose();
        return (FALSE);
    }

    // Load the camera parameters, resize for the window and init.
    if (arParamLoad(cparam_name, 1, &cparam) < 0) {
        ARLOGe("setupCamera(): Error loading parameter file %s for camera.\n", cparam_name);
        arVideoClose();
        return (FALSE);
    }
    if (cparam.xsize != xsize || cparam.ysize != ysize) {
        ARLOGw("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
        arParamChangeSize(&cparam, xsize, ysize, &cparam);
    }
#ifdef DEBUG
    ARLOG("*** Camera Parameter ***\n");
    arParamDisp(&cparam);
#endif
    if ((*cparamLT_p = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) {
        ARLOGe("setupCamera(): Error: arParamLTCreate.\n");
        return (FALSE);
    }

    if ((*arhandle = arCreateHandle(*cparamLT_p)) == NULL) {
        ARLOGe("setupCamera(): Error: arCreateHandle.\n");
        return (FALSE);
    }
    if (arSetPixelFormat(*arhandle, pixFormat) < 0) {
        ARLOGe("setupCamera(): Error: arSetPixelFormat.\n");
        return (FALSE);
    }
    if (arSetDebugMode(*arhandle, AR_DEBUG_DISABLE) < 0) {
        ARLOGe("setupCamera(): Error: arSetDebugMode.\n");
        return (FALSE);
    }
    if ((*ar3dhandle = ar3DCreateHandle(&cparam)) == NULL) {
        ARLOGe("setupCamera(): Error: ar3DCreateHandle.\n");
        return (FALSE);
    }

    if (arVideoCapStart() != 0) {
        ARLOGe("setupCamera(): Unable to begin camera data capture.\n");
        return (FALSE);
    }

    return (TRUE);
}

static int setupMarker(const char *patt_name, int *patt_id, ARHandle *arhandle, ARPattHandle **pattHandle_p)
{
    if ((*pattHandle_p = arPattCreateHandle()) == NULL) {
        ARLOGe("setupMarker(): Error: arPattCreateHandle.\n");
        return (FALSE);
    }

    // Loading only 1 pattern in this example.
    if ((*patt_id = arPattLoad(*pattHandle_p, patt_name)) < 0) {
        ARLOGe("setupMarker(): Error loading pattern file %s.\n", patt_name);
        arPattDeleteHandle(*pattHandle_p);
        return (FALSE);
    }

    arPattAttach(arhandle, *pattHandle_p);

    return (TRUE);
}

static void cleanup(void)
{
arglCleanup(gArglSettings);
gArglSettings = NULL;
arPattDetach(gARHandle);
arPattDeleteHandle(gARPattHandle);
arVideoCapStop();
ar3DDeleteHandle(&gAR3DHandle);
arDeleteHandle(gARHandle);
arParamLTFree(&gCparamLT);
arVideoClose();
}

static void Keyboard(unsigned char key, int x, int y)
{
    int mode, threshChange = 0;
    AR_LABELING_THRESH_MODE modea;

    switch (key) {
case 0x1B: // Quit.
case 'Q':
case 'q':
cleanup();
            exit(0);
break;
case ' ':
gDrawRotate = !gDrawRotate;
break;
case 'X':
case 'x':
arGetImageProcMode(gARHandle, &mode);
switch (mode) {
case AR_IMAGE_PROC_FRAME_IMAGE: mode = AR_IMAGE_PROC_FIELD_IMAGE; break;
case AR_IMAGE_PROC_FIELD_IMAGE:
default: mode = AR_IMAGE_PROC_FRAME_IMAGE; break;
}
arSetImageProcMode(gARHandle, mode);
break;
case 'C':
case 'c':
ARLOGe("*** Camera - %f (frame/sec)\n", (double)gCallCountMarkerDetect/arUtilTimer());
            gCallCountMarkerDetect = 0;
arUtilTimerReset();
break;
case 'a':
case 'A':
arGetLabelingThreshMode(gARHandle, &modea);
switch (modea) {
case AR_LABELING_THRESH_MODE_MANUAL: modea = AR_LABELING_THRESH_MODE_AUTO_MEDIAN; break;
case AR_LABELING_THRESH_MODE_AUTO_MEDIAN: modea = AR_LABELING_THRESH_MODE_AUTO_OTSU; break;
case AR_LABELING_THRESH_MODE_AUTO_OTSU: modea = AR_LABELING_THRESH_MODE_AUTO_ADAPTIVE; break;
case AR_LABELING_THRESH_MODE_AUTO_ADAPTIVE: modea = AR_LABELING_THRESH_MODE_AUTO_BRACKETING; break;
case AR_LABELING_THRESH_MODE_AUTO_BRACKETING:
default: modea = AR_LABELING_THRESH_MODE_MANUAL; break;
}
arSetLabelingThreshMode(gARHandle, modea);
break;
case '-':
            threshChange = -5;
break;
case '+':
case '=':
            threshChange = +5;
break;
case 'D':
case 'd':
arGetDebugMode(gARHandle, &mode);
arSetDebugMode(gARHandle, !mode);
break;
case 's':
case 'S':
if (!gARTImageSavePlease) gARTImageSavePlease = TRUE;
break;
case '?':
case '/':
gShowHelp++;
            if (gShowHelp > 1) gShowHelp = 0;
break;
case 'm':
case 'M':
gShowMode = !gShowMode;
break;
default:
break;
}
if (threshChange) {
int threshhold;
arGetLabelingThresh(gARHandle, &threshhold);
threshhold += threshChange;
        if (threshhold < 0) threshhold = 0;
        if (threshhold > 255) threshhold = 255;
        arSetLabelingThresh(gARHandle, threshhold);
    }
}

static void mainLoop(void)
{
    static int imageNumber = 0;
static int ms_prev;
int ms;
float s_elapsed;
ARUint8 *image;
    ARdouble err;
    int j, k;

    // Find out how long since mainLoop() last ran.
ms = glutGet(GLUT_ELAPSED_TIME);
s_elapsed = (float)(ms - ms_prev) * 0.001f;
if (s_elapsed < 0.01f) return; // Don't update more often than 100 Hz.
    ms_prev = ms;

    // Update drawing. (Why draw again here? Doesn't Display() already do the drawing?)
    DrawCubeUpdate(s_elapsed);

    // Grab a video frame.
if ((image = arVideoGetImage()) != NULL) {
        gARTImage = image; // Save the fetched image.

        if (gARTImageSavePlease) {
            char imageNumberText[15];
            sprintf(imageNumberText, "image-%04d.jpg", imageNumber++);
            if (arVideoSaveImageJPEG(gARHandle->xsize, gARHandle->ysize, gARHandle->arPixelFormat, gARTImage, imageNumberText, 75, 0) < 0) {
                ARLOGe("Error saving video image.\n");
            }
            gARTImageSavePlease = FALSE;
        }

        gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.

        // Detect the markers in the video frame.
        if (arDetectMarker(gARHandle, gARTImage) < 0) { // -->
            exit(-1);
        }

        // Check through the marker_info array for highest confidence
        // visible marker matching our preferred pattern.
        k = -1;
        for (j = 0; j < gARHandle->marker_num; j++) {
            if (gARHandle->markerInfo[j].id == gPatt_id) {
                if (k == -1) k = j; // First marker detected.
                else if (gARHandle->markerInfo[j].cf > gARHandle->markerInfo[k].cf) k = j; // Higher confidence marker detected.
            }
        }

        if (k != -1) {
            // Get the transformation between the marker and the real camera into gPatt_trans.
            err = arGetTransMatSquare(gAR3DHandle, &(gARHandle->markerInfo[k]), gPatt_width, gPatt_trans);
            gPatt_found = TRUE;
        } else {
            gPatt_found = FALSE;
        }

        // Tell GLUT the display has changed.
        glutPostRedisplay();
    }
}

//
// This function is called on events when the visibility of the
// GLUT window changes (including when it first becomes visible).
//
static void Visibility(int visible)
{
if (visible == GLUT_VISIBLE) {
glutIdleFunc(mainLoop);
} else {
glutIdleFunc(NULL);
}
}

//
// This function is called when the
// GLUT window is resized.
//
static void Reshape(int w, int h)
{
windowWidth = w;
    windowHeight = h;

    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glViewport(0, 0, (GLsizei) w, (GLsizei) h);

    // Call through to anyone else who needs to know about window sizing here.
}

//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    ARdouble p[16];
    ARdouble m[16];

    // Select correct buffer for this context.
glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

    arglPixelBufferDataUpload(gArglSettings, gARTImage);
    arglDispImage(gArglSettings);
    gARTImage = NULL; // Invalidate image data.

    // Projection transformation.
arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
glLoadMatrixf(p);
#else
glLoadMatrixd(p);
#endif
    glMatrixMode(GL_MODELVIEW);

    glEnable(GL_DEPTH_TEST);

    // Viewing transformation.
glLoadIdentity();
// Lighting and geometry that moves with the camera should go here.
// (I.e. must be specified before viewing transformations.)
    //none

    if (gPatt_found) {

        // Calculate the camera position relative to the marker.
        // Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
        arglCameraViewRH((const ARdouble (*)[4])gPatt_trans, m, VIEW_SCALEFACTOR);
#ifdef ARDOUBLE_IS_FLOAT
glLoadMatrixf(m);
#else
glLoadMatrixd(m);
#endif

        // All lighting and geometry to be drawn relative to the marker goes here.
        DrawCube();

    } // gPatt_found

    // Any 2D overlays go here.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    //
// Draw help text and mode.
//
if (gShowMode) {
printMode();
}
if (gShowHelp) {
        if (gShowHelp == 1) {
printHelpKeys();
}
    }

    glutSwapBuffers();
}

int main(int argc, char** argv)
{
    char glutGamemode[32];
    char cparam_name[] = "Data/camera_para.dat";
    char vconf[] = "";
    char patt_name[] = "Data/hiro.patt";

    //
// Library inits.
    //

    glutInit(&argc, argv);

    //
// Video setup.
    //

    if (!setupCamera(cparam_name, vconf, &gCparamLT, &gARHandle, &gAR3DHandle)) {
ARLOGe("main(): Unable to set up AR camera.\n");
        exit(-1);
    }

    //
// Graphics setup.
    //

    // Set up GL context(s) for OpenGL to draw into.
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);
if (!windowed) {
if (windowRefresh) sprintf(glutGamemode, "%ix%i:%i@%i", windowWidth, windowHeight, windowDepth, windowRefresh);
else sprintf(glutGamemode, "%ix%i:%i", windowWidth, windowHeight, windowDepth);
glutGameModeString(glutGamemode);
glutEnterGameMode();
} else {
glutInitWindowSize(windowWidth, windowHeight);
        glutCreateWindow(argv[0]);
    }

    // Setup ARgsub_lite library for current OpenGL context.
if ((gArglSettings = arglSetupForCurrentContext(&(gCparamLT->param), arVideoGetPixelFormat())) == NULL) {
ARLOGe("main(): arglSetupForCurrentContext() returned error.\n");
cleanup();
        exit(-1);
}
arglSetupDebugMode(gArglSettings, gARHandle);
    arUtilTimerReset();

    // Load marker(s).
if (!setupMarker(patt_name, &gPatt_id, gARHandle, &gARPattHandle)) {
ARLOGe("main(): Unable to set up AR marker.\n");
cleanup();
        exit(-1);
    }

    // Register GLUT event-handling callbacks.
// NB: mainLoop() is registered by Visibility.
glutDisplayFunc(Display);
glutReshapeFunc(Reshape);
glutVisibilityFunc(Visibility);
    glutKeyboardFunc(Keyboard);

    glutMainLoop();

    return (0);
}

//
// The following functions provide the onscreen help text and mode info.
//

static void print(const char *text, const float x, const float y, int calculateXFromRightEdge, int calculateYFromTopEdge)
{
int i, len;
    GLfloat x0, y0;

    if (!text) return;

    if (calculateXFromRightEdge) {
x0 = windowWidth - x - (float)glutBitmapLength(GLUT_BITMAP_HELVETICA_10, (const unsigned char *)text);
} else {
x0 = x;
}
if (calculateYFromTopEdge) {
y0 = windowHeight - y - 10.0f;
} else {
y0 = y;
}
    glRasterPos2f(x0, y0);
    len = (int)strlen(text);
    for (i = 0; i < len; i++) glutBitmapCharacter(GLUT_BITMAP_HELVETICA_10, text[i]);
}

static void drawBackground(const float width, const float height, const float x, const float y)
{
    GLfloat vertices[4][2];

    vertices[0][0] = x; vertices[0][1] = y;
    vertices[1][0] = width + x; vertices[1][1] = y;
    vertices[2][0] = width + x; vertices[2][1] = height + y;
    vertices[3][0] = x; vertices[3][1] = height + y;
glLoadIdentity();
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
glVertexPointer(, GL_FLOAT, , vertices);
glEnableClientState(GL_VERTEX_ARRAY);
glColor4f(0.0f, 0.0f, 0.0f, 0.5f); // 50% transparent black.
    glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
glColor4f(1.0f, 1.0f, 1.0f, 1.0f); // Opaque white.
//glLineWidth(1.0f);
//glDrawArrays(GL_LINE_LOOP, 0, 4);
glDisableClientState(GL_VERTEX_ARRAY);
glDisable(GL_BLEND);
}

static void printHelpKeys()
{
int i;
GLfloat w, bw, bh;
const char *helpText[] = {
"Keys:\n",
" ? or / Show/hide this help.",
" q or [esc] Quit program.",
" d Activate / deactivate debug mode.",
" m Toggle display of mode info.",
" a Toggle between available threshold modes.",
" - and + Switch to manual threshold mode, and adjust threshhold up/down by 5.",
" x Change image processing mode.",
" c Calulcate frame rate.",
};
#define helpTextLineCount (sizeof(helpText)/sizeof(char *))

    bw = 0.0f;
    for (i = 0; i < helpTextLineCount; i++) {
        w = (float)glutBitmapLength(GLUT_BITMAP_HELVETICA_10, (unsigned char *)helpText[i]);
        if (w > bw) bw = w;
    }
    bh = helpTextLineCount * 10.0f /* character height */ + (helpTextLineCount - 1) * 2.0f /* line spacing */;
    drawBackground(bw, bh, 2.0f, 2.0f);

    for (i = 0; i < helpTextLineCount; i++) print(helpText[i], 2.0f, (helpTextLineCount - 1 - i)*12.0f + 2.0f, FALSE, FALSE);
}

static void printMode()
{
int len, thresh, line, mode, xsize, ysize;
AR_LABELING_THRESH_MODE threshMode;
ARdouble tempF;
    char text[256], *text_p;

    glColor3ub(255, 255, 255);
    line = 1;

    // Image size and processing mode.
arVideoGetSize(&xsize, &ysize);
arGetImageProcMode(gARHandle, &mode);
if (mode == AR_IMAGE_PROC_FRAME_IMAGE) text_p = "full frame";
else text_p = "even field only";
snprintf(text, sizeof(text), "Processing %dx%d video frames %s", xsize, ysize, text_p);
    print(text, 2.0f, (line - 1)*12.0f + 2.0f, FALSE, TRUE);
    line++;

    // Threshold mode, and threshold, if applicable.
arGetLabelingThreshMode(gARHandle, &threshMode);
switch (threshMode) {
case AR_LABELING_THRESH_MODE_MANUAL: text_p = "MANUAL"; break;
case AR_LABELING_THRESH_MODE_AUTO_MEDIAN: text_p = "AUTO_MEDIAN"; break;
case AR_LABELING_THRESH_MODE_AUTO_OTSU: text_p = "AUTO_OTSU"; break;
case AR_LABELING_THRESH_MODE_AUTO_ADAPTIVE: text_p = "AUTO_ADAPTIVE"; break;
case AR_LABELING_THRESH_MODE_AUTO_BRACKETING: text_p = "AUTO_BRACKETING"; break;
default: text_p = "UNKNOWN"; break;
}
snprintf(text, sizeof(text), "Threshold mode: %s", text_p);
if (threshMode != AR_LABELING_THRESH_MODE_AUTO_ADAPTIVE) {
arGetLabelingThresh(gARHandle, &thresh);
len = (int)strlen(text);
snprintf(text + len, sizeof(text) - len, ", thresh=%d", thresh);
}
    print(text, 2.0f, (line - 1)*12.0f + 2.0f, FALSE, TRUE);
    line++;

    // Border size, image processing mode, pattern detection mode.
arGetBorderSize(gARHandle, &tempF);
snprintf(text, sizeof(text), "Border: %0.1f%%", tempF*100.0);
arGetPatternDetectionMode(gARHandle, &mode);
switch (mode) {
case AR_TEMPLATE_MATCHING_COLOR: text_p = "Colour template (pattern)"; break;
case AR_TEMPLATE_MATCHING_MONO: text_p = "Mono template (pattern)"; break;
case AR_MATRIX_CODE_DETECTION: text_p = "Matrix (barcode)"; break;
case AR_TEMPLATE_MATCHING_COLOR_AND_MATRIX: text_p = "Colour template + Matrix (2 pass, pattern + barcode)"; break;
        case AR_TEMPLATE_MATCHING_MONO_AND_MATRIX: text_p = "Mono template + Matrix (2 pass, pattern + barcode)"; break;
default: text_p = "UNKNOWN"; break;
}
len = (int)strlen(text);
snprintf(text + len, sizeof(text) - len, ", Pattern detection mode: %s", text_p);
    print(text, 2.0f, (line - 1)*12.0f + 2.0f, FALSE, TRUE);
    line++;

    // Window size.
    snprintf(text, sizeof(text), "Drawing into %dx%d window", windowWidth, windowHeight);
    print(text, 2.0f, (line - 1)*12.0f + 2.0f, FALSE, TRUE);
    line++;
}
Next up is the thrilling arDetectMarker.c -- the file we've been waiting for!
It contains just two functions:
- int arDetectMarker( ARHandle *arHandle, ARUint8 *dataPtr );
- static void confidenceCutoff(ARHandle *arHandle);
Let's go.
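Before wading into the listing, here is the per-frame pipeline the function implements, summarized as comments (a reading aid distilled from the code below, not new API):

// 1. arLabeling()      -- binarize the frame at arLabelingThresh and extract connected regions.
// 2. arDetectMarker2() -- fit quadrilaterals to the labelled regions (candidates go into markerInfo2[]).
// 3. arGetMarkerInfo() -- match each candidate's interior against loaded patterns or matrix codes,
//                         filling markerInfo[] with id, cf (confidence) and dir (orientation).
// 4. History step      -- unless arMarkerExtractionMode is AR_NOUSE_TRACKING_HISTORY, reconcile the
//                         results with the history[] records from previous frames, then confidenceCutoff().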
int arDetectMarker( ARHandle *arHandle, ARUint8 *dataPtr )
{
ARdouble rarea, rlen, rlenmin;
ARdouble diff, diffmin;
int cid, cdir;
int i, j, k;
    int         detectionIsDone = 0;
    int         threshDiff;

#if DEBUG_PATT_GETID
    cnt = 0;
#endif

    arHandle->marker_num = 0;

    if (arHandle->arLabelingThreshMode == AR_LABELING_THRESH_MODE_AUTO_BRACKETING) {
        if (arHandle->arLabelingThreshAutoIntervalTTL > 0) {
arHandle->arLabelingThreshAutoIntervalTTL--;
} else {
            int thresholds[3];
            int marker_nums[3];

            thresholds[0] = arHandle->arLabelingThresh + arHandle->arLabelingThreshAutoBracketOver;
            if (thresholds[0] > 255) thresholds[0] = 255;
            thresholds[1] = arHandle->arLabelingThresh - arHandle->arLabelingThreshAutoBracketUnder;
            if (thresholds[1] < 0) thresholds[1] = 0;
            thresholds[2] = arHandle->arLabelingThresh;

            for (i = 0; i < 3; i++) {
                if (arLabeling(dataPtr, arHandle->xsize, arHandle->ysize, arHandle->arPixelFormat, arHandle->arDebug, arHandle->arLabelingMode, thresholds[i], arHandle->arImageProcMode, &(arHandle->labelInfo), NULL) < 0) return -1;
                if (arDetectMarker2(arHandle->xsize, arHandle->ysize, &(arHandle->labelInfo), arHandle->arImageProcMode, AR_AREA_MAX, AR_AREA_MIN, AR_SQUARE_FIT_THRESH, arHandle->markerInfo2, &(arHandle->marker2_num)) < 0) return -1;
                if (arGetMarkerInfo(dataPtr, arHandle->xsize, arHandle->ysize, arHandle->arPixelFormat, arHandle->markerInfo2, arHandle->marker2_num, arHandle->pattHandle, arHandle->arImageProcMode, arHandle->arPatternDetectionMode, &(arHandle->arParamLT->paramLTf), arHandle->pattRatio, arHandle->markerInfo, &(arHandle->marker_num), arHandle->matrixCodeType) < 0) return -1;
                marker_nums[i] = arHandle->marker_num;
            }

            if (arHandle->arDebug == AR_DEBUG_ENABLE) ARLOGe("Auto threshold (bracket) marker counts -[%3d: %3d] [%3d: %3d] [%3d: %3d]+.\n", thresholds[1], marker_nums[1], thresholds[2], marker_nums[2], thresholds[0], marker_nums[0]);

            // If neither of the bracketed values was superior, then change the size of the bracket.
            if (marker_nums[0] <= marker_nums[2] && marker_nums[1] <= marker_nums[2]) {
if (arHandle->arLabelingThreshAutoBracketOver < arHandle->arLabelingThreshAutoBracketUnder) {
arHandle->arLabelingThreshAutoBracketOver++;
} else if (arHandle->arLabelingThreshAutoBracketOver > arHandle->arLabelingThreshAutoBracketUnder) {
arHandle->arLabelingThreshAutoBracketUnder++;
} else {
arHandle->arLabelingThreshAutoBracketOver++;
arHandle->arLabelingThreshAutoBracketUnder++;
}
                if ((thresholds[2] + arHandle->arLabelingThreshAutoBracketOver) >= 255) arHandle->arLabelingThreshAutoBracketOver = 1; // If the bracket has hit the end of the range, reset it.
                if ((thresholds[2] - arHandle->arLabelingThreshAutoBracketUnder) <= 0) arHandle->arLabelingThreshAutoBracketUnder = 1; // If a bracket has hit the end of the range, reset it.
                detectionIsDone = 1;
} else {
                arHandle->arLabelingThresh = (marker_nums[0] >= marker_nums[1] ? thresholds[0] : thresholds[1]);
                threshDiff = arHandle->arLabelingThresh - thresholds[2];
                if (threshDiff > 0) {
                    arHandle->arLabelingThreshAutoBracketOver = threshDiff;
                    arHandle->arLabelingThreshAutoBracketUnder = 1;
                } else {
                    arHandle->arLabelingThreshAutoBracketOver = 1;
                    arHandle->arLabelingThreshAutoBracketUnder = -threshDiff;
                }
if (arHandle->arDebug == AR_DEBUG_ENABLE) ARLOGe("Auto threshold (bracket) adjusted threshold to %d.\n", arHandle->arLabelingThresh);
}
arHandle->arLabelingThreshAutoIntervalTTL = arHandle->arLabelingThreshAutoInterval;
}
    }

    if (!detectionIsDone) {
#if !AR_DISABLE_THRESH_MODE_AUTO_ADAPTIVE
        if (arHandle->arLabelingThreshMode == AR_LABELING_THRESH_MODE_AUTO_ADAPTIVE) {

            int ret;
            ret = arImageProcLumaHistAndBoxFilterWithBias(arHandle->arImageProcInfo, dataPtr, AR_LABELING_THRESH_ADAPTIVE_KERNEL_SIZE_DEFAULT, AR_LABELING_THRESH_ADAPTIVE_BIAS_DEFAULT);
            if (ret < 0) return (ret);

            ret = arLabeling(arHandle->arImageProcInfo->image, arHandle->arImageProcInfo->imageX, arHandle->arImageProcInfo->imageY,
                             AR_PIXEL_FORMAT_MONO, arHandle->arDebug, arHandle->arLabelingMode,
                             0, AR_IMAGE_PROC_FRAME_IMAGE,
                             &(arHandle->labelInfo), arHandle->arImageProcInfo->image2);
            if (ret < 0) return (ret);

        } else { // !adaptive
#endif

        if (arHandle->arLabelingThreshMode == AR_LABELING_THRESH_MODE_AUTO_MEDIAN || arHandle->arLabelingThreshMode == AR_LABELING_THRESH_MODE_AUTO_OTSU) {
            // Do an auto-threshold operation.
            if (arHandle->arLabelingThreshAutoIntervalTTL > 0) {
                arHandle->arLabelingThreshAutoIntervalTTL--;
            } else {
                int ret;
                unsigned char value;
                if (arHandle->arLabelingThreshMode == AR_LABELING_THRESH_MODE_AUTO_MEDIAN) ret = arImageProcLumaHistAndCDFAndMedian(arHandle->arImageProcInfo, dataPtr, &value);
                else ret = arImageProcLumaHistAndOtsu(arHandle->arImageProcInfo, dataPtr, &value);
                if (ret < 0) return (ret);
                if (arHandle->arDebug == AR_DEBUG_ENABLE && arHandle->arLabelingThresh != value) ARLOGe("Auto threshold (%s) adjusted threshold to %d.\n", (arHandle->arLabelingThreshMode == AR_LABELING_THRESH_MODE_AUTO_MEDIAN ? "median" : "Otsu"), value);
                arHandle->arLabelingThresh = value;
                arHandle->arLabelingThreshAutoIntervalTTL = arHandle->arLabelingThreshAutoInterval;
            }
        }

        if( arLabeling(dataPtr, arHandle->xsize, arHandle->ysize,
                       arHandle->arPixelFormat, arHandle->arDebug, arHandle->arLabelingMode,
                       arHandle->arLabelingThresh, arHandle->arImageProcMode,
                       &(arHandle->labelInfo), NULL) < 0 ) {
            return -1;
        }

#if !AR_DISABLE_THRESH_MODE_AUTO_ADAPTIVE
        }
#endif

        if( arDetectMarker2( arHandle->xsize, arHandle->ysize,
&(arHandle->labelInfo), arHandle->arImageProcMode,
AR_AREA_MAX, AR_AREA_MIN, AR_SQUARE_FIT_THRESH,
                             arHandle->markerInfo2, &(arHandle->marker2_num) ) < 0 ) {
            return -1;
        }

        if( arGetMarkerInfo(dataPtr, arHandle->xsize, arHandle->ysize, arHandle->arPixelFormat,
arHandle->markerInfo2, arHandle->marker2_num,
arHandle->pattHandle, arHandle->arImageProcMode,
arHandle->arPatternDetectionMode, &(arHandle->arParamLT->paramLTf), arHandle->pattRatio,
arHandle->markerInfo, &(arHandle->marker_num),
                            arHandle->matrixCodeType ) < 0 ) {
            return -1;
        }
    } // !detectionIsDone

    // If history mode is not enabled, just perform a basic confidence cutoff.
    if (arHandle->arMarkerExtractionMode == AR_NOUSE_TRACKING_HISTORY) {
        confidenceCutoff(arHandle);
        return 0;
    }

    /*------------------------------------------------------------*/

    // For all history records, check every identified marker, to see if the position and size of the marker
// as recorded in the history record is very similar to one of the identified markers.
// If it is, and the history record has a higher confidence value, then use the pattern matching
// information (marker ID, confidence, and direction) info from the history instead.
    for( i = 0; i < arHandle->history_num; i++ ) {
        rlenmin = 0.5;
        cid = -1;
        for( j = 0; j < arHandle->marker_num; j++ ) {
            rarea = (ARdouble)arHandle->history[i].marker.area / (ARdouble)arHandle->markerInfo[j].area;
            if( rarea < 0.7 || rarea > 1.43 ) continue;
            rlen = ( (arHandle->markerInfo[j].pos[0] - arHandle->history[i].marker.pos[0])
                   * (arHandle->markerInfo[j].pos[0] - arHandle->history[i].marker.pos[0])
                   + (arHandle->markerInfo[j].pos[1] - arHandle->history[i].marker.pos[1])
                   * (arHandle->markerInfo[j].pos[1] - arHandle->history[i].marker.pos[1]) )
                   / arHandle->markerInfo[j].area;
            if( rlen < rlenmin ) {
                rlenmin = rlen;
                cid = j;
            }
        }

        if (cid >= 0) {
if (arHandle->arPatternDetectionMode == AR_TEMPLATE_MATCHING_COLOR || arHandle->arPatternDetectionMode == AR_TEMPLATE_MATCHING_MONO || arHandle->arPatternDetectionMode == AR_MATRIX_CODE_DETECTION) {
if (arHandle->markerInfo[cid].cf < arHandle->history[i].marker.cf) {
arHandle->markerInfo[cid].cf = arHandle->history[i].marker.cf;
arHandle->markerInfo[cid].id = arHandle->history[i].marker.id;
                    diffmin = 10000.0 * 10000.0;
                    cdir = -1;
                    for( j = 0; j < 4; j++ ) {
                        diff = 0;
                        for( k = 0; k < 4; k++ ) {
                            diff += (arHandle->history[i].marker.vertex[k][0] - arHandle->markerInfo[cid].vertex[(j+k)%4][0])
                                  * (arHandle->history[i].marker.vertex[k][0] - arHandle->markerInfo[cid].vertex[(j+k)%4][0])
                                  + (arHandle->history[i].marker.vertex[k][1] - arHandle->markerInfo[cid].vertex[(j+k)%4][1])
                                  * (arHandle->history[i].marker.vertex[k][1] - arHandle->markerInfo[cid].vertex[(j+k)%4][1]);
                        }
                        if( diff < diffmin ) {
                            diffmin = diff;
                            cdir = (arHandle->history[i].marker.dir - j + 4) % 4;
                        }
                    }
                    arHandle->markerInfo[cid].dir = cdir;
// Copy the id, cf, and dir back to the appropriate mode-dependent values too.
if (arHandle->arPatternDetectionMode == AR_TEMPLATE_MATCHING_COLOR || arHandle->arPatternDetectionMode == AR_TEMPLATE_MATCHING_MONO) {
arHandle->markerInfo[cid].idPatt = arHandle->markerInfo[cid].id;
arHandle->markerInfo[cid].cfPatt = arHandle->markerInfo[cid].cf;
arHandle->markerInfo[cid].dirPatt = arHandle->markerInfo[cid].dir;
} else {
arHandle->markerInfo[cid].idMatrix = arHandle->markerInfo[cid].id;
arHandle->markerInfo[cid].cfMatrix = arHandle->markerInfo[cid].cf;
arHandle->markerInfo[cid].dirMatrix = arHandle->markerInfo[cid].dir;
}
}
} else if (arHandle->arPatternDetectionMode == AR_TEMPLATE_MATCHING_COLOR_AND_MATRIX || arHandle->arPatternDetectionMode == AR_TEMPLATE_MATCHING_MONO_AND_MATRIX) {
if (arHandle->markerInfo[cid].cfPatt < arHandle->history[i].marker.cfPatt || arHandle->markerInfo[cid].cfMatrix < arHandle->history[i].marker.cfMatrix) {
arHandle->markerInfo[cid].cfPatt = arHandle->history[i].marker.cfPatt;
arHandle->markerInfo[cid].idPatt = arHandle->history[i].marker.idPatt;
arHandle->markerInfo[cid].cfMatrix = arHandle->history[i].marker.cfMatrix;
arHandle->markerInfo[cid].idMatrix = arHandle->history[i].marker.idMatrix;
                    diffmin = 10000.0 * 10000.0;
                    cdir = -1;
                    for( j = 0; j < 4; j++ ) {
                        diff = 0;
                        for( k = 0; k < 4; k++ ) {
                            diff += (arHandle->history[i].marker.vertex[k][0] - arHandle->markerInfo[cid].vertex[(j+k)%4][0])
                                  * (arHandle->history[i].marker.vertex[k][0] - arHandle->markerInfo[cid].vertex[(j+k)%4][0])
                                  + (arHandle->history[i].marker.vertex[k][1] - arHandle->markerInfo[cid].vertex[(j+k)%4][1])
                                  * (arHandle->history[i].marker.vertex[k][1] - arHandle->markerInfo[cid].vertex[(j+k)%4][1]);
                        }
                        if( diff < diffmin ) {
                            diffmin = diff;
                            cdir = j;
                        }
                    }
                    arHandle->markerInfo[cid].dirPatt = (arHandle->history[i].marker.dirPatt - cdir + 4) % 4;
                    arHandle->markerInfo[cid].dirMatrix = (arHandle->history[i].marker.dirMatrix - cdir + 4) % 4;
}
}
            else return -1; // Unsupported arPatternDetectionMode.
        } // cid >= 0
    }

    confidenceCutoff(arHandle);

    // Age all history records (and expire old records, i.e. where count >= 4).
    for( i = j = 0; i < arHandle->history_num; i++ ) {
        arHandle->history[i].count++;
        if( arHandle->history[i].count < 4 ) {
if (i != j) arHandle->history[j] = arHandle->history[i];
j++;
}
}
    arHandle->history_num = j;

    // Save current marker info in history.
    for( i = 0; i < arHandle->marker_num; i++ ) {
        if( arHandle->markerInfo[i].id < 0 ) continue;

        // Check if an ARTrackingHistory record already exists for this marker ID.
        for( j = 0; j < arHandle->history_num; j++ ) {
            if( arHandle->history[j].marker.id == arHandle->markerInfo[i].id ) break;
        }
        if( j == arHandle->history_num ) { // If a pre-existing ARTrackingHistory record was not found,
            if( arHandle->history_num == AR_SQUARE_MAX ) break; // exit if we've filled all available history slots,
            arHandle->history_num++; // Otherwise count the newly created record.
        }
        arHandle->history[j].marker = arHandle->markerInfo[i]; // Save the marker info.
        arHandle->history[j].count = 1; // Reset count to indicate info is fresh.
    }

    if( arHandle->arMarkerExtractionMode == AR_USE_TRACKING_HISTORY_V2 ) {
        return 0;
    }

    for( i = 0; i < arHandle->history_num; i++ ) {
        for( j = 0; j < arHandle->marker_num; j++ ) {
            rarea = (ARdouble)arHandle->history[i].marker.area / (ARdouble)arHandle->markerInfo[j].area;
            if( rarea < 0.7 || rarea > 1.43 ) continue;
            rlen = ( (arHandle->markerInfo[j].pos[0] - arHandle->history[i].marker.pos[0])
                   * (arHandle->markerInfo[j].pos[0] - arHandle->history[i].marker.pos[0])
                   + (arHandle->markerInfo[j].pos[1] - arHandle->history[i].marker.pos[1])
                   * (arHandle->markerInfo[j].pos[1] - arHandle->history[i].marker.pos[1]) )
                   / arHandle->markerInfo[j].area;
            if( rlen < 0.5 ) break;
        }
        if( j == arHandle->marker_num ) {
            arHandle->markerInfo[arHandle->marker_num] = arHandle->history[i].marker;
            arHandle->marker_num++;
        }
    }

    return 0;
}
arDetectMarker
2. nftSimple -- Linux version
static void mainLoop(void)
{
static int ms_prev;
int ms;
float s_elapsed;
    ARUint8 *image;

    // NFT results.
    static int detectedPage = -2; // -2 Tracking not inited, -1 tracking inited OK, >= 0 tracking online on page.
    static float trackingTrans[3][4];

    int i, j, k;

    // Find out how long since mainLoop() last ran.
ms = glutGet(GLUT_ELAPSED_TIME);
s_elapsed = (float)(ms - ms_prev) * 0.001f;
if (s_elapsed < 0.01f) return; // Don't update more often than 100 Hz.
    ms_prev = ms;

    // Update drawing.
    DrawCubeUpdate(s_elapsed);

    // Grab a video frame.
    if ((image = arVideoGetImage()) != NULL) {
        gARTImage = image; // Save the fetched image.

        gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.

        // Run marker detection on frame.
if (threadHandle) {
// Perform NFT tracking.
float err;
int ret;
            int pageNo;

            if( detectedPage == -2 ) {
                trackingInitStart( threadHandle, gARTImage ); // --> (1)
                detectedPage = -1;
            }
            if( detectedPage == -1 ) {
                ret = trackingInitGetResult( threadHandle, trackingTrans, &pageNo); // --> (2) identify the object
                if( ret == 1 ) {
                    if (pageNo >= 0 && pageNo < surfaceSetCount) {
ARLOGd("Detected page %d.\n", pageNo);
detectedPage = pageNo;
ar2SetInitTrans(surfaceSet[detectedPage], trackingTrans); //--> (3)
} else {
ARLOGe("Detected bad page %d.\n", pageNo);
                        detectedPage = -2;
                    }
                } else if( ret < 0 ) {
                    ARLOGd("No page detected.\n");
                    detectedPage = -2;
                }
            }
            if( detectedPage >= 0 && detectedPage < surfaceSetCount) {
                if( ar2Tracking(ar2Handle, surfaceSet[detectedPage], gARTImage, trackingTrans, &err) < 0 ) { // --> track the object
                    ARLOGd("Tracking lost.\n");
                    detectedPage = -2;
                } else {
                    ARLOGd("Tracked page %d (max %d).\n", detectedPage, surfaceSetCount - 1);
                }
}
}
} else {
ARLOGe("Error: threadHandle\n");
            detectedPage = -2;
        }

        // Update markers.
        for (i = 0; i < markersNFTCount; i++) {
            markersNFT[i].validPrev = markersNFT[i].valid;
            if (markersNFT[i].pageNo >= 0 && markersNFT[i].pageNo == detectedPage) {
                markersNFT[i].valid = TRUE;
                for (j = 0; j < 3; j++) for (k = 0; k < 4; k++) markersNFT[i].trans[j][k] = trackingTrans[j][k];
            }
            else markersNFT[i].valid = FALSE;
            if (markersNFT[i].valid) {

                // Filter the pose estimate.
                if (markersNFT[i].ftmi) {
                    if (arFilterTransMat(markersNFT[i].ftmi, markersNFT[i].trans, !markersNFT[i].validPrev) < 0) {
ARLOGe("arFilterTransMat error with marker %d.\n", i);
}
                }

                if (!markersNFT[i].validPrev) {
                    // Marker has become visible, tell any dependent objects.
                    // --->
                }

                // We have a new pose, so set that.
                arglCameraViewRH((const ARdouble (*)[4])markersNFT[i].trans, markersNFT[i].pose.T, VIEW_SCALEFACTOR);
                // Tell any dependent objects about the update.
                // --->

            } else {

                if (markersNFT[i].validPrev) {
                    // Marker has ceased to be visible, tell any dependent objects.
                    // --->
                }
            }
        }

        // Tell GLUT the display has changed.
        glutPostRedisplay();
    }
}
(1)
int trackingInitStart( THREAD_HANDLE_T *threadHandle, ARUint8 *imagePtr )
{
    TrackingInitHandle     *trackingInitHandle;

    if (!threadHandle || !imagePtr) {
        ARLOGe("trackingInitStart(): Error: NULL threadHandle or imagePtr.\n");
        return (-1);
    }

    trackingInitHandle = (TrackingInitHandle *)threadGetArg(threadHandle);
    if (!trackingInitHandle) {
        ARLOGe("trackingInitStart(): Error: NULL trackingInitHandle.\n");
        return (-1);
    }
    memcpy( trackingInitHandle->imagePtr, imagePtr, trackingInitHandle->imageSize );
    // Fill in the worker's argument -- apparently all this does is copy the frame.
    // Once the copy is made, threadStartSignal() wakes up the worker thread.
    threadStartSignal( threadHandle );

    return 0;
}
(2)
typedef struct {
KpmHandle *kpmHandle; // KPM-related data.
ARUint8 *imagePtr; // Pointer to image being tracked.
int imageSize; // Bytes per image.
float trans[3][4]; // Transform containing pose of tracked image.
int page; // Assigned page number of tracked image.
int flag; // Tracked successfully.
} TrackingInitHandle;
TrackingInitHandle
int trackingInitGetResult( THREAD_HANDLE_T *threadHandle, float trans[3][4], int *page )
{
    TrackingInitHandle     *trackingInitHandle;
    int i, j;

    if (!threadHandle || !trans || !page) {
        ARLOGe("trackingInitGetResult(): Error: NULL threadHandle or trans or page.\n");
        return (-1);
    }

    if( threadGetStatus( threadHandle ) == 0 ) return 0;
    threadEndWait( threadHandle );
    trackingInitHandle = (TrackingInitHandle *)threadGetArg(threadHandle);
    if (!trackingInitHandle) return (-1);
    if( trackingInitHandle->flag ) {
        for (j = 0; j < 3; j++) {
            for (i = 0; i < 4; i++)
                trans[j][i] = trackingInitHandle->trans[j][i]; // The real work happens in the other thread; this one just collects the result.
        }
        *page = trackingInitHandle->page;
        return 1;
    }

    return -1;
}
(3)
int ar2SetInitTrans( AR2SurfaceSetT *surfaceSet, float trans[3][4] )
{
    int    i, j;

    if( surfaceSet == NULL ) return -1;

    surfaceSet->contNum = 1;
    for( j = 0; j < 3; j++ ) {
        for( i = 0; i < 4; i++ )
            surfaceSet->trans1[j][i] = trans[j][i];
    }
    surfaceSet->prevFeature[0].flag = -1;

    return 0;
}
Next, the thread that does the actual tracking work.
int main(int argc, char** argv)
{
    char glutGamemode[32];
    const char *cparam_name = "Data2/camera_para.dat";
    char vconf[] = "";
    const char markerConfigDataFilename[] = "Data2/markers.dat";

#ifdef DEBUG
    arLogLevel = AR_LOG_LEVEL_DEBUG;
#endif

    //
// Library inits.
    //

    glutInit(&argc, argv);

    //
    // Video setup.
    //

#ifdef _WIN32
    CoInitialize(NULL);
#endif

    if (!setupCamera(cparam_name, vconf, &gCparamLT)) {
ARLOGe("main(): Unable to set up AR camera.\n");
        exit(-1);
    }

    //
    // AR init.
    //

    // Create the OpenGL projection from the calibrated camera parameters.
    arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, cameraLens);

    if (!initNFT(gCparamLT, arVideoGetPixelFormat())) { // --> (1)
ARLOGe("main(): Unable to init NFT.\n");
        exit(-1);
    }

    //
    // Graphics setup.
    //

    // Set up GL context(s) for OpenGL to draw into.
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);
if (!prefWindowed) {
if (prefRefresh) sprintf(glutGamemode, "%ix%i:%i@%i", prefWidth, prefHeight, prefDepth, prefRefresh);
else sprintf(glutGamemode, "%ix%i:%i", prefWidth, prefHeight, prefDepth);
glutGameModeString(glutGamemode);
glutEnterGameMode();
} else {
glutInitWindowSize(gCparamLT->param.xsize, gCparamLT->param.ysize);
        glutCreateWindow(argv[0]);
    }

    // Setup ARgsub_lite library for current OpenGL context.
if ((gArglSettings = arglSetupForCurrentContext(&(gCparamLT->param), arVideoGetPixelFormat())) == NULL) {
ARLOGe("main(): arglSetupForCurrentContext() returned error.\n");
cleanup();
        exit(-1);
    }

    arUtilTimerReset();

    //
    // Markers setup.
    //

    // Load marker(s).
    newMarkers(markerConfigDataFilename, &markersNFT, &markersNFTCount); // --> (2)
    if (!markersNFTCount) {
ARLOGe("Error loading markers from config. file '%s'.\n", markerConfigDataFilename);
cleanup();
        exit(-1);
    }
    ARLOGi("Marker count = %d\n", markersNFTCount);

    // Marker data has been loaded, so now load NFT data.
    if (!loadNFTData()) { // --> (3)
ARLOGe("Error loading NFT data.\n");
cleanup();
        exit(-1);
    }

    // Start the video.
    if (arVideoCapStart() != 0) {
ARLOGe("setupCamera(): Unable to begin camera data capture.\n");
return (FALSE);
    }

    // Register GLUT event-handling callbacks.
// NB: mainLoop() is registered by Visibility.
glutDisplayFunc(Display);
glutReshapeFunc(Reshape);
glutVisibilityFunc(Visibility);
    glutKeyboardFunc(Keyboard);

    glutMainLoop();

    return (0);
}
(1)
// Modifies globals: kpmHandle, ar2Handle.
static int initNFT(ARParamLT *cparamLT, AR_PIXEL_FORMAT pixFormat)
{
ARLOGd("Initialising NFT.\n");
//
// NFT init.
    //

    // KPM init. (Key Point Matching)
    kpmHandle = kpmCreateHandle(cparamLT, pixFormat);
if (!kpmHandle) {
ARLOGe("Error: kpmCreateHandle.\n");
return (FALSE);
}
    //kpmSetProcMode( kpmHandle, KpmProcHalfSize );

    // AR2 init.
if( (ar2Handle = ar2CreateHandle(cparamLT, pixFormat, AR2_TRACKING_DEFAULT_THREAD_NUM)) == NULL ) {
ARLOGe("Error: ar2CreateHandle.\n");
kpmDeleteHandle(&kpmHandle);
return (FALSE);
}
    if (threadGetCPU() <= 1) {
        ARLOGi("Using NFT tracking settings for a single CPU.\n");
        ar2SetTrackingThresh(ar2Handle, 5.0);
        ar2SetSimThresh(ar2Handle, 0.50);
        ar2SetSearchFeatureNum(ar2Handle, 16);
        ar2SetSearchSize(ar2Handle, 6);
        ar2SetTemplateSize1(ar2Handle, 6);
        ar2SetTemplateSize2(ar2Handle, 6);
    } else {
        ARLOGi("Using NFT tracking settings for more than one CPU.\n");
        ar2SetTrackingThresh(ar2Handle, 5.0);
        ar2SetSimThresh(ar2Handle, 0.50);
        ar2SetSearchFeatureNum(ar2Handle, 16);
        ar2SetSearchSize(ar2Handle, 12);
        ar2SetTemplateSize1(ar2Handle, 6);
        ar2SetTemplateSize2(ar2Handle, 6);
    }
// NFT dataset loading will happen later.
return (TRUE);
}
(2)
typedef struct _ARMarkerNFT {
// ARMarker protected
bool valid;
bool validPrev;
ARdouble trans[3][4];
ARPose pose;
ARdouble marker_width;
ARdouble marker_height;
// ARMarker private
ARFilterTransMatInfo *ftmi;
ARdouble filterCutoffFrequency;
ARdouble filterSampleRate;
// ARMarkerNFT
int pageNo;
char *datasetPathname;
} ARMarkerNFT;
ARMarkerNFT
void newMarkers(const char *markersConfigDataFilePathC, ARMarkerNFT **markersNFT_out, int *markersNFTCount_out)
{
FILE *fp;
char buf[MAXPATHLEN], buf1[MAXPATHLEN];
int tempI;
ARMarkerNFT *markersNFT;
int markersNFTCount;
ARdouble tempF;
int i;
char markersConfigDataDirC[MAXPATHLEN];
    size_t markersConfigDataDirCLen;

    if (!markersConfigDataFilePathC || markersConfigDataFilePathC[0] == '\0' || !markersNFT_out || !markersNFTCount_out) return;

    // Load the marker data file.
    ARLOGd("Opening marker config. data file from path '%s'.\n", markersConfigDataFilePathC);
    arUtilGetDirectoryNameFromPath(markersConfigDataDirC, markersConfigDataFilePathC, MAXPATHLEN, 1); // 1 = add '/' at end.
markersConfigDataDirCLen = strlen(markersConfigDataDirC);
if ((fp = fopen(markersConfigDataFilePathC, "r")) == NULL) {
ARLOGe("Error: unable to locate marker config data file '%s'.\n", markersConfigDataFilePathC);
return;
    }

    // First line is number of markers to read.
    get_buff(buf, MAXPATHLEN, fp, 1);
    if (sscanf(buf, "%d", &tempI) != 1) {
        ARLOGe("Error in marker configuration data file; expected marker count.\n");
        fclose(fp);
        return;
    }

    arMallocClear(markersNFT, ARMarkerNFT, tempI);
    markersNFTCount = tempI;

    ARLOGd("Reading %d marker configuration(s).\n", markersNFTCount);

    for (i = 0; i < markersNFTCount; i++) {

        // Read marker name.
        if (!get_buff(buf, MAXPATHLEN, fp, 1)) {
ARLOGe("Error in marker configuration data file; expected marker name.\n");
break;
        }

        // Read marker type.
        if (!get_buff(buf1, MAXPATHLEN, fp, 1)) {
ARLOGe("Error in marker configuration data file; expected marker type.\n");
break;
        }

        // Interpret marker type, and read more data.
        if (strcmp(buf1, "SINGLE") == 0) {
ARLOGe("Error in marker configuration data file; SINGLE markers not supported in this build.\n");
} else if (strcmp(buf1, "MULTI") == ) {
ARLOGe("Error in marker configuration data file; MULTI markers not supported in this build.\n");
} else if (strcmp(buf1, "NFT") == ) {
markersNFT[i].valid = markersNFT[i].validPrev = FALSE;
            arMalloc(markersNFT[i].datasetPathname, char, markersConfigDataDirCLen + strlen(buf) + 1);
strcpy( markersNFT[i].datasetPathname, markersConfigDataDirC);
strcpy( markersNFT[i].datasetPathname + markersConfigDataDirCLen, buf);
            markersNFT[i].pageNo = -1;
} else {
ARLOGe("Error in marker configuration data file; unsupported marker type %s.\n", buf1);
        }

        // Look for optional tokens. A blank line marks end of options.
        while (get_buff(buf, MAXPATHLEN, fp, 0) && (buf[0] != '\0')) {

            if (strncmp(buf, "FILTER", 6) == 0) {
markersNFT[i].filterCutoffFrequency = AR_FILTER_TRANS_MAT_CUTOFF_FREQ_DEFAULT;
markersNFT[i].filterSampleRate = AR_FILTER_TRANS_MAT_SAMPLE_RATE_DEFAULT;
                if (strlen(buf) != 6) {
                    if (sscanf(&buf[6],
#ifdef ARDOUBLE_IS_FLOAT
"%f"
#else
"%lf"
#endif
                               , &tempF) == 1) markersNFT[i].filterCutoffFrequency = tempF;
}
markersNFT[i].ftmi = arFilterTransMatInit(markersNFT[i].filterSampleRate, markersNFT[i].filterCutoffFrequency);
}
// Unknown tokens are ignored.
}
}
    fclose(fp);

    // If not all markers were read, an error occurred.
    if (i < markersNFTCount) {

        // Clean up.
        for (; i >= 0; i--) {
            if (markersNFT[i].datasetPathname) free(markersNFT[i].datasetPathname);
            if (markersNFT[i].ftmi) arFilterTransMatFinal(markersNFT[i].ftmi);
        }
        free(markersNFT);

        *markersNFTCount_out = 0;
        *markersNFT_out = NULL;
        return;
    }

    *markersNFTCount_out = markersNFTCount;
*markersNFT_out = markersNFT;
}
(3)
// References globals: markersNFTCount
// Modifies globals: threadHandle, surfaceSet[], surfaceSetCount, markersNFT[]
static int loadNFTData(void)
{
int i;
    KpmRefDataSet *refDataSet;

    // If data was already loaded, stop KPM tracking thread and unload previously loaded data.
if (threadHandle) {
ARLOGi("Reloading NFT data.\n");
unloadNFTData();
} else {
ARLOGi("Loading NFT data.\n");
    }

    refDataSet = NULL;

    for (i = 0; i < markersNFTCount; i++) {
// Load KPM data.
KpmRefDataSet *refDataSet2;
ARLOGi("Reading %s.fset3\n", markersNFT[i].datasetPathname);
        if (kpmLoadRefDataSet(markersNFT[i].datasetPathname, "fset3", &refDataSet2) < 0) {
            ARLOGe("Error reading KPM data from %s.fset3\n", markersNFT[i].datasetPathname);
            markersNFT[i].pageNo = -1;
continue;
}
markersNFT[i].pageNo = surfaceSetCount;
ARLOGi(" Assigned page no. %d.\n", surfaceSetCount);
        if (kpmChangePageNoOfRefDataSet(refDataSet2, KpmChangePageNoAllPages, surfaceSetCount) < 0) {
            ARLOGe("Error: kpmChangePageNoOfRefDataSet\n");
            exit(-1);
        }
        if (kpmMergeRefDataSet(&refDataSet, &refDataSet2) < 0) {
            ARLOGe("Error: kpmMergeRefDataSet\n");
            exit(-1);
}
ARLOGi(" Done.\n"); // Load AR2 data.
ARLOGi("Reading %s.fset\n", markersNFT[i].datasetPathname); if ((surfaceSet[surfaceSetCount] = ar2ReadSurfaceSet(markersNFT[i].datasetPathname, "fset", NULL)) == NULL ) {
ARLOGe("Error reading data from %s.fset\n", markersNFT[i].datasetPathname);
}
ARLOGi(" Done.\n"); surfaceSetCount++;
if (surfaceSetCount == PAGES_MAX) break;
}
    if (kpmSetRefDataSet(kpmHandle, refDataSet) < 0) {
        ARLOGe("Error: kpmSetRefDataSet\n");
        exit(-1);
    }
    kpmDeleteRefDataSet(&refDataSet);

    // Start the KPM tracking thread.
    threadHandle = trackingInitInit(kpmHandle); // --> (4)
    if (!threadHandle) exit(-1);

    ARLOGi("Loading of NFT data complete.\n");
return (TRUE);
}
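The dataset pathnames in markers.dat name file triplets that are generated offline; assuming the standard ARToolKit5 NFT workflow (the genTexData training tool, with pinball as an illustrative basename), loadNFTData() expects:

// pinball.iset  -- the target image resampled at several resolutions.
// pinball.fset  -- the AR2 texture-tracking feature set (read above by ar2ReadSurfaceSet()).
// pinball.fset3 -- the KPM keypoint feature set (read above by kpmLoadRefDataSet()).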
(4)
THREAD_HANDLE_T *trackingInitInit( KpmHandle *kpmHandle )
{
TrackingInitHandle *trackingInitHandle;
    THREAD_HANDLE_T        *threadHandle;

    if (!kpmHandle) {
        ARLOGe("trackingInitInit(): Error: NULL KpmHandle.\n");
        return (NULL);
    }

    trackingInitHandle = (TrackingInitHandle *)malloc(sizeof(TrackingInitHandle));
    if( trackingInitHandle == NULL ) return NULL;
    trackingInitHandle->kpmHandle = kpmHandle;
    trackingInitHandle->imageSize = kpmHandleGetXSize(kpmHandle) * kpmHandleGetYSize(kpmHandle) * arUtilGetPixelSize(kpmHandleGetPixelFormat(kpmHandle));
    trackingInitHandle->imagePtr  = (ARUint8 *)malloc(trackingInitHandle->imageSize);
    trackingInitHandle->flag      = 0;

    threadHandle = threadInit(0, trackingInitHandle, trackingInitMain);
return threadHandle;
}
THREAD_HANDLE_T *threadInit( int ID, void *arg, void *(*start_routine)(THREAD_HANDLE_T*) )
{
THREAD_HANDLE_T *flag;
int err;
#if !defined(_WINRT) && !defined(ARUTIL_DISABLE_PTHREADS)
pthread_t thread;
pthread_attr_t attr;
#endif
    if ((flag = malloc(sizeof(THREAD_HANDLE_T))) == NULL) return NULL;

    flag->ID     = ID;
    flag->startF = 0;
    flag->endF   = 0;
    flag->busyF  = 0;
    flag->arg    = arg;
    pthread_mutex_init( &(flag->mut), NULL );
    pthread_cond_init( &(flag->cond1), NULL );
    pthread_cond_init( &(flag->cond2), NULL );

#if !defined(_WINRT) && !defined(ARUTIL_DISABLE_PTHREADS)
    pthread_attr_init(&attr); // Initialize a thread attributes object.
    pthread_attr_setdetachstate(&attr, 1); // Preclude the need to do pthread_join on the thread after it exits.
err = pthread_create(&thread, &attr, (void *(*)(void*))start_routine, flag); // --> (5)
pthread_attr_destroy(&attr);
#elif defined(_WIN32)
# ifdef _WINRT
err = arCreateDetachedThreadWinRT(start_routine, flag);
# else
struct start_routine_proxy_arg *srpa_p = malloc(sizeof(struct start_routine_proxy_arg));
srpa_p->start_routine = start_routine;
srpa_p->arg = flag;
err = (_beginthread(start_routine_proxy, 0, srpa_p) == -1L);
# endif
#else
# error No routine available to create a thread.
#endif
    if (err == 0) {
        return flag;
    } else {
        threadFree(&flag);
        return NULL;
    }
}
(5)
static void *trackingInitMain( THREAD_HANDLE_T *threadHandle )
{
TrackingInitHandle *trackingInitHandle;
KpmHandle *kpmHandle;
KpmResult *kpmResult = NULL;
int kpmResultNum;
ARUint8 *imagePtr;
float err;
    int i, j, k;

    if (!threadHandle) {
        ARLOGe("Error starting tracking thread: empty THREAD_HANDLE_T.\n");
        return (NULL);
    }
    trackingInitHandle = (TrackingInitHandle *)threadGetArg(threadHandle);
    if (!trackingInitHandle) {
        ARLOGe("Error starting tracking thread: empty trackingInitHandle.\n");
        return (NULL);
    }
    kpmHandle = trackingInitHandle->kpmHandle;
    imagePtr  = trackingInitHandle->imagePtr;
    if (!kpmHandle || !imagePtr) {
        ARLOGe("Error starting tracking thread: empty kpmHandle/imagePtr.\n");
        return (NULL);
    }
    ARLOGi("Start tracking thread.\n");

    kpmGetResult( kpmHandle, &kpmResult, &kpmResultNum ); // Pull these two result fields out of kpmHandle for convenience.
    /* Found you at last, kpmMatching()! See you in the next chapter. */
    for (;;) {
        if( threadStartWait(threadHandle) < 0 ) break;

        kpmMatching(kpmHandle, imagePtr);
        trackingInitHandle->flag = 0;
        for( i = 0; i < kpmResultNum; i++ ) {
            if( kpmResult[i].camPoseF != 0 ) continue;
            ARLOGd("kpmGetPose OK.\n");
            if( trackingInitHandle->flag == 0 || err > kpmResult[i].error ) { // Take the first or best result.
                trackingInitHandle->flag = 1;
                trackingInitHandle->page = kpmResult[i].pageNo;
                for (j = 0; j < 3; j++) for (k = 0; k < 4; k++) trackingInitHandle->trans[j][k] = kpmResult[i].camPose[j][k];
                err = kpmResult[i].error;
            }
        }
        threadEndSignal(threadHandle);
    }

    ARLOGi("End tracking thread.\n");
return (NULL);
}
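Putting the pieces together, the handshake between mainLoop() and the KPM worker can be summarized like this (a recap of the functions shown above, not new API):

// Main thread                                     KPM worker thread (trackingInitMain)
// trackingInitStart():                            for (;;) {
//     memcpy frame into imagePtr                      threadStartWait();  // Sleep until signalled.
//     threadStartSignal()  ------------------->       kpmMatching();      // Search the frame for any page.
//                                                     set flag/page/trans from the best result;
// trackingInitGetResult():                            threadEndSignal();  // Publish the result.
//     threadGetStatus() == 0 -> return 0 (busy)   }
//     threadEndWait()      <--------------------
//     copy trans/page out; return 1 (hit) or -1 (miss)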
See: [Artoolkit] kpmMatching Analysis of nftSimple