【ARToolkit】How to make your own marker pattern (patt)

First, a quick explanation of why the camera can recognize a marker card and make the virtual object we want appear on top of it.

The principle works in three steps. Step 1: draw a virtual object using OpenGL; the code that draws this object is implemented with ordinary OpenGL calls. Step 2: make a new template, that is, make your own marker card. To create a new template, first print the blank template (the first figure below). This is just a black square with an empty white square in the middle. Next, create a black-and-white or color image that fits inside that central square and print it out. A good template is asymmetric and does not contain very fine detail. Figure 2 shows some sample templates. Paste the finished pattern into the black square.


[Figure 1: the blank marker template, a black square with an empty white center]    [Figure 2: sample marker patterns]

Step 3 is the one we will focus on. First we need to give the new template a name. We will use the simpletest example from the previous article to walk through it.

In the simpletest example from the previous article, we need to change the line

char           *patt_name      = "Data/patt.hiro";

to

char           *patt_name      = "Data/patt.yourpatt";

The only difference between the two lines is the pattern filename: patt.hiro is the marker that the stock simpletest recognizes, while patt.yourpatt is the marker we are about to make. Of course, you can replace yourpatt with any other name you like.
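For reference, simpletest feeds that string to arLoadPatt() during initialization and keeps the returned id for later matching; the fragment below is only a sketch in the style of the stock init() (the patt_id variable name is taken from that sample):

// Sketch: load the trained pattern file named by patt_name (as in simpleTest's init()).
// The returned patt_id identifies this pattern in arDetectMarker() results.
if ((patt_id = arLoadPatt(patt_name)) < 0) {
    printf("pattern load error !!\n");
    exit(0);
}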

The code used to train a marker pattern (essentially ARToolKit's mk_patt utility) is pasted below.

// Headers needed to build this listing: standard C, GLUT/OpenGL, and the ARToolKit headers.
#ifdef _WIN32
#  include <windows.h>
#endif
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#ifdef __APPLE__
#  include <GLUT/glut.h>
#else
#  include <GL/glut.h>
#endif
#include <AR/config.h>
#include <AR/video.h>
#include <AR/param.h>
#include <AR/ar.h>
#include <AR/gsub_lite.h>

// Video configuration string, chosen per platform / video driver.
#if defined(__sgi)
char            *vconf = "-size=FULL";
#elif defined(__linux)
#  if defined(AR_INPUT_GSTREAMER)
char            *vconf = "videotestsrc";
#  elif defined(AR_INPUT_V4L)
char            *vconf = "-width=640 -height=480";
#  elif defined(AR_INPUT_1394CAM)
char            *vconf = "-mode=640x480_YUV411";
#  elif defined(AR_INPUT_DV)
char            *vconf = "";
#  endif
#elif defined(_WIN32)
char            *vconf = "Data\\WDM_camera_flipV.xml";
#elif defined(__APPLE__)
char            *vconf = "-width=640 -height=480";
#else
char            *vconf = "";
#endif


// Image acquisition.
static ARUint8 *gARTImage = NULL;
static ARUint8 *gARTsaveImage = NULL;


// Marker detection.
static int gARTThreshhold = 100;
static ARMarkerInfo* gTarget  = NULL;




// Drawing.
static ARParam gARTCparam;
static ARGL_CONTEXT_SETTINGS_REF gArglSettings = NULL;


void lineSeg(double x1, double y1, double x2, double y2, ARGL_CONTEXT_SETTINGS_REF contextSettings, ARParam cparam, double zoom)
{
    int     enable;
    float   ox, oy;
    double  xx1, yy1, xx2, yy2;

    if (!contextSettings) return;
    arglDistortionCompensationGet(contextSettings, &enable);
    if (arglDrawModeGet(contextSettings) == AR_DRAW_BY_TEXTURE_MAPPING && enable) {
        xx1 = x1;  yy1 = y1;
        xx2 = x2;  yy2 = y2;
    } else {
        arParamIdeal2Observ(cparam.dist_factor, x1, y1, &xx1, &yy1);
        arParamIdeal2Observ(cparam.dist_factor, x2, y2, &xx2, &yy2);
    }

    xx1 *= zoom; yy1 *= zoom;
    xx2 *= zoom; yy2 *= zoom;

    ox = 0;
    oy = cparam.ysize - 1;
    glBegin(GL_LINES);
    glVertex2f(ox + xx1, oy - yy1);
    glVertex2f(ox + xx2, oy - yy2);
    glEnd();
    glFlush();
}


static int setupCamera(ARParam *cparam)
{
    ARParam  wparam;
    char     name1[256], name2[256];
    int      xsize, ysize;

    printf("Enter camera parameter filename");
    printf("(Data/camera_para.dat): ");
    if (fgets(name1, 256, stdin) == NULL) exit(0);
    if (sscanf(name1, "%s", name2) != 1) {
        strcpy(name2, "Data/camera_para.dat");
    }

    // Load the camera parameters.
    if (arParamLoad(name2, 1, &wparam) < 0) {
        printf("Parameter load error !!\n");
        return (FALSE);
    }

    // Open the video path.
    if (arVideoOpen(vconf) < 0) {
        fprintf(stderr, "setupCamera(): Unable to open connection to camera.\n");
        return (FALSE);
    }

    // Find the size of the window.
    if (arVideoInqSize(&xsize, &ysize) < 0) return (FALSE);
    fprintf(stdout, "Camera image size (x,y) = (%d,%d)\n", xsize, ysize);

    // Resize for the window and init.
    arParamChangeSize(&wparam, xsize, ysize, cparam);
    fprintf(stdout, "*** Camera Parameter ***\n");
    arParamDisp(cparam);

    arInitCparam(cparam);

    if (arVideoCapStart() != 0) {
        fprintf(stderr, "setupCamera(): Unable to begin camera data capture.\n");
        return (FALSE);
    }

    return (TRUE);
}


static void Quit(void)
{
    free(gARTsaveImage); gARTsaveImage = NULL;
    arglCleanup(gArglSettings);
    arVideoCapStop();
    arVideoClose();
    exit(0);
}


static void Keyboard(unsigned char key, int x, int y)
{
    switch (key) {
        case 0x1B: // Quit.
        case 'Q':
        case 'q':
            Quit();
            break;
        case 'T':
        case 't':
            printf("Enter new threshold value (default = 100): ");
            scanf("%d", &gARTThreshhold); while (getchar() != '\n');
            printf("\n");
            break;
        case '?':
        case '/':
            printf("Keys:\n");
            printf(" q or [esc]    Quit demo.\n");
            printf(" t             Enter new binarization threshold value.\n");
            printf(" ? or /        Show this help.\n");
            printf("\nAdditionally, the ARVideo library supplied the following help text:\n");
            arVideoDispOption();
            break;
        default:
            break;
    }
}


static void Mouse(int button, int state, int x, int y)
{
    char   name1[256], name2[256];

    if (state == GLUT_DOWN) {
        if (button == GLUT_RIGHT_BUTTON) {
            Quit();
        } else if (button == GLUT_MIDDLE_BUTTON) {
            printf("Enter new threshold value (default = 100): ");
            scanf("%d", &gARTThreshhold); while (getchar() != '\n');
            printf("\n");
        } else if (button == GLUT_LEFT_BUTTON && gARTsaveImage && gTarget) {
            printf("Enter filename: ");
            if (fgets(name1, 256, stdin) == NULL) return;
            if (sscanf(name1, "%s", name2) != 1) return;
            if (arSavePatt(gARTsaveImage, gTarget, name2) < 0) {
                printf("ERROR!!\n");
            } else {
                printf("  Saved\n");
            }
        }
    }
}


static void Idle(void)
{
    static int      ms_prev;
    int             ms;
    float           s_elapsed;
    ARUint8        *image;
    int             areamax;
    ARMarkerInfo   *marker_info;    // Pointer to array holding the details of detected markers.
    int             marker_num;     // Count of number of markers detected.
    int             i;

    // Find out how long since Idle() last ran.
    ms = glutGet(GLUT_ELAPSED_TIME);
    s_elapsed = (float)(ms - ms_prev) * 0.001;
    if (s_elapsed < 0.01f) return; // Don't update more often than 100 Hz.
    ms_prev = ms;

    // Grab a video frame.
    if ((image = arVideoGetImage()) != NULL) {
        gARTImage = image;

        if (arDetectMarker(gARTImage, gARTThreshhold, &marker_info, &marker_num) < 0) {
            Quit();
        }

        // Keep only the largest detected marker as the current training target.
        areamax = 0;
        gTarget = NULL;
        for (i = 0; i < marker_num; i++) {
            if (marker_info[i].area > areamax) {
                areamax = marker_info[i].area;
                gTarget = &(marker_info[i]);
            }
        }
        memcpy(gARTsaveImage, gARTImage, gARTCparam.xsize * gARTCparam.ysize * AR_PIX_SIZE_DEFAULT);

        // Tell GLUT the display has changed.
        glutPostRedisplay();
    }
}


//
// This function is called on events when the visibility of the
// GLUT window changes (including when it first becomes visible).
//
static void Visibility(int visible)
{
    if (visible == GLUT_VISIBLE) {
        glutIdleFunc(Idle);
    } else {
        glutIdleFunc(NULL);
    }
}


//
// This function is called when the
// GLUT window is resized.
//
static void Reshape(int w, int h)
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glViewport(0, 0, (GLsizei) w, (GLsizei) h);

    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();

    // Call through to anyone else who needs to know about window sizing here.
}


static void beginOrtho2D(int xsize, int ysize) {
    glMatrixMode(GL_PROJECTION);
    glPushMatrix();
    glLoadIdentity();
    gluOrtho2D(0.0, xsize, 0.0, ysize);
    glMatrixMode(GL_MODELVIEW);
    glPushMatrix();
    glLoadIdentity();
}


static void endOrtho2D(void) {
    glMatrixMode(GL_PROJECTION);
    glPopMatrix();
    glMatrixMode(GL_MODELVIEW);
    glPopMatrix();
}


//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT); // Clear the buffers for new frame.

    arglDispImage(gARTImage, &gARTCparam, 1.0, gArglSettings); // zoom = 1.0.
    arVideoCapNext();
    gARTImage = NULL; // Image data is no longer valid after calling arVideoCapNext().

    // Outline the current target marker: two edges in green, two in red, so its orientation is visible.
    if (gTarget != NULL) {
        glDisable(GL_DEPTH_TEST);
        glDisable(GL_LIGHTING);
        glDisable(GL_TEXTURE_2D);
        beginOrtho2D(gARTCparam.xsize, gARTCparam.ysize);
        glLineWidth(2.0f);
        glColor3d(0.0, 1.0, 0.0);
        lineSeg(gTarget->vertex[0][0], gTarget->vertex[0][1],
                gTarget->vertex[1][0], gTarget->vertex[1][1], gArglSettings, gARTCparam, 1.0);
        lineSeg(gTarget->vertex[3][0], gTarget->vertex[3][1],
                gTarget->vertex[0][0], gTarget->vertex[0][1], gArglSettings, gARTCparam, 1.0);
        glColor3d(1.0, 0.0, 0.0);
        lineSeg(gTarget->vertex[1][0], gTarget->vertex[1][1],
                gTarget->vertex[2][0], gTarget->vertex[2][1], gArglSettings, gARTCparam, 1.0);
        lineSeg(gTarget->vertex[2][0], gTarget->vertex[2][1],
                gTarget->vertex[3][0], gTarget->vertex[3][1], gArglSettings, gARTCparam, 1.0);
        endOrtho2D();
    }

    glutSwapBuffers();
}


int main(int argc, char *argv[])
{
    // ----------------------------------------------------------------------------
    // Library inits.
    //

    glutInit(&argc, argv);

    // ----------------------------------------------------------------------------
    // Hardware setup.
    //

    if (!setupCamera(&gARTCparam)) {
        fprintf(stderr, "main(): Unable to set up AR camera.\n");
        exit(-1);
    }

    // ----------------------------------------------------------------------------
    // Library setup.
    //

    // Set up GL context(s) for OpenGL to draw into.
    glutInitDisplayMode(GLUT_RGB | GLUT_DOUBLE);
    glutInitWindowSize(gARTCparam.xsize, gARTCparam.ysize);
    glutCreateWindow(argv[0]);

    // Setup argl library for current context.
    if ((gArglSettings = arglSetupForCurrentContext()) == NULL) {
        fprintf(stderr, "main(): arglSetupForCurrentContext() returned error.\n");
        exit(-1);
    }

    arMalloc(gARTsaveImage, ARUint8, gARTCparam.xsize * gARTCparam.ysize * AR_PIX_SIZE_DEFAULT);

    // Register GLUT event-handling callbacks.
    // NB: Idle() is registered by Visibility.
    glutDisplayFunc(Display);
    glutReshapeFunc(Reshape);
    glutVisibilityFunc(Visibility);
    glutKeyboardFunc(Keyboard);
    glutMouseFunc(Mouse);

    glutMainLoop();

    return (0);
}
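If you want to build this listing yourself (rather than use the mk_patt binary that ships with ARToolKit), a Linux command along these lines is a reasonable starting point; the include/lib paths and the exact library list are assumptions and depend on how your ARToolKit was built:

gcc mk_patt.c -o mk_patt \
    -I/path/to/ARToolKit/include -L/path/to/ARToolKit/lib \
    -lARgsub_lite -lARvideo -lAR -lglut -lGLU -lGL -lm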

After running the program, it prompts you for the camera parameter file name; enter Data/camera_para.dat.
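In fact you can also just press Enter at the prompt, since setupCamera() falls back to Data/camera_para.dat when the line is empty. A typical session looks roughly like this (the 640x480 size is only an example and depends on your camera and vconf):

Enter camera parameter filename(Data/camera_para.dat): Data/camera_para.dat
Camera image size (x,y) = (640,480)
*** Camera Parameter ***
(arParamDisp() then prints the loaded calibration matrix)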

The window that appears looks like the figure below, except that the marker content is the new pattern you made yourself. [Figure 3: the program outlining the new marker]


As shown in the figure, once the marker has been found and is oriented correctly, click the left mouse button. The program then prompts you for a template filename; for example, enter patt.yourpatt. Once the filename is entered, the program generates a bitmap image of the template and writes it to a file with that name. That file is what ARToolKit later uses for template matching. To use the new template, copy the file into the bin/Data directory. After recompiling simpletest, you can use your own template. As with the first ARToolKit test program, the Data, wrl, and lib folders must all be placed alongside the .c file.
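Once simpletest has been rebuilt with the new pattern, its main loop matches each detection against the id returned by arLoadPatt(); the fragment below only sketches that logic in the style of the stock simpleTest (variable names such as patt_centre, patt_width and patt_trans are taken from that sample):

// Sketch: after arDetectMarker(dataPtr, thresh, &marker_info, &marker_num),
// pick the detection whose id matches our trained pattern, preferring the
// candidate with the highest confidence value (cf).
k = -1;
for (j = 0; j < marker_num; j++) {
    if (marker_info[j].id == patt_id) {
        if (k == -1) k = j;
        else if (marker_info[k].cf < marker_info[j].cf) k = j;
    }
}
if (k != -1) {
    // Estimate the marker-to-camera transformation used to place the virtual object.
    arGetTransMat(&marker_info[k], patt_centre, patt_width, patt_trans);
}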


From here, you only need to replace the drawing code in simpletest with code that draws whatever virtual object you want; that way any virtual image can be paired with any marker card you make (see the sketch below).
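A minimal draw step in the gsub_lite style might look like the sketch below; it is only an illustration, assuming the patt_trans pose from arGetTransMat() above and the gARTCparam camera parameters from the listing, with the near/far planes, scale, translation and cube size chosen as example values:

// Sketch: set the camera projection and the marker pose, then draw any
// OpenGL object so that it appears attached to the marker.
GLdouble p[16], m[16];

arglCameraFrustumRH(&gARTCparam, 10.0, 10000.0, p); // projection from the camera calibration (example near/far planes, in mm)
glMatrixMode(GL_PROJECTION);
glLoadMatrixd(p);

arglCameraViewRH(patt_trans, m, 1.0);                // modelview from the marker pose (scale 1.0 keeps millimetres)
glMatrixMode(GL_MODELVIEW);
glLoadMatrixd(m);

glEnable(GL_DEPTH_TEST);
glColor3f(0.0f, 0.0f, 1.0f);
glTranslatef(0.0f, 0.0f, 20.0f);                     // lift the 40 mm cube so it sits on the marker plane
glutSolidCube(40.0);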

The virtual image can be anything you care to draw, and the marker cards themselves can take many different forms as well.

















