EasyAR


What the EasyAR iOS SDK does:

It matches scene images (targets) and, once a target is matched, overlays an image, a 3D model, or a video on it.

What each class in the EasyAR iOS SDK does:

http://m.blog.csdn.net/article/details?id=49615303

Class file        Description
base.hpp          The base class that most EasyAR classes derive from.
augmenter.hpp     Augmenter is the renderer: it fetches frames from the tracker and renders the camera image as the background of the AR scene.
barcode.hpp       BarCodeScanner implements QR-code scanning and recognition.
camera.hpp        CameraDevice implements a camera device. CameraCalibration stores the camera's calibration data.
frame.hpp         Frame stores the tracking data: the current camera image, the tracked targets, and other information.
image.hpp         Image stores image data and represents an image in memory. It exposes the raw data as a byte array and provides accessors for width/height/stride and related information.
imagetarget.hpp   ImageTarget represents a planar image target that can be tracked by an ImageTracker.
imagetracker.hpp  ImageTracker implements target detection and tracking.
matrix.hpp        Matrix represents an m x n matrix.
player.hpp        VideoPlayer is the video-playback class. EasyAR supports ordinary video, transparent video, and streaming playback. The video content is rendered onto the texture passed to SetRenderTexture.
storage.hpp       StorageType indicates where images, JSON files, videos, and other files are stored. It specifies the root directory; all related interfaces accept paths relative to that root.
target.hpp        Target is the base class for everything that can be tracked by ImageTracker or other EasyAR algorithms. A target's properties only become valid after it has been loaded.
utility.hpp       (Author's note: as I understand it, SDK initialization and utility functions.)
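
Taken together these classes form a small pipeline: a CameraDevice supplies frames, an ImageTracker searches them for loaded ImageTargets, and an Augmenter draws the camera image as the AR background. The sketch below condenses the wiring from the full CameraView implementation later in this post (the capture size and the target file name are just examples):

// Minimal wiring sketch, condensed from the CameraView code below.
EasyAR::CameraDevice camera;
EasyAR::ImageTracker tracker;
EasyAR::Augmenter augmenter;
EasyAR::ImageTarget target;

camera.open();
camera.setSize(EasyAR::Vec2I(1280, 720));   // capture resolution
augmenter.attachCamera(camera);             // augmenter renders this camera's frames
tracker.attachCamera(camera);               // tracker searches the same frames

// Describe the target with the JSON format also used in loadImagePath below
target.load("{\"images\":[{\"image\":\"namecard.jpg\",\"name\":\"namecard\"}]}",
            EasyAR::kStorageAssets | EasyAR::kStorageJson);
tracker.loadTarget(target, 0);

camera.start();
tracker.start();
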
Rough workflow of EasyAR on iOS:

With the environment set up:

  1. Register the SDK with your key (a minimal sketch of this step follows the list);
  2. Create the OpenGL drawing context;
  3. Create the Augmenter (renderer);
  4. Create the CameraDevice and attach it to the renderer;
  5. Load the image(s) to be matched;
  6. Start the camera, render its frames through OpenGL, and start matching;
  7. The target is matched and tracked.
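
The CameraView code below does not show step 1, SDK registration. Here is a minimal sketch of that step, assuming the EasyAR 1.x C++ API where the key generated on the EasyAR developer site is passed to EasyAR::initialize (declared in utility.hpp); the key string and the AppDelegate placement are placeholders:

//  AppDelegate.mm -- SDK registration sketch (key and placement are placeholders)
#include "easyar/utility.hpp"

- (BOOL)application:(UIApplication *)application
didFinishLaunchingWithOptions:(NSDictionary *)launchOptions
{
    const char * key = "<your EasyAR key>";   // placeholder: key from the EasyAR developer site
    if (!EasyAR::initialize(key))
        NSLog(@"EasyAR initialize failed -- check the key and the bundle identifier");
    return YES;
}
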
EasyAR iOS implementation (the file uses the EasyAR C++ API, so it must be compiled as Objective-C++, e.g. CameraView.mm):
//
//  CameraView.m
//  EasyAR
//
//  Created by lv on 2016/12/27.
//  Copyright © 2016年 Albert. All rights reserved.
//

#import "CameraView.h"
#include "easyar/camera.hpp"
// system frameworks and EasyAR headers used below
#import <UIKit/UIKit.h>
#import <QuartzCore/QuartzCore.h>
#import <OpenGLES/EAGL.h>
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>
#include "easyar/frame.hpp"
#include "easyar/target.hpp"
#include "easyar/imagetarget.hpp"
#include "easyar/imagetracker.hpp"
#include "easyar/augmenter.hpp"
#include "easyar/matrix.hpp"
#include "easyar/utility.hpp"
#import "renderer.hpp"
#import "SDWebImageManager.h"
@interface CameraView () <CAAnimationDelegate>
{
    EasyAR::CameraDevice _cameraDevice;      // camera device
    EasyAR::ImageTarget _tar;                // the image target to match
    EasyAR::Augmenter _augmenter;            // renderer (draws the camera background)
    EasyAR::Vec2I view_size;
    EasyAR::Vec4I _viewport;
    EasyAR::samples::Renderer  _renderer;    // sample 3D renderer
    EasyAR::ImageTracker _tracker;           // target detection and tracking
    CALayer *_imageLayer;
    BOOL _portrait;                          // whether the UI is in portrait orientation
}
@property(nonatomic, strong) CADisplayLink *displayLink;   // per-frame timer that drives rendering
@property(nonatomic, strong) CAEAGLLayer * eaglLayer;
@property(nonatomic, strong) EAGLContext *context;         // OpenGL ES context
@property(nonatomic) GLuint colorRenderBuffer;
@end

@implementation CameraView

+ (Class)layerClass
{
    return [CAEAGLLayer class];
}
- (instancetype)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self) {
        
        [self initAll];
    }
    return self;
}
- (void)initAll {
    
    _portrait = NO;
    _augmenter = EasyAR::Augmenter();   // create the renderer
    [self setupGL];
    _renderer.init();
    [self initCamera];

}

- (void)resize:(CGRect)frame orientation:(UIInterfaceOrientation)orientation
{
    BOOL isPortrait = NO;
    switch (orientation)
    {
        case UIInterfaceOrientationPortrait:
        case UIInterfaceOrientationPortraitUpsideDown:
            isPortrait = YES;
            break;
        case UIInterfaceOrientationLandscapeLeft:
        case UIInterfaceOrientationLandscapeRight:
            isPortrait = NO;
            break;
        default:
            break;
    }
    [self setPortrait:isPortrait];
    [self resizeGL:frame.size.width height:frame.size.height];
}

- (void)setOrientation:(UIInterfaceOrientation)orientation
{
    switch (orientation)
    {
        case UIInterfaceOrientationPortrait:
            EasyAR::setRotationIOS(270);
            break;
        case UIInterfaceOrientationPortraitUpsideDown:
            EasyAR::setRotationIOS(90);
            break;
        case UIInterfaceOrientationLandscapeLeft:
            EasyAR::setRotationIOS(180);
            break;
        case UIInterfaceOrientationLandscapeRight:
            EasyAR::setRotationIOS(0);
            break;
        default:
            break;
    }
}
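
// Note on the angles above: the iOS camera sensor delivers frames in landscape
// orientation, so a portrait UI needs the camera image rotated by 270 degrees
// (90 when upside down), while the landscape cases map to 180 and 0.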

-(void)setPortrait:(BOOL)portrait
{
    _portrait = portrait;
}

-(void)resizeGL:(float )width height:(float)height
{
    EasyAR::Vec2I size = EasyAR::Vec2I(1, 1);
    if(_cameraDevice.isOpened())
        size = _cameraDevice.size();
    if (size[0] == 0 || size[1] == 0)
        return;
    if(_portrait)
        std::swap(size[0], size[1]);
    float scaleRatio = std::max((float)width / (float)size[0], (float)height / (float)size[1]);
    EasyAR::Vec2I viewport_size = EasyAR::Vec2I((int)(size[0] * scaleRatio), (int)(size[1] * scaleRatio));
    _viewport = EasyAR::Vec4I(0, height - viewport_size[1], viewport_size[0], viewport_size[1]);
}
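
// The viewport above is computed "aspect fill": the camera frame is scaled by the
// larger of the width/height ratios so it covers the whole view, and the vertical
// offset crops the overflow instead of letterboxing it.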

// Start scanning
-(void)start
{

    BOOL isOpen = [self cameraStart];
    
    NSLog(@"camera started = %d", isOpen);
}

// Display-link callback: runs once per frame while scanning
- (void)displayLinkCallback:(CADisplayLink*)displayLink
{
    
    [self render];

    glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderBuffer);
    [_context presentRenderbuffer:GL_RENDERBUFFER];
}

// Set up the OpenGL ES layer, context, and framebuffers
- (void)setupGL
{
    _eaglLayer = (CAEAGLLayer*) self.layer;
    _eaglLayer.opaque = YES;
    
    EAGLRenderingAPI api = kEAGLRenderingAPIOpenGLES2;
    _context = [[EAGLContext alloc] initWithAPI:api];
    if (!_context)
        NSLog(@"Failed to initialize OpenGLES 2.0 context");
    if (![EAGLContext setCurrentContext:_context])
        NSLog(@"Failed to set current OpenGL context");
    
    GLuint frameBuffer;
    glGenFramebuffers(1, &frameBuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
    
    glGenRenderbuffers(1, &_colorRenderBuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderBuffer);
    [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:_eaglLayer];
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorRenderBuffer);
    
    int width, height;
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &width);
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &height);
    
    GLuint depthRenderBuffer;
    glGenRenderbuffers(1, &depthRenderBuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, depthRenderBuffer);
    glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, width, height);
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRenderBuffer);
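    
    // Optional check (not in the original post): verify the framebuffer is complete;
    // an incomplete FBO silently renders nothing.
    GLenum fboStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    if (fboStatus != GL_FRAMEBUFFER_COMPLETE)
        NSLog(@"Failed to make a complete framebuffer object: %x", fboStatus);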
}

// Core: pull the latest camera frame from the augmenter, draw it as the AR background, and check whether any target is currently tracked
-(void)render
{
    glClearColor(0.f, 0.f, 0.f, 1.f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    EasyAR::Frame frame = _augmenter.newFrame();
    if(view_size[0] > 0){
        int width = view_size[0];
        int height = view_size[1];
        EasyAR::Vec2I size = EasyAR::Vec2I(1, 1);
        if (_cameraDevice && _cameraDevice.isOpened())
            size = _cameraDevice.size();
        if(_portrait)
            std::swap(size[0], size[1]);
        float scaleRatio = std::max((float)width / (float)size[0], (float)height / (float)size[1]);
        EasyAR::Vec2I viewport_size = EasyAR::Vec2I((int)(size[0] * scaleRatio), (int)(size[1] * scaleRatio));
        if(_portrait){
         
            _viewport = EasyAR::Vec4I(0, height - viewport_size[1], viewport_size[0], viewport_size[1]);
        }
        else
        {
            NSLog(@"_portrait==>%dx==>%dy==>%dz==>%d",_portrait,(width - height),viewport_size[0],viewport_size[1]);
            _viewport = EasyAR::Vec4I(0, width - height, viewport_size[0], viewport_size[1]);
        }
        if(_cameraDevice && _cameraDevice.isOpened())
            view_size[0] = -1;
    }
    _augmenter.setViewPort(_viewport);
    _augmenter.drawVideoBackground();
    
    glViewport(_viewport[0], _viewport[1], _viewport[2], _viewport[3]);   // glViewport tells OpenGL which part of the drawable the rendered image should occupy
 
    for (int i = 0; i < frame.targets().size(); ++i) {
        EasyAR::AugmentedTarget::Status status = frame.targets()[i].status();
        
        // the target has been matched and is being tracked
        if(status == EasyAR::AugmentedTarget::kTargetStatusTracked) {
            
            // render a 3D model on the target (left commented out here)
//            EasyAR::Matrix44F projectionMatrix = EasyAR:: getProjectionGL(_cameraDevice.cameraCalibration(), 0.2f, 500.f);
//            EasyAR::Matrix44F cameraview = getPoseGL(frame.targets()[i].pose());
//            EasyAR::ImageTarget target = frame.targets()[i].target().cast_dynamic();
//            _renderer.render(projectionMatrix, cameraview, target.size());
            
            
//            NSLog(@"status===%zd",status);
//            if (![self.layer.sublayers  containsObject:_imageLayer])
//            {
//                _imageLayer = (CALayer *)[self initBonus:[UIImage imageNamed:@"Bonus"]];
//                [_imageLayer removeFromSuperlayer];
//                [self.layer addSublayer:_imageLayer];
//                [self bonusAnimation];
//            }

            
            _matchBlock(status);
        }
     
    }
}

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event{
    CGPoint point = [[touches anyObject] locationInView:self];
    // convert the touch into the bonus layer's coordinate space and remove the layer when it is tapped
    CGPoint imageLayerPoint = [_imageLayer convertPoint:point fromLayer:self.layer];
    if ([_imageLayer containsPoint:imageLayerPoint]) {
        
        [_imageLayer removeFromSuperlayer];
        
    }

}

-(id)initBonus:(UIImage *)image
{
    CALayer * imageLayer = [CALayer layer];
    // SIZE is presumably a project-wide screen-size macro (e.g. [UIScreen mainScreen].bounds.size)
    imageLayer.frame = CGRectMake((SIZE.width-128)/2, (SIZE.height-192)/2, 128, 192);
    imageLayer.cornerRadius = 10.0;
    imageLayer.masksToBounds = YES;
    imageLayer.contents = (id)image.CGImage;
    return (id)imageLayer;
    
}

-(void)bonusAnimation
{
    CAKeyframeAnimation *theAnimation = [CAKeyframeAnimation animation];
    
    // CATransform3DMakeRotation(angle, x, y, z): the first parameter is the rotation
    // angle; the other three form the axis vector to rotate around, anchored at the
    // view's center. The keyframes below are 0, ~pi (3.13), and ~2*pi (6.26), i.e. a
    // full spin around the Y axis.
    theAnimation.values = [NSArray arrayWithObjects:
                           [NSValue valueWithCATransform3D:CATransform3DMakeRotation(0, 0, 0.5, 0)],
                           [NSValue valueWithCATransform3D:CATransform3DMakeRotation(3.13, 0, 0.5, 0)],
                           [NSValue valueWithCATransform3D:CATransform3DMakeRotation(6.26, 0, 0.5, 0)],
                           nil];
    theAnimation.cumulative = YES;
    // time per keyframe = total duration / (values.count - 1)
    theAnimation.duration = 1;
    // repeat count
    theAnimation.repeatCount = 0;
    // do not remove the animation when it finishes...
    theAnimation.removedOnCompletion = NO;
    // ...so the layer keeps the final transform
    theAnimation.fillMode = kCAFillModeForwards;
    theAnimation.delegate = self;
    _imageLayer.zPosition = 50;
    [_imageLayer addAnimation:theAnimation forKey:@"transform"];
    

}

#pragma mark Camera initialization
-(BOOL)initCamera
{
    BOOL status = YES;
    status = _cameraDevice.open();
    _cameraDevice.setSize( EasyAR::Vec2I(1280, 720));
    _cameraDevice.setFocusMode(EasyAR::CameraDevice::kFocusModeContinousauto);
    status = _augmenter.attachCamera(_cameraDevice);
    status = _tracker.attachCamera(_cameraDevice);
//    [self loadImage:@"namecard.jpg"];   // alternative: load a target bundled with the app
    
    // Absolute path of the target image inside the app sandbox
    NSString *savePath = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/test.jpg"];
    NSLog(@"savePath= %@",savePath);

    // The target image could also be downloaded first and loaded from the completion block:
//    [self downLoadImg:strURL savePath:savePath imgDownFinish:^(id image) {
//        [self loadImage:savePath];
//    }];

    [self loadImage:savePath];

    return status;
}

-(BOOL)cameraStart
{
    BOOL status = YES;
    status = _cameraDevice.start();
    status = _tracker.start();
    self.displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];
    [self.displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
    
    return status;
}

#pragma mark Load the target image
-(void)loadImage:(NSString *)path
{
    // Build the JSON config EasyAR expects: one target named "test" pointing at an absolute path
    NSString *str=[NSString stringWithFormat:@"{\"images\":[{\"image\":\"%@\",\"name\":\"%@\"}]}",path,@"test"];
    NSLog(@"strJson=%@",str);
    BOOL readImage = _tar.load([str UTF8String], EasyAR::kStorageAbsolute | EasyAR::kStorageJson| EasyAR::kStorageApp);
    
    _tracker.loadTarget(_tar, nil);
    NSLog(@"target image loaded = %d",readImage);
}
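
// For reference, the JSON built in loadImage: above ends up looking like
// {"images":[{"image":"<app sandbox>/Documents/test.jpg","name":"test"}]}
// i.e. an absolute image path plus a target name, which is why kStorageAbsolute
// and kStorageJson are passed to load().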

-(void)loadImagePath
{
    NSString *jstr = [NSString stringWithFormat:@"{\"images\":[{\"image\":\"%@\",\"name\":\"%@\"}]}",@"namecard.jpg",@"namecard"];
    NSLog(@"jstr==>%@",jstr);
    BOOL readImage = _tar.load([jstr UTF8String],  EasyAR::kStorageAssets | EasyAR::kStorageJson);
     NSLog(@"读取图片 = %zd",readImage);
    _tracker.loadTarget(_tar,0);
    
}

-(void)downLoadImg:(NSString *)strURL savePath:(NSString *)savePath imgDownFinish:(imgDownFinish)finish
{
    [[SDWebImageManager sharedManager] downloadImageWithURL:[NSURL URLWithString:strURL] options:0
                                                   progress:^(NSInteger receivedSize, NSInteger expectedSize)
     {
         // report download progress here if needed
     } completed:^(UIImage *image, NSError *error, SDImageCacheType cacheType, BOOL finished, NSURL *imageURL) {
         
         if (error)
         {
             NSLog(@"error is %@",error);
             
           
             if (finish) {
                 finish(nil);
             }
             
         }
         if (image)
         {
             
             NSArray *arrType = [strURL componentsSeparatedByString:@"."];
             NSData *data = nil;
             
             // keep the on-disk encoding consistent with the URL's extension
             if ([[arrType lastObject] isEqualToString:@"png"]||[[arrType lastObject] isEqualToString:@"PNG"])
             {
                 data = UIImagePNGRepresentation(image);
             }
             
             if ([[arrType lastObject] isEqualToString:@"jpg"]||[[arrType lastObject] isEqualToString:@"JPG"])
             {
                 data = UIImageJPEGRepresentation(image, 1);
             }
             
             
             if (data) {
                 if([data writeToFile:savePath atomically:YES])
                 {
                     NSLog(@"保存成功");
                 }
                 
                 if (finish) {
                     finish(image);
                 }
             }
             else
             {
                 if (finish) {
                     finish(nil);
                 }
             }
         }
     
     }];
}

-(void)dealloc
{

}

-(void)clear
{
    [self.displayLink invalidate];   // stop the per-frame callback
    _tracker.stop();
    _cameraDevice.stop();
    _cameraDevice.close();
    _cameraDevice.clear();
    _augmenter.clear();
}

@end
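
For completeness, here is roughly how a view controller would drive the CameraView above. The CameraView method names are taken from the class itself; the controller is only an illustrative sketch:

//  ViewController.m -- usage sketch for CameraView (controller code is illustrative)
#import <UIKit/UIKit.h>
#import "CameraView.h"

@interface ViewController : UIViewController
@end

@implementation ViewController
{
    CameraView *_cameraView;
}

- (void)viewDidLoad
{
    [super viewDidLoad];
    _cameraView = [[CameraView alloc] initWithFrame:self.view.bounds];
    [self.view addSubview:_cameraView];
}

- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
    UIInterfaceOrientation orientation = [UIApplication sharedApplication].statusBarOrientation;
    [_cameraView setOrientation:orientation];
    [_cameraView resize:self.view.bounds orientation:orientation];
    [_cameraView start];          // starts the camera, the tracker, and the display link
}

- (void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];
    [_cameraView clear];          // stops the display link, the tracker, and the camera
}

@end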
