Instantly share code, notes, and snippets.

@num3ric num3ric/CinderARKit.cpp Secret
Last active Jan 14, 2019

Embed
What would you like to do?
Minimal ARKit Cinder wrapper
#include "CinderARKit.h"
#include "cinder/Log.h"
#include "cinder/app/AppBase.h"
#include "cinder/app/RendererGl.h"
#import <Foundation/Foundation.h>
#import <ARKit/ARKit.h>
using namespace ci;
using namespace ARKit;
// Converts a UID string (as produced by getUidStringFromUUID) back to an NSUUID.
// Returns nil if the string is not a valid UUID representation.
// NOTE(review): the NSUUID is returned at +1 from alloc/init with no autorelease;
// under MRC this leaks unless callers release it — confirm whether this file is
// compiled with ARC (the -release in ~Session suggests it is not).
static const NSUUID* toUUIDFromUidString( const std::string& uid )
{
NSString* uidString = [NSString stringWithUTF8String:uid.c_str()];
return [[NSUUID alloc] initWithUUIDString:uidString];
}
// Renders an NSUUID as its canonical UUID string, returned as a UTF-8 std::string.
static const std::string getUidStringFromUUID( NSUUID* uid )
{
    NSString* canonical = uid.UUIDString;
    const char* bytes = [canonical UTF8String];
    NSUInteger byteCount = [canonical lengthOfBytesUsingEncoding:NSUTF8StringEncoding];
    return std::string( bytes, byteCount );
}
// Anchors are keyed throughout this wrapper by the string form of their UUID.
static const std::string getUidStringFromAnchor( ARAnchor* anchor )
{
    NSUUID* identifier = anchor.identifier;
    return getUidStringFromUUID( identifier );
}
// Reinterprets the bytes of 't' as a value of type U (memcpy-based type punning,
// which is well-defined for trivially copyable types, unlike a reinterpret_cast).
// Only sizeof(U) bytes are read from 't', so T may be larger than U — e.g. the
// 16-byte padded simd vector_float3 converted to a 12-byte glm::vec3 — but a
// smaller T would read past the end of the source, hence the static_assert.
template <typename T, typename U >
const U static inline convert( const T& t ) {
    static_assert( sizeof(T) >= sizeof(U), "convert(): source type too small to supply sizeof(U) bytes" );
    U tmp;
    memcpy(&tmp, &t, sizeof(U));
    return tmp;
}
// Reinterpret Apple simd types as their glm equivalents (both are column-major,
// so a byte copy preserves the matrix layout).
// NOTE(review): vector_float3 is 16 bytes while glm::vec3 is 12; convert() copies
// only sizeof(U) bytes, so the simd padding is dropped rather than read past.
const glm::mat4 static inline toMat4( const matrix_float4x4& mat ) { return convert<matrix_float4x4, glm::mat4>(mat); }
const glm::vec4 static inline toVec4( const vector_float4& vec ) { return convert<vector_float4, glm::vec4>(vec); }
const glm::vec3 static inline toVec3( const vector_float3& vec ) { return convert<vector_float3, glm::vec3>(vec); }
const glm::vec2 static inline toVec2( const vector_float2& vec ) { return convert<vector_float2, glm::vec2>(vec); }
namespace ARKit {
// RAII holder for the CoreVideo OpenGL ES texture cache plus the per-frame
// luma/chroma texture references produced from it. Construction requires a
// current RendererGl (it needs the EAGL context); destruction releases all
// CoreVideo objects.
struct CVTextures {
    // Default-initialize to NULL: previously these were left uninitialized, so
    // the destructor's `if( lumaTexture )` checks (and the early return in the
    // constructor's error path) examined garbage pointers — undefined behavior
    // that could CFRelease a junk value.
    CVOpenGLESTextureRef lumaTexture = NULL;
    CVOpenGLESTextureRef chromaTexture = NULL;
    CVOpenGLESTextureCacheRef videoTextureCache = NULL;
    CVTextures() {
        CVReturn err = 0;
        auto* renderer = dynamic_cast<ci::app::RendererGl*>( ci::app::AppBase::get()->getRenderer().get() );
        if( renderer ) {
            EAGLContext* context = renderer->getEaglContext();
            err = ::CVOpenGLESTextureCacheCreate( kCFAllocatorDefault, NULL, context, NULL, &videoTextureCache );
            if( err ) {
                NSLog(@"Error at CVOpenGLESTextureCacheCreate %d", err);
                return;
            }
        }
        else {
            // No GL renderer available; the wrapper cannot produce camera textures.
            throw std::exception();
        }
    }
    ~CVTextures() {
        if( lumaTexture ) {
            ::CFRelease(lumaTexture);
            lumaTexture = NULL;
        }
        if( chromaTexture ) {
            ::CFRelease(chromaTexture);
            chromaTexture = NULL;
        }
        if( videoTextureCache ) {
            ::CVOpenGLESTextureCacheFlush( videoTextureCache, 0 );
            ::CFRelease( videoTextureCache );
            videoTextureCache = NULL;
        }
    }
};
// Stores the ARKit-supplied view matrix and invalidates the cached view so the
// next query goes through calcViewMatrix().
void Camera::setViewMatrix( const ci::mat4& m )
{
    mModelViewCached = false;
    mArViewMatrix = m;
}
// Stores the ARKit-supplied projection matrix and invalidates the cached
// projection so the next query goes through calcProjection().
void Camera::setProjectionMatrix( const ci::mat4& m )
{
    mProjectionCached = false;
    mArProjectionMatrix = m;
}
// Overrides CameraPersp: the view matrix comes straight from ARKit rather than
// being derived from eye/target parameters.
void Camera::calcViewMatrix() const
{
mViewMatrix = mArViewMatrix;
mModelViewCached = true;
}
// Overrides CameraPersp: inverse view is computed from the (possibly refreshed)
// ARKit view matrix.
void Camera::calcInverseView() const
{
if( ! mModelViewCached ) {
calcViewMatrix();
}
mInverseModelViewMatrix = ci::inverse( mViewMatrix );
mInverseModelViewCached = true;
}
// Overrides CameraPersp: the projection matrix comes straight from ARKit rather
// than being rebuilt from fov/aspect.
void Camera::calcProjection() const
{
mProjectionMatrix = mArProjectionMatrix;
mProjectionCached = true;
}
}
// Instantiates the ARConfiguration subclass matching the requested tracking
// type. Returns nil (after logging) for an unrecognized type; callers pass the
// result straight to -[ARSession runWithConfiguration:].
ARConfiguration * Session::Configuration::get() const
{
    switch( mTrackingType ) {
        case ARKit::TrackingType::OrientationTracking:
            return [AROrientationTrackingConfiguration new];
        case ARKit::TrackingType::FaceTracking:
            return [ARFaceTrackingConfiguration new];
        case ARKit::TrackingType::WorldTracking: {
            ARWorldTrackingConfiguration* wtc = [ARWorldTrackingConfiguration new];
            // Map the wrapper's PlaneDetection enum onto ARKit's option flags.
            if( mPlaneDetection == ARKit::PlaneDetection::Horizontal )
                wtc.planeDetection = ARPlaneDetectionHorizontal;
            else if( mPlaneDetection == ARKit::PlaneDetection::Vertical )
                wtc.planeDetection = ARPlaneDetectionVertical;
            else
                wtc.planeDetection = ARPlaneDetectionNone;
            // Reference images are loaded from the asset catalog group "AR Resources".
            if( mImageTrackingEnabled )
                wtc.detectionImages = [ARReferenceImage referenceImagesInGroupNamed:@"AR Resources" bundle: nil];
            return wtc;
        }
    }
    CI_LOG_E( "Not a valid configuration." );
    return nil;
}
// Creates an ARSession, starts it immediately with the given configuration, and
// compiles the shaders used for camera capture and face-mesh debugging.
// Requires a current GL context (the shader compiles happen here).
Session::Session( const Session::Configuration& config )
{
mARSession = [ARSession new];
[mARSession runWithConfiguration:config.get()];
mYCbCrToRGBProg = createCameraRGBProg();
// Bind the luma/chroma samplers to texture units 0 and 1, matching the
// ScopedTextureBind units used in drawCapture().
mYCbCrToRGBProg->uniform( "SamplerY", 0 );
mYCbCrToRGBProg->uniform( "SamplerUV", 1 );
mDebugFaceGlslProg = createDebugFaceGlslProg();
}
// NOTE(review): the explicit -release implies this translation unit is built
// without ARC (-fno-objc-arc); under ARC this line would not compile. Confirm
// the build flags before changing memory management here.
Session::~Session()
{
[mARSession release];
}
// Pauses tracking and frame delivery; resume by running a configuration again.
void Session::pause()
{
[mARSession pause];
}
// ARKit's ambient light intensity estimate for the frame. Messaging a nil
// frame or nil lightEstimate yields 0.
float Session::getAmbientLightIntensity( ARFrame * frame ) const
{
return frame.lightEstimate.ambientIntensity;
}
// ARKit's ambient color temperature estimate for the frame. Messaging a nil
// frame or nil lightEstimate yields 0.
float Session::getAmbientColorTemperature( ARFrame * frame ) const
{
return frame.lightEstimate.ambientColorTemperature;
}
// Latest frame from the session, or nil before the first frame arrives. All
// per-frame getters below take this as their argument.
ARFrame * Session::getCurrentFrame() const
{
return mARSession.currentFrame;
}
// Adds an anchor at the given world-space position and returns its UUID string,
// usable to find the anchor again via getCustomAnchors().
const AnchorID Session::addAnchorRelativeToWorld( glm::vec3 position )
{
    // Build a translation-only transform. Column 3 holds the translation.
    matrix_float4x4 translation = matrix_identity_float4x4;
    translation.columns[3].x = position.x;
    // Bug fix: position.y was previously written to .z (copy-paste), leaving y
    // at 0 and then being overwritten by position.z.
    translation.columns[3].y = position.y;
    translation.columns[3].z = position.z;
    ARAnchor *anchor = [[ARAnchor alloc] initWithTransform:translation];
    NSUUID* anchorID = anchor.identifier;
    [mARSession addAnchor:anchor];
    return getUidStringFromUUID( anchorID );
}
// Adds an anchor offset from the frame's current camera pose (offset is in the
// camera's local space) and returns its UUID string.
const AnchorID Session::addAnchorRelativeToCamera( ARFrame * frame, glm::vec3 offset )
{
    matrix_float4x4 translation = matrix_identity_float4x4;
    translation.columns[3].x = offset.x;
    // Bug fix: offset.y was previously written to .z (copy-paste), leaving y at
    // 0 and then being overwritten by offset.z.
    translation.columns[3].y = offset.y;
    translation.columns[3].z = offset.z;
    // Compose camera pose with the local offset to get the world transform.
    matrix_float4x4 transform = matrix_multiply(frame.camera.transform, translation);
    ARAnchor *anchor = [[ARAnchor alloc] initWithTransform:transform];
    NSUUID* anchorID = anchor.identifier;
    [mARSession addAnchor:anchor];
    return getUidStringFromUUID( anchorID );
}
// Copies the frame's raw feature-point cloud into world-space glm positions.
// rawFeaturePoints is nil until tracking has produced a point cloud; messaging
// nil yields count == 0, so the loop is safely skipped and an empty vector is
// returned.
std::vector<glm::vec3> Session::getRawFeaturePoints( ARFrame * frame ) const
{
    std::vector<glm::vec3> points;
    ARPointCloud* cloud = frame.rawFeaturePoints;
    points.reserve( (size_t)cloud.count );
    // NSUInteger index avoids the signed/unsigned comparison the original int
    // loop counter produced against the unsigned 'count' property.
    for( NSUInteger i = 0; i < cloud.count; ++i ) {
        points.push_back( toVec3( cloud.points[i] ) );
    }
    return points;
}
// Returns the plain (user-added) ARAnchors in the frame — the ones created by
// addAnchorRelativeToWorld / addAnchorRelativeToCamera.
std::vector<Anchor> Session::getCustomAnchors( ARFrame * frame ) const
{
    std::vector<Anchor> anchors;
    for( ARAnchor* anchor in [frame anchors] ) {
        // isMemberOfClass: matches exact ARAnchor instances only. The previous
        // isKindOfClass: test was true for every anchor in the session (plane,
        // image and face anchors all subclass ARAnchor), so those were returned
        // here as well as from their dedicated getters.
        if( [anchor isMemberOfClass:[ARAnchor class]] ) {
            anchors.emplace_back( ARKit::Anchor{ getUidStringFromAnchor( anchor ), toMat4( anchor.transform ) } );
        }
    }
    return anchors;
}
// Collects every ARPlaneAnchor tracked in the frame as a wrapper PlaneAnchor
// (UUID string, world transform, plane center and extent).
std::vector<PlaneAnchor> Session::getPlaneAnchors( ARFrame * frame ) const
{
    std::vector<PlaneAnchor> result;
    for( ARAnchor* candidate in [frame anchors] ) {
        if( ! [candidate isKindOfClass:[ARPlaneAnchor class]] )
            continue;
        ARPlaneAnchor* plane = (ARPlaneAnchor*)candidate;
        result.emplace_back( ARKit::PlaneAnchor{ getUidStringFromAnchor( plane ),
                                                 toMat4( plane.transform ),
                                                 toVec3( plane.center ),
                                                 toVec3( plane.extent ) } );
    }
    return result;
}
// Collects every ARImageAnchor tracked in the frame as a wrapper ImageAnchor
// (UUID string, world transform, physical size in meters per the reference
// image, and the reference image's name).
std::vector<ImageAnchor> Session::getImageAnchors( ARFrame * frame ) const
{
    std::vector<ImageAnchor> result;
    for( ARAnchor* candidate in [frame anchors] ) {
        if( ! [candidate isKindOfClass:[ARImageAnchor class]] )
            continue;
        ARImageAnchor* image = (ARImageAnchor*)candidate;
        ARReferenceImage* ref = [image referenceImage];
        CGSize size = [ref physicalSize];
        result.emplace_back( ImageAnchor{ getUidStringFromAnchor( image ),
                                          toMat4( image.transform ),
                                          glm::vec2( size.width, size.height ),
                                          [[ref name] UTF8String] } );
    }
    return result;
}
// Returns the first ARFaceAnchor in the frame as a wrapper FaceAnchor with a
// rebuilt TriMesh and blend-shape weights. If no face anchor is present, the
// default FaceAnchor (mIsTracked == false, empty mesh) is returned.
FaceAnchor Session::getFaceAnchor( ARFrame * frame ) const
{
FaceAnchor faceAnchor;
for( ARAnchor* anchor in [frame anchors] ) {
if( [anchor isKindOfClass:[ARFaceAnchor class]] ) {
ARFaceAnchor* fa = (ARFaceAnchor*)anchor;
faceAnchor.mUid = getUidStringFromAnchor( fa );
faceAnchor.mIsTracked = fa.isTracked;
// Indices are appended here; positions are rebuilt further below. Clearing
// the position buffer later does not touch this index buffer.
for( NSUInteger i = 0; i < fa.geometry.triangleCount * 3; i += 3 ) {
int16_t i0 = fa.geometry.triangleIndices[i];
int16_t i1 = fa.geometry.triangleIndices[i+1];
int16_t i2 = fa.geometry.triangleIndices[i+2];
faceAnchor.mTriMesh->appendTriangle((uint32_t)i0, (uint32_t)i1, (uint32_t)i2);
}
faceAnchor.mTransform = toMat4( fa.transform );
// Copy ARKit blend-shape coefficients, keyed by their location name.
for( id key in fa.blendShapes ) {
faceAnchor.mBlendShapes[ std::string([key UTF8String]) ] = [[fa.blendShapes objectForKey:key] floatValue];
}
faceAnchor.mTriMesh->getBufferPositions().clear();
for( NSUInteger i = 0; i < fa.geometry.vertexCount; i++ ) {
auto vertex = fa.geometry.vertices[i];
faceAnchor.mTriMesh->appendPosition( vec3( vertex.x, vertex.y, vertex.z ) );
}
faceAnchor.mTriMesh->recalculateNormals();
// Face tracking supplies a single face; stop at the first one found.
break;
}
}
return faceAnchor;
}
// Runs ARKit's 2D-point hit test against the frame and returns the world
// transform of each result. 'type' maps directly onto ARHitTestResultType bits.
std::vector<glm::mat4> Session::hitTest( ARFrame * frame, const glm::vec2& point, HitTestType type ) const
{
    CGPoint screenPoint = CGPointMake( point.x, point.y );
    NSArray<ARHitTestResult*>* results = [frame hitTest:screenPoint types:(ARHitTestResultType)type];
    std::vector<glm::mat4> transforms;
    transforms.reserve( results.count );
    for( ARHitTestResult* result in results ) {
        transforms.push_back( toMat4( result.worldTransform ) );
    }
    return transforms;
}
// View matrix for the current interface orientation, straight from ARCamera.
// NOTE(review): -statusBarOrientation is deprecated since iOS 13; prefer the
// window scene's interfaceOrientation when modernizing.
const mat4 Session::getViewMatrix( ARFrame * frame ) const
{
auto orientation = [[UIApplication sharedApplication] statusBarOrientation];
return toMat4( [frame.camera viewMatrixForOrientation:orientation] );
}
// Projection matrix for the current interface orientation and clip planes.
// The viewport is assumed to cover the full main screen; pass different
// bounds logic if rendering into a smaller view.
const mat4 Session::getProjectionMatrix( ARFrame * frame, float near, float far ) const
{
auto orientation = [[UIApplication sharedApplication] statusBarOrientation];
auto viewBounds = [[UIScreen mainScreen] bounds];
CGSize viewportSize = viewBounds.size;
return toMat4([frame.camera projectionMatrixForOrientation:orientation
viewportSize:viewportSize
zNear:near
zFar:far]);
}
// Builds an ARKit::Camera (a CameraPersp fed directly by ARKit matrices) for
// the frame, using the caller-supplied near/far clip planes.
ARKit::Camera Session::getCamera( ARFrame* frame, float near, float far ) const
{
    ARKit::Camera camera;
    camera.setViewMatrix( getViewMatrix( frame ) );
    // Bug fix: near/far were previously dropped — getProjectionMatrix was
    // called without them, so its defaults were always used regardless of the
    // values passed here.
    camera.setProjectionMatrix( getProjectionMatrix( frame, near, far ) );
    return camera;
}
// Wraps an existing CoreVideo GL texture in a ci::gl::Texture2d, copying over
// its clean-area crop and setting edge clamping (required for non-power-of-two
// ES textures).
// NOTE(review): the trailing 'true' in Texture2d::create presumably marks the
// texture as not owned (CVTextures keeps ownership and CFReleases it) — confirm
// against cinder's Texture2d::create(target, id, w, h, bool) overload.
// NOTE(review): 'pixelBuffer' is unused here; consider dropping the parameter.
static ci::gl::Texture2dRef getTexture2d( CVOpenGLESTextureRef* glesTex, CVPixelBufferRef* pixelBuffer, GLsizei width, GLsizei height )
{
GLenum target = ::CVOpenGLESTextureGetTarget( *glesTex );
GLuint name = ::CVOpenGLESTextureGetName( *glesTex );
bool topDown = ::CVOpenGLESTextureIsFlipped( *glesTex );
auto texture = gl::Texture2d::create( target, name, width, height, true );
// Clean texcoords exclude any padding CoreVideo added around the image.
vec2 lowerLeft, lowerRight, upperRight, upperLeft;
::CVOpenGLESTextureGetCleanTexCoords( *glesTex, &lowerLeft.x, &lowerRight.x, &upperRight.x, &upperLeft.x );
if( target == GL_TEXTURE_2D ) {
// GL_TEXTURE_2D coords are normalized; scale them to pixel bounds.
texture->setCleanBounds( Area( (int32_t)(upperLeft.x * width), (int32_t)(upperLeft.y * height),
(int32_t)(lowerRight.x * width ), (int32_t)(lowerRight.y * height ) ) );
} else {
texture->setCleanBounds( Area( (int32_t)upperLeft.x, (int32_t)upperLeft.y, (int32_t)lowerRight.x, (int32_t)lowerRight.y ) );
}
texture->setWrap( GL_CLAMP_TO_EDGE, GL_CLAMP_TO_EDGE );
texture->setTopDown( topDown );
return texture;
}
// Uploads the frame's captured biplanar YCbCr image into two GL textures via
// the CoreVideo texture cache, storing them in 'texture'. Returns false when
// there is no frame yet or the pixel buffer is not biplanar; returns true
// otherwise (even if a per-plane texture creation logged an error).
bool Session::readCapture( ARFrame * frame, YUVTexture* texture )
{
if( frame == nil || CVPixelBufferGetPlaneCount( frame.capturedImage ) < 2 ) {
return false;
}
// Capture pixel YCbCr
if( frame.capturedImage != nil ) {
if( mCVTexturesImpl ) {
// Release the previous frame's plane textures before creating new ones;
// the cache recycles the underlying GL names.
if( mCVTexturesImpl->lumaTexture ) {
CFRelease( mCVTexturesImpl->lumaTexture );
mCVTexturesImpl->lumaTexture = NULL;
}
if( mCVTexturesImpl->chromaTexture ) {
CFRelease( mCVTexturesImpl->chromaTexture );
mCVTexturesImpl->chromaTexture = NULL;
}
// Periodic texture cache flush every frame
CVOpenGLESTextureCacheFlush( mCVTexturesImpl->videoTextureCache, 0);
}
else {
// Lazily create the texture cache on first use (needs the EAGL context).
mCVTexturesImpl = std::unique_ptr<CVTextures>( new ARKit::CVTextures );
}
CVPixelBufferRef pixelBuffer = frame.capturedImage;
GLsizei width = (GLsizei)::CVPixelBufferGetWidthOfPlane( pixelBuffer, 0 );
GLsizei height = (GLsizei)::CVPixelBufferGetHeightOfPlane( pixelBuffer, 0 );
mCameraSize = vec2( (float)::CVPixelBufferGetWidth( pixelBuffer ), (float)::CVPixelBufferGetHeight( pixelBuffer ) );
// Y-plane
{
// Unit 0 matches the "SamplerY" uniform bound in the Session constructor.
// NOTE(review): the GL_R8/GL_RED format pair requires an OpenGL ES 3
// context; ES2 would need luminance formats — confirm the renderer config.
gl::ScopedActiveTexture push{ 0 };
CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
mCVTexturesImpl->videoTextureCache,
pixelBuffer,
NULL,
GL_TEXTURE_2D,
GL_R8,
width,
height,
GL_RED,
GL_UNSIGNED_BYTE,
0,
&mCVTexturesImpl->lumaTexture);
if (err) {
NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
texture->lumaTexture = getTexture2d( &mCVTexturesImpl->lumaTexture, &pixelBuffer, width, height );
}
// UV-plane
{
// Chroma plane of the 4:2:0 biplanar buffer is half resolution per axis.
gl::ScopedActiveTexture push{ 1 };
CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
mCVTexturesImpl->videoTextureCache,
pixelBuffer,
NULL,
GL_TEXTURE_2D,
GL_RG8,
width/2,
height/2,
GL_RG,
GL_UNSIGNED_BYTE,
1,
&mCVTexturesImpl->chromaTexture );
if (err) {
NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
texture->chromaTexture = getTexture2d( &mCVTexturesImpl->chromaTexture, &pixelBuffer, width/2, height/2 );
}
}
return true;
}
// True when the UI is in either portrait orientation; used to decide whether
// the landscape camera capture must be rotated when drawn.
bool Session::isInterfaceInPortraitOrientation() const
{
return UIInterfaceOrientationIsPortrait( [[UIApplication sharedApplication] statusBarOrientation] );
}
// Draws the captured camera image, center-filled into 'area'. In portrait the
// capture arrives landscape, so the extents are swapped and the shader rotates
// the texture coordinates 90 degrees.
void Session::drawCapture( const YUVTexture& yuvTexture, Area area )
{
    const bool portrait = isInterfaceInPortraitOrientation();
    const vec2 extents = portrait ? vec2( mCameraSize.y, mCameraSize.x ) : mCameraSize;
    const Rectf cameraRect( vec2( 0.0f ), extents );
    gl::ScopedMatrices matrices;
    gl::ScopedGlslProg prog( mYCbCrToRGBProg );
    mYCbCrToRGBProg->uniform( "u_Rotate", portrait );
    gl::ScopedTextureBind luma{ yuvTexture.lumaTexture, 0 };
    gl::ScopedTextureBind chroma{ yuvTexture.chromaTexture, 1 };
    gl::drawSolidRect( cameraRect.getCenteredFill( area, true ) );
}
// Builds the YCbCr -> RGB shader used by drawCapture(). The vertex stage can
// rotate texcoords 90 degrees about the center when u_Rotate is set (portrait
// orientation); the fragment stage samples the luma and interleaved chroma
// planes and converts with a full-range BT.709 matrix (column-major).
// (Comments are kept outside the CI_GLSL macro arguments so the generated
// shader source strings are unchanged.)
gl::GlslProgRef Session::createCameraRGBProg()
{
return gl::GlslProg::create( gl::GlslProg::Format()
.vertex( CI_GLSL(100, precision mediump float;
uniform mat4 ciModelViewProjection;
uniform bool u_Rotate;
attribute vec4 ciPosition;
attribute vec2 ciTexCoord0;
varying vec2 v_TexCoord;
const float rotTheta = -3.1415926536 * 0.5;
const mat2 rot = mat2(cos(rotTheta), -sin(rotTheta), sin(rotTheta), cos(rotTheta));
void main()
{
v_TexCoord = ciTexCoord0;
if (u_Rotate)
{
v_TexCoord -= vec2( 0.5 );
v_TexCoord = rot * v_TexCoord;
v_TexCoord += vec2( 0.5 );
}
gl_Position = ciModelViewProjection * ciPosition;
}))
.fragment( CI_GLSL(100, precision mediump float;
uniform sampler2D SamplerY;
uniform sampler2D SamplerUV;
varying vec2 v_TexCoord;
void main()
{
mediump vec3 yuv;
lowp vec3 rgb;
vec2 coord = v_TexCoord;
coord.y = 1.0 - coord.y;
yuv.x = texture2D(SamplerY, coord).r;
yuv.yz = texture2D(SamplerUV, coord).rg - vec2(0.5, 0.5);
// Using BT.709 which is the standard for HDTV
rgb = mat3( 1, 1, 1,
0, -.18732, 1.8556,
1.57481, -.46813, 0) * yuv;
gl_FragColor = vec4(rgb, 1);
})));
}
// Builds a simple normal-lit, semi-transparent shader for visualizing the face
// mesh. Returns nullptr if compilation fails (the exception is logged, not
// rethrown). Comments stay outside the CI_GLSL macro arguments so the shader
// source strings are unchanged.
gl::GlslProgRef Session::createDebugFaceGlslProg()
{
gl::GlslProgRef glsl;
try {
glsl = gl::GlslProg::create( gl::GlslProg::Format()
.vertex( CI_GLSL(100, precision mediump float;
uniform mat4 ciModelViewProjection;
uniform mat3 ciNormalMatrix;
attribute vec4 ciPosition;
attribute vec3 ciNormal;
varying vec3 vNormal;
void main()
{
vNormal = ciNormalMatrix * ciNormal;
gl_Position = ciModelViewProjection * ciPosition;
}))
.fragment( CI_GLSL(100, precision mediump float;
varying vec3 vNormal;
void main()
{
vec3 n = normalize( vNormal );
vec3 rgb = vec3( 0.5 * dot( normalize( vec3(-1,0,1) ), n ) + 0.5 );
gl_FragColor = vec4( vec3(1,1,1) * rgb, 0.75 );
})));
}
catch( std::exception& exc ) {
CI_LOG_EXCEPTION( "", exc );
}
return glsl;
}
#pragma once
#include "cinder/gl/gl.h"
#include "cinder/Noncopyable.h"
#include "cinder/TriMesh.h"
#include "cinder/Camera.h"
#include <unordered_map>
//#import <ARKit/ARKit.h>
#if defined( __OBJC__ )
@class ARFrame;
@class ARSession;
@class ARConfiguration;
#else
class ARFrame;
class ARSession;
class ARConfiguration;
#endif
namespace ARKit {
/** Which ARConfiguration subclass Session::Configuration::get() will create. */
enum class TrackingType
{
OrientationTracking,
WorldTracking,
FaceTracking,
//ImageTracking,
//ObjectScanning
};
/** Plane detection mode for world tracking. Maps 1:1 onto ARPlaneDetection
 flags in Configuration::get(); there is no combined horizontal+vertical
 option here even though ARKit's flags can be OR-ed. */
enum class PlaneDetection
{
None,
Horizontal,
Vertical,
};
/** Mirrors ARHitTestResultType's bit flags; Session::hitTest casts these values
 directly, so they must stay in sync with ARKit's constants. */
enum class HitTestType {
/** Result type from intersecting the nearest feature point. */
FeaturePoint = (1 << 0),
/** Result type from intersecting a horizontal plane estimate, determined for the current frame. */
EstimatedHorizontalPlane = (1 << 1),
/** Result type from intersecting a vertical plane estimate, determined for the current frame. */
EstimatedVerticalPlane = (1 << 2),
/** Result type from intersecting with an existing plane anchor. */
ExistingPlane = (1 << 3),
/** Result type from intersecting with an existing plane anchor, taking into account the plane’s extent. */
ExistingPlaneUsingExtent = (1 << 4),
/** Result type from intersecting with an existing plane anchor, taking into account the plane’s geometry. */
ExistingPlaneUsingGeometry = (1 << 5),
};
/** Anchors are identified throughout this wrapper by their UUID string. */
typedef std::string AnchorID;
/** An anchor point that will be tracked by ARKit*/
class Anchor
{
public:
// NOTE(review): default construction leaves mTransform with glm's default
// initialization (identity only when GLM_FORCE_CTOR_INIT is defined,
// otherwise uninitialized) — confirm the project's glm configuration.
Anchor() = default;
Anchor( std::string uid, glm::mat4 transform )
: mUid( uid ),
mTransform( transform ),
mIsTracked{ true }
{}
virtual ~Anchor() {}
AnchorID mUid;        // UUID string of the underlying ARAnchor
glm::mat4 mTransform; // world transform reported by ARKit
bool mIsTracked = false;
};
/** An anchor point that includes plane position and extents*/
class PlaneAnchor : public Anchor
{
public:
// NOTE(review): default construction leaves mCenter/mExtent with glm's
// default (possibly uninitialized) values — see note on Anchor.
PlaneAnchor() = default;
PlaneAnchor( std::string uid, glm::mat4 transform, glm::vec3 center, glm::vec3 extent )
: Anchor{ uid, transform },
mCenter( center ),
mExtent( extent ) {}
virtual ~PlaneAnchor() {}
glm::vec3 mCenter; // plane center in the anchor's local space
glm::vec3 mExtent; // estimated plane extents
};
/** An anchor point from a tracked image*/
class ImageAnchor : public Anchor
{
public:
ImageAnchor() = default;
ImageAnchor( std::string uid, glm::mat4 transform, glm::vec2 physicalSize, std::string imageName )
: Anchor{ uid, transform },
mPhysicalSize( physicalSize ),
mImageName (imageName){}
virtual ~ImageAnchor() {}
glm::vec2 mPhysicalSize; // size of the reference image, from ARReferenceImage
std::string mImageName;  // name of the matched reference image
};
/** An anchor point for a tracked face, with mesh and blend-shape weights. */
class FaceAnchor : public Anchor
{
public:
FaceAnchor()
: Anchor{}
, mTriMesh{ ci::TriMesh::create() }
{ }
virtual ~FaceAnchor() {}
// Face mesh rebuilt each call by Session::getFaceAnchor().
ci::TriMeshRef mTriMesh = nullptr;
// ARKit blend-shape coefficients keyed by their location name.
std::unordered_map<std::string, float> mBlendShapes;
};
/** CameraPersp whose view/projection matrices are supplied directly by ARKit
 (via the setters) instead of being derived from eye/target/fov parameters. */
class Camera : public ci::CameraPersp {
public:
void setViewMatrix( const ci::mat4& m );
void setProjectionMatrix( const ci::mat4& m );
protected:
// Overridden to hand back the ARKit matrices instead of recomputing them.
virtual void calcViewMatrix() const override;
virtual void calcInverseView() const override;
virtual void calcProjection() const override;
private:
ci::mat4 mArViewMatrix, mArProjectionMatrix;
};
// Opaque holder for the CoreVideo GL texture cache; defined in the .mm file.
struct CVTextures;
/** Luma (Y) and chroma (CbCr) planes of one captured camera frame, filled by
 Session::readCapture() and consumed by Session::drawCapture(). */
struct YUVTexture {
ci::gl::Texture2dRef lumaTexture;
ci::gl::Texture2dRef chromaTexture;
};
typedef std::shared_ptr<class Session> SessionRef;
class Session : public cinder::Noncopyable {
public:
class Configuration {
public:
Configuration() = default;
Configuration& trackingType( TrackingType newType ) { mTrackingType = newType; return *this; }
Configuration& planeDetection( PlaneDetection detectionType ) { mPlaneDetection = detectionType; return *this; }
Configuration& imageTrackingEnabled( bool enabled ) { mImageTrackingEnabled = enabled; return *this; }
ARConfiguration * get() const;
private:
TrackingType mTrackingType = TrackingType::WorldTracking;
PlaneDetection mPlaneDetection = PlaneDetection::None;
bool mImageTrackingEnabled = false;
};
Session() = default;
Session( const Configuration& config );
~Session();
void pause();
ARFrame * getCurrentFrame() const;
const AnchorID addAnchorRelativeToWorld( glm::vec3 position );
const AnchorID addAnchorRelativeToCamera( ARFrame * frame, glm::vec3 offset );
std::vector<glm::vec3> getRawFeaturePoints( ARFrame * frame ) const;
std::vector<Anchor> getCustomAnchors( ARFrame * frame ) const;
std::vector<PlaneAnchor> getPlaneAnchors( ARFrame * frame ) const;
std::vector<ImageAnchor> getImageAnchors( ARFrame * frame ) const;
FaceAnchor getFaceAnchor( ARFrame * frame ) const;
std::vector<glm::mat4> hitTest( ARFrame * frame, const glm::vec2& point, HitTestType type ) const;
bool isInterfaceInPortraitOrientation() const;
const glm::mat4 getViewMatrix( ARFrame * frame ) const;
const glm::mat4 getProjectionMatrix( ARFrame * frame, float near = 0.001f, float far = 1000.0f ) const;
ARKit::Camera getCamera( ARFrame * frame, float near = 0.001f, float far = 1000.0f ) const;
float getAmbientLightIntensity( ARFrame * frame ) const;
float getAmbientColorTemperature( ARFrame * frame ) const;
bool readCapture( ARFrame * frame, YUVTexture* texture );
void drawCapture( const YUVTexture& yuvTexture, ci::Area area );
ci::gl::GlslProgRef mDebugFaceGlslProg;
private:
/** Creates the shaders to draw RGB camera capture */
static ci::gl::GlslProgRef createCameraRGBProg();
static ci::gl::GlslProgRef createDebugFaceGlslProg();
ARSession* mARSession;
YUVTexture mYUVTexture;
std::unique_ptr<CVTextures> mCVTexturesImpl;
glm::vec2 mCameraSize;
// Shaders to draw the camera image from YCbCr to RGB
ci::gl::GlslProgRef mYCbCrToRGBProg;
};
} // namespace ARKit
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment