Skip to content

Instantly share code, notes, and snippets.

Embed
What would you like to do?
Patch for the Structure SDK 0.10.1 Scanner sample: place the scanning-volume cube at a fixed pose in front of the camera (instead of tracking the camera viewpoint), enlarge the high-resolution volume bounds, and force the depth+color legacy tracker.
---
Scanner/ViewController+OpenGL.mm | 14 ++++++++++----
Scanner/ViewController+SLAM.mm | 28 ++++++++++++++++++++--------
Scanner/ViewController.h | 1 +
Scanner/ViewController.mm | 24 +++++++++++++++++++++++-
4 files changed, 54 insertions(+), 13 deletions(-)
diff --git a/Scanner/ViewController+OpenGL.mm b/Scanner/ViewController+OpenGL.mm
index 39cdca4..f854259 100644
--- a/Scanner/ViewController+OpenGL.mm
+++ b/Scanner/ViewController+OpenGL.mm
@@ -214,13 +214,19 @@ - (void)renderSceneForDepthFrame:(STDepthFrame*)depthFrame colorFrameOrNil:(STCo
}
// Highlighted depth values inside the current volume area.
- [_display.cubeRenderer renderHighlightedDepthWithCameraPose:cameraViewpoint alpha:alpha];
+// [_display.cubeRenderer renderHighlightedDepthWithCameraPose:cameraViewpoint alpha:alpha];
+ [_display.cubeRenderer renderHighlightedDepthWithCameraPose:_slamState.cubePose alpha:alpha];
// Render the wireframe cube corresponding to the current scanning volume.
- [_display.cubeRenderer renderCubeOutlineWithCameraPose:cameraViewpoint
- depthTestEnabled:false
- occlusionTestEnabled:true];
+// [_display.cubeRenderer renderCubeOutlineWithCameraPose:cameraViewpoint
+// depthTestEnabled:false
+// occlusionTestEnabled:true];
}
+ // Render the wireframe cube corresponding to the current scanning volume.
+ [_display.cubeRenderer renderCubeOutlineWithCameraPose:_slamState.cubePose
+ depthTestEnabled:false
+ occlusionTestEnabled:true];
+
break;
}
diff --git a/Scanner/ViewController+SLAM.mm b/Scanner/ViewController+SLAM.mm
index 88c8cdc..cdf57f6 100644
--- a/Scanner/ViewController+SLAM.mm
+++ b/Scanner/ViewController+SLAM.mm
@@ -57,15 +57,24 @@ - (void)setupSLAM
_slamState.scene = [[STScene alloc] initWithContext:_display.context];
// Initialize the camera pose tracker.
+// NSDictionary* trackerOptions = @{
+// kSTTrackerTypeKey: _dynamicOptions.depthAndColorTrackerIsOn ? @(STTrackerDepthAndColorBased) : @(STTrackerDepthBased),
+// kSTTrackerTrackAgainstModelKey: @TRUE, // tracking against the model is much better for close range scanning.
+// kSTTrackerQualityKey: @(STTrackerQualityAccurate),
+// kSTTrackerBackgroundProcessingEnabledKey: @YES,
+// kSTTrackerSceneTypeKey: @(STTrackerSceneTypeObject),
+// kSTTrackerLegacyKey: @(!_dynamicOptions.improvedTrackingIsOn)
+// };
+
NSDictionary* trackerOptions = @{
- kSTTrackerTypeKey: _dynamicOptions.depthAndColorTrackerIsOn ? @(STTrackerDepthAndColorBased) : @(STTrackerDepthBased),
+ kSTTrackerTypeKey: @(STTrackerDepthAndColorBased),
kSTTrackerTrackAgainstModelKey: @TRUE, // tracking against the model is much better for close range scanning.
kSTTrackerQualityKey: @(STTrackerQualityAccurate),
kSTTrackerBackgroundProcessingEnabledKey: @YES,
kSTTrackerSceneTypeKey: @(STTrackerSceneTypeObject),
- kSTTrackerLegacyKey: @(!_dynamicOptions.improvedTrackingIsOn)
+ kSTTrackerLegacyKey: @YES
};
-
+
// Initialize the camera pose tracker.
_slamState.tracker = [[STTracker alloc] initWithScene:_slamState.scene options:trackerOptions];
@@ -130,13 +139,15 @@ - (void)setupMapper
// Here, we set a larger volume bounds size when mapping in high resolution.
const float lowResolutionVolumeBounds = 125;
- const float highResolutionVolumeBounds = 200;
-
+// const float highResolutionVolumeBounds = 200;
+ const float highResolutionVolumeBounds = 500;
+
float voxelSizeInMeters = _slamState.volumeSizeInMeters.x /
(_dynamicOptions.highResMapping ? highResolutionVolumeBounds : lowResolutionVolumeBounds);
// Avoid voxels that are too small - these become too noisy.
- voxelSizeInMeters = keepInRange(voxelSizeInMeters, 0.003, 0.2);
+// voxelSizeInMeters = keepInRange(voxelSizeInMeters, 0.003, 0.2);
+ voxelSizeInMeters = keepInRange(voxelSizeInMeters, 0.001, 0.2);
// Compute the volume bounds in voxels, as a multiple of the volume resolution.
GLKVector3 volumeBounds;
@@ -324,8 +335,9 @@ - (void)processDepthFrame:(STDepthFrame *)depthFrame
}
// Tell the cube renderer whether there is a support plane or not.
- [_display.cubeRenderer setCubeHasSupportPlane:_slamState.cameraPoseInitializer.lastOutput.hasSupportPlane];
-
+// [_display.cubeRenderer setCubeHasSupportPlane:_slamState.cameraPoseInitializer.lastOutput.hasSupportPlane];
+ [_display.cubeRenderer setCubeHasSupportPlane:NO];
+
// Enable the scan button if the pose initializer could estimate a pose.
self.scanButton.enabled = _slamState.cameraPoseInitializer.lastOutput.hasValidPose;
break;
diff --git a/Scanner/ViewController.h b/Scanner/ViewController.h
index 75036d9..5c7dd47 100644
--- a/Scanner/ViewController.h
+++ b/Scanner/ViewController.h
@@ -99,6 +99,7 @@ struct SlamData
STMapper *mapper;
STCameraPoseInitializer *cameraPoseInitializer;
GLKMatrix4 initialDepthCameraPose = GLKMatrix4Identity;
+ GLKMatrix4 cubePose = GLKMatrix4Identity;
STKeyFrameManager *keyFrameManager;
ScannerState scannerState;
diff --git a/Scanner/ViewController.mm b/Scanner/ViewController.mm
index 063c23e..f253a91 100644
--- a/Scanner/ViewController.mm
+++ b/Scanner/ViewController.mm
@@ -223,6 +223,13 @@ - (void)enterCubePlacementState
_slamState.scannerState = ScannerStateCubePlacement;
[self updateIdleTimer];
+
+ if (_slamState.cameraPoseInitializer != nil)
+ {
+ float xOffset = _slamState.cameraPoseInitializer.volumeSizeInMeters.x / 2.0;
+ float yOffset = _slamState.cameraPoseInitializer.volumeSizeInMeters.y / 2.0;
+ _slamState.cubePose = GLKMatrix4Translate(GLKMatrix4Identity, xOffset, yOffset, -0.4);
+ }
}
- (void)enterScanningState
@@ -244,7 +251,18 @@ - (void)enterScanningState
// Prepare the mapper for the new scan.
[self setupMapper];
- _slamState.tracker.initialCameraPose = _slamState.initialDepthCameraPose;
+// _slamState.tracker.initialCameraPose = _slamState.initialDepthCameraPose;
+
+ // Make sure the pose is in color camera coordinates in case we are not using registered depth.
+ GLKMatrix4 colorCameraPoseInSensorCoordinateSpace = GLKMatrix4();
+ [[STSensorController sharedController] colorCameraPoseInSensorCoordinateFrame:colorCameraPoseInSensorCoordinateSpace.m];
+ bool invertible;
+ GLKMatrix4 colorCameraInversePose = GLKMatrix4Invert (colorCameraPoseInSensorCoordinateSpace, &invertible);
+ if (invertible)
+ {
+ _slamState.cubePose = GLKMatrix4Multiply(_slamState.cubePose, colorCameraInversePose);
+ _slamState.tracker.initialCameraPose = _slamState.cubePose;
+ }
// We will lock exposure during scanning to ensure better coloring.
_captureSession.properties = STCaptureSessionPropertiesLockAllColorCameraPropertiesToCurrent();
@@ -404,6 +422,10 @@ - (void)adjustVolumeSize:(GLKVector3)volumeSize
_slamState.cameraPoseInitializer.volumeSizeInMeters = volumeSize;
[_display.cubeRenderer adjustCubeSize:_slamState.volumeSizeInMeters];
+
+ float xOffset = _slamState.cameraPoseInitializer.volumeSizeInMeters.x / 2.0;
+ float yOffset = _slamState.cameraPoseInitializer.volumeSizeInMeters.y / 2.0;
+ _slamState.cubePose = GLKMatrix4Translate(GLKMatrix4Identity, xOffset, yOffset, -0.4);
}
- (IBAction)scanButtonPressed:(id)sender
--
2.21.0 (Apple Git-122)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.