diff --git a/ScanDepthRender.xcodeproj/project.pbxproj b/ScanDepthRender.xcodeproj/project.pbxproj
index e289994..bd1929d 100644
--- a/ScanDepthRender.xcodeproj/project.pbxproj
+++ b/ScanDepthRender.xcodeproj/project.pbxproj
@@ -8,6 +8,8 @@
 /* Begin PBXBuildFile section */
         6F482D9118CE1C4E00C11D9E /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = A6ABFC3418C8FF5000BF0851 /* Main.storyboard */; };
+        8B2EB8821C15B81700715157 /* Structure.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8B2EB8811C15B81700715157 /* Structure.framework */; };
+        8B2EB8841C15CBB100715157 /* libz.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = 8B2EB8831C15CBB100715157 /* libz.tbd */; };
         A6ABFC1F18C8FF5000BF0851 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A6ABFC1E18C8FF5000BF0851 /* Foundation.framework */; };
         A6ABFC2118C8FF5000BF0851 /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A6ABFC2018C8FF5000BF0851 /* CoreGraphics.framework */; };
         A6ABFC2318C8FF5000BF0851 /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A6ABFC2218C8FF5000BF0851 /* UIKit.framework */; };
@@ -37,8 +39,6 @@
         A6ABFC7318CA470200BF0851 /* SDRPointCloudRenderer.m in Sources */ = {isa = PBXBuildFile; fileRef = A6ABFC7218CA470200BF0851 /* SDRPointCloudRenderer.m */; };
         A6ABFC7A18CA511200BF0851 /* PointCloudShader.vsh in Resources */ = {isa = PBXBuildFile; fileRef = A6ABFC7418CA4DE600BF0851 /* PointCloudShader.vsh */; };
         A6ABFC7B18CA511200BF0851 /* PointCloudShader.fsh in Resources */ = {isa = PBXBuildFile; fileRef = A6ABFC7518CA4DE600BF0851 /* PointCloudShader.fsh */; };
-        A6D60CD7196B0954008C0A0F /* Structure.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A6D60CD6196B0954008C0A0F /* Structure.framework */; };
-        A6D60CD8196B0954008C0A0F /* Structure.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A6D60CD6196B0954008C0A0F /* Structure.framework */; };
         A6D60CDA196B0AEE008C0A0F /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A6D60CD9196B0AEE008C0A0F /* QuartzCore.framework */; };
 /* End PBXBuildFile section */
 
@@ -53,6 +53,8 @@
 /* End PBXContainerItemProxy section */
 
 /* Begin PBXFileReference section */
+        8B2EB8811C15B81700715157 /* Structure.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = Structure.framework; sourceTree = "<group>"; };
+        8B2EB8831C15CBB100715157 /* libz.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = libz.tbd; path = usr/lib/libz.tbd; sourceTree = SDKROOT; };
         A68D07DB18CBBF430050B9D7 /* AnimationControl.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AnimationControl.h; sourceTree = "<group>"; };
         A6ABFC1B18C8FF5000BF0851 /* ScanDepthRender.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = ScanDepthRender.app; sourceTree = BUILT_PRODUCTS_DIR; };
         A6ABFC1E18C8FF5000BF0851 /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = System/Library/Frameworks/Foundation.framework; sourceTree = SDKROOT; };
@@ -91,7 +93,6 @@
         A6ABFC7218CA470200BF0851 /* SDRPointCloudRenderer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = SDRPointCloudRenderer.m; sourceTree = "<group>"; };
         A6ABFC7418CA4DE600BF0851 /* PointCloudShader.vsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = PointCloudShader.vsh; path = Shaders/PointCloudShader.vsh; sourceTree = "<group>"; };
         A6ABFC7518CA4DE600BF0851 /* PointCloudShader.fsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = PointCloudShader.fsh; path = Shaders/PointCloudShader.fsh; sourceTree = "<group>"; };
-        A6D60CD6196B0954008C0A0F /* Structure.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = Structure.framework; sourceTree = "<group>"; };
         A6D60CD9196B0AEE008C0A0F /* QuartzCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = QuartzCore.framework; path = System/Library/Frameworks/QuartzCore.framework; sourceTree = SDKROOT; };
 /* End PBXFileReference section */
 
@@ -100,6 +101,8 @@
             isa = PBXFrameworksBuildPhase;
             buildActionMask = 2147483647;
             files = (
+                8B2EB8841C15CBB100715157 /* libz.tbd in Frameworks */,
+                8B2EB8821C15B81700715157 /* Structure.framework in Frameworks */,
                 A6D60CDA196B0AEE008C0A0F /* QuartzCore.framework in Frameworks */,
                 A6ABFC6D18C93F3F00BF0851 /* AVFoundation.framework in Frameworks */,
                 A6ABFC1F18C8FF5000BF0851 /* Foundation.framework in Frameworks */,
@@ -107,7 +110,6 @@
                 A6ABFC2718C8FF5000BF0851 /* OpenGLES.framework in Frameworks */,
                 A6ABFC6818C9027B00BF0851 /* ImageIO.framework in Frameworks */,
                 A6ABFC6618C9024E00BF0851 /* CoreMedia.framework in Frameworks */,
-                A6D60CD7196B0954008C0A0F /* Structure.framework in Frameworks */,
                 A6ABFC6418C9023E00BF0851 /* CoreImage.framework in Frameworks */,
                 A6ABFC6218C9023100BF0851 /* ExternalAccessory.framework in Frameworks */,
                 A6ABFC5F18C9022000BF0851 /* CoreMotion.framework in Frameworks */,
@@ -122,7 +124,6 @@
             isa = PBXFrameworksBuildPhase;
             buildActionMask = 2147483647;
             files = (
-                A6D60CD8196B0954008C0A0F /* Structure.framework in Frameworks */,
                 A6ABFC4618C8FF5000BF0851 /* XCTest.framework in Frameworks */,
                 A6ABFC4818C8FF5000BF0851 /* UIKit.framework in Frameworks */,
                 A6ABFC4718C8FF5000BF0851 /* Foundation.framework in Frameworks */,
@@ -154,8 +155,9 @@
         A6ABFC1D18C8FF5000BF0851 /* Frameworks */ = {
             isa = PBXGroup;
             children = (
+                8B2EB8831C15CBB100715157 /* libz.tbd */,
+                8B2EB8811C15B81700715157 /* Structure.framework */,
                 A6D60CD9196B0AEE008C0A0F /* QuartzCore.framework */,
-                A6D60CD6196B0954008C0A0F /* Structure.framework */,
                 A6ABFC6B18C902AE00BF0851 /* Accelerate.framework */,
                 A6ABFC6918C9029100BF0851 /* AVFoundation.framework */,
                 A6ABFC2018C8FF5000BF0851 /* CoreGraphics.framework */,
@@ -271,7 +273,7 @@
             isa = PBXProject;
             attributes = {
                 CLASSPREFIX = SDR;
-                LastUpgradeCheck = 0510;
+                LastUpgradeCheck = 0710;
                 ORGANIZATIONNAME = "Nigel Choi";
                 TargetAttributes = {
                     A6ABFC4318C8FF5000BF0851 = {
@@ -400,6 +402,8 @@
                 CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
                 "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
                 COPY_PHASE_STRIP = NO;
+                ENABLE_BITCODE = NO;
+                ENABLE_TESTABILITY = YES;
                 GCC_C_LANGUAGE_STANDARD = gnu99;
                 GCC_DYNAMIC_NO_PIC = NO;
                 GCC_OPTIMIZATION_LEVEL = 0;
@@ -439,6 +443,7 @@
                 CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
                 "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
                 COPY_PHASE_STRIP = YES;
+                ENABLE_BITCODE = NO;
                 ENABLE_NS_ASSERTIONS = NO;
                 GCC_C_LANGUAGE_STANDARD = gnu99;
                 GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
@@ -467,6 +472,7 @@
                 GCC_PRECOMPILE_PREFIX_HEADER = YES;
                 GCC_PREFIX_HEADER = "ScanDepthRender/ScanDepthRender-Prefix.pch";
                 INFOPLIST_FILE = "ScanDepthRender/ScanDepthRender-Info.plist";
+                PRODUCT_BUNDLE_IDENTIFIER = "com.9gel.${PRODUCT_NAME:rfc1034identifier}";
                 PRODUCT_NAME = "$(TARGET_NAME)";
                 WRAPPER_EXTENSION = app;
             };
@@ -485,6 +491,7 @@
                 GCC_PRECOMPILE_PREFIX_HEADER = YES;
                 GCC_PREFIX_HEADER = "ScanDepthRender/ScanDepthRender-Prefix.pch";
                 INFOPLIST_FILE = "ScanDepthRender/ScanDepthRender-Info.plist";
+                PRODUCT_BUNDLE_IDENTIFIER = "com.9gel.${PRODUCT_NAME:rfc1034identifier}";
                 PRODUCT_NAME = "$(TARGET_NAME)";
                 WRAPPER_EXTENSION = app;
             };
@@ -507,6 +514,7 @@
                     "$(inherited)",
                 );
                 INFOPLIST_FILE = "ScanDepthRenderTests/ScanDepthRenderTests-Info.plist";
+                PRODUCT_BUNDLE_IDENTIFIER = "com.9gel.${PRODUCT_NAME:rfc1034identifier}";
                 PRODUCT_NAME = "$(TARGET_NAME)";
                 TEST_HOST = "$(BUNDLE_LOADER)";
                 WRAPPER_EXTENSION = xctest;
@@ -526,6 +534,7 @@
                 GCC_PRECOMPILE_PREFIX_HEADER = YES;
                 GCC_PREFIX_HEADER = "ScanDepthRender/ScanDepthRender-Prefix.pch";
                 INFOPLIST_FILE = "ScanDepthRenderTests/ScanDepthRenderTests-Info.plist";
+                PRODUCT_BUNDLE_IDENTIFIER = "com.9gel.${PRODUCT_NAME:rfc1034identifier}";
                 PRODUCT_NAME = "$(TARGET_NAME)";
                 TEST_HOST = "$(BUNDLE_LOADER)";
                 WRAPPER_EXTENSION = xctest;
diff --git a/ScanDepthRender/SDRExampleRenderer.h b/ScanDepthRender/SDRExampleRenderer.h
index a70e78d..e96bd61 100644
--- a/ScanDepthRender/SDRExampleRenderer.h
+++ b/ScanDepthRender/SDRExampleRenderer.h
@@ -7,6 +7,8 @@
 //
 
 #import <GLKit/GLKit.h>
+#include <cmath>
+#include <cstdlib>
 
 @interface SDRExampleRenderer : NSObject
diff --git a/ScanDepthRender/SDRExampleRenderer.m b/ScanDepthRender/SDRExampleRenderer.m
index 18dd572..c899701 100644
--- a/ScanDepthRender/SDRExampleRenderer.m
+++ b/ScanDepthRender/SDRExampleRenderer.m
@@ -161,7 +161,7 @@ - (GLKViewDrawableDepthFormat)drawableDepthFormat
 
 - (void)updateWithBounds:(CGRect)bounds timeSinceLastUpdate:(NSTimeInterval)timeSinceLastUpdate
 {
-    float aspect = fabsf(bounds.size.width / bounds.size.height);
+    float aspect = std::abs(bounds.size.width / bounds.size.height);
     GLKMatrix4 projectionMatrix = GLKMatrix4MakePerspective(GLKMathDegreesToRadians(65.0f), aspect, 0.1f, 100.0f);
 
     self.effect.transform.projectionMatrix = projectionMatrix;
diff --git a/ScanDepthRender/SDRPointCloudRenderer.h b/ScanDepthRender/SDRPointCloudRenderer.h
index 16e3f83..a354879 100644
--- a/ScanDepthRender/SDRPointCloudRenderer.h
+++ b/ScanDepthRender/SDRPointCloudRenderer.h
@@ -7,6 +7,8 @@
 //
 
 #import <GLKit/GLKit.h>
+#include <cmath>
+#include <cstdlib>
 #import <Structure/Structure.h>
 
 @interface SDRPointCloudRenderer : NSObject
@@ -22,6 +24,6 @@
                 modelView:(GLKMatrix4)modelView
                  invScale:(float)invScale;
 - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect;
-- (void)updatePointsWithDepth:(STFloatDepthFrame*)depthFrame image:(CGImageRef)image;
+- (void)updatePointsWithDepth:(STDepthFrame*)depthFrame image:(CGImageRef)image;
 
 @end
diff --git a/ScanDepthRender/SDRPointCloudRenderer.m b/ScanDepthRender/SDRPointCloudRenderer.m
index eaf71fd..cfc25bc 100644
--- a/ScanDepthRender/SDRPointCloudRenderer.m
+++ b/ScanDepthRender/SDRPointCloudRenderer.m
@@ -181,7 +181,7 @@ - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect
     glDrawArrays(GL_POINTS, NUM_TEST_POINTS, (GLsizei)(_cols*_rows));
 }
 
-- (void)updatePointsWithDepth:(STFloatDepthFrame*)depthFrame image:(CGImageRef)imageRef;
+- (void)updatePointsWithDepth:(STDepthFrame*)depthFrame image:(CGImageRef)imageRef;
 {
     if (imageRef)
     {
@@ -203,7 +203,7 @@ - (void)updatePointsWithDepth:(STDepthFrame*)depthFrame image:(CGImageRef)i
     if (depthFrame)
     {
         float *data = (GLfloat *)_pointsData.mutableBytes;
-        const float *depths = [depthFrame depthAsMillimeters];
+        const float *depths = [depthFrame depthInMillimeters];
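+        // Note: values are in millimeters; pixels without a valid depth reading
+        // are expected to come back as NaN, so consumers should check for that.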
 
         for (int r = 0; r < _rows; r++)
         {
diff --git a/ScanDepthRender/SDRViewController.h b/ScanDepthRender/SDRViewController.h
index 015ab15..d034bc2 100644
--- a/ScanDepthRender/SDRViewController.h
+++ b/ScanDepthRender/SDRViewController.h
@@ -15,5 +15,5 @@
 @property (weak,nonatomic) IBOutlet UILabel* statusLabel;
 @property (weak,nonatomic) IBOutlet UIImageView* depthImageView;
 @property (weak,nonatomic) IBOutlet UIImageView* cameraImageView;
-
+@property (nonatomic, retain) AVCaptureDevice *videoDevice;
 @end
diff --git a/ScanDepthRender/SDRViewController.m b/ScanDepthRender/SDRViewController.m
index b94b7f6..2e1c859 100644
--- a/ScanDepthRender/SDRViewController.m
+++ b/ScanDepthRender/SDRViewController.m
@@ -36,15 +36,84 @@
 #define CAMERA_PRESET AVCaptureSessionPreset352x288
 #endif
 
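+// Note: the AppStatus and Options structs below rely on C++ member initializers,
+// so this file has to be compiled as Objective-C++ (which is also what makes the
+// std::abs call further down available).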
+struct AppStatus {
+    NSString* const pleaseConnectSensorMessage = @"Please connect Structure Sensor.";
+    NSString* const pleaseChargeSensorMessage = @"Please charge Structure Sensor.";
+    NSString* const needColorCameraAccessMessage = @"This app requires camera access to capture color.\nAllow access by going to Settings → Privacy → Camera.";
+
+    enum SensorStatus
+    {
+        SensorStatusOk,
+        SensorStatusNeedsUserToConnect,
+        SensorStatusNeedsUserToCharge,
+    };
+
+    // Structure Sensor status.
+    SensorStatus sensorStatus = SensorStatusOk;
+
+    // Whether iOS camera access was granted by the user.
+    bool colorCameraIsAuthorized = true;
+
+    // Whether there is currently a message to show.
+    bool needsDisplayOfStatusMessage = false;
+
+    // Flag to disable status message display entirely.
+    bool statusMessageDisabled = false;
+};
+
+struct Options {
+    // The initial scanning volume size will be 0.5 x 0.5 x 0.5 meters
+    // (X is left-right, Y is up-down, Z is forward-back)
+    GLKVector3 initialVolumeSizeInMeters = GLKVector3Make(0.5f, 0.5f, 0.5f);
+
+    // Volume resolution in meters
+    float initialVolumeResolutionInMeters = 0.004; // 4 mm per voxel
+
+    // The maximum number of keyframes saved in keyFrameManager
+    int maxNumKeyFrames = 48;
+
+    // Colorizer quality
+    STColorizerQuality colorizerQuality = STColorizerHighQuality;
+
+    // Take a new keyframe if the rotation difference is higher than 20 degrees.
+    float maxKeyFrameRotation = 20.0f * (M_PI / 180.f); // 20 degrees
+
+    // Take a new keyframe if the translation difference is higher than 30 cm.
+    float maxKeyFrameTranslation = 0.3; // 30 cm
+
+    // Threshold to consider that the rotation motion was small enough for a frame to be accepted
+    // as a keyframe. This avoids capturing keyframes with strong motion blur / rolling shutter.
+    float maxKeyframeRotationSpeedInDegreesPerSecond = 1.f;
+
+    // Whether we should use depth aligned to the color viewpoint, when the Structure Sensor has
+    // been calibrated. This setting may get overwritten to false if no color camera can be used.
+    bool useHardwareRegisteredDepth = true;
+
+    // Whether the colorizer should try harder to preserve the appearance of the first keyframe.
+    // Recommended for face scans.
+    bool prioritizeFirstFrameColor = true;
+
+    // Target number of faces of the final textured mesh.
+    int colorizerTargetNumFaces = 50000;
+
+    // Focus position for the color camera (between 0 and 1). Must remain fixed once depth
+    // streaming has started when using hardware registered depth.
+    const float lensPosition = 0.75f;
+};
+
 @interface SDRViewController () {
     RENDERER_CLASS *_renderer;
     AnimationControl *_animation;
     STSensorController *_sensorController;
-    STFloatDepthFrame *_depthFrame;
+    STDepthFrame *_depthFrame;
     STDepthToRgba *_depthToRgba;
 
     AVCaptureSession *_avsession;
+    AVCaptureDevice *_videoDevice;
+
+    AppStatus _appStatus;
+    Options _options;
 }
 @property (strong, nonatomic) EAGLContext *context;
 @end
@@ -74,16 +143,16 @@ - (void)viewDidLoad
 
     // Structure setup
     _sensorController = [STSensorController sharedController];
     _sensorController.delegate = self;
-    [_sensorController setFrameSyncConfig:FRAME_SYNC_CONFIG];
+    //[_sensorController setFrameSyncConfig:FRAME_SYNC_CONFIG];
 
-    _depthFrame = [[STFloatDepthFrame alloc] init];
+    _depthFrame = [[STDepthFrame alloc] init];
 
     // When the app enters the foreground, we can choose to restart the stream
     [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(appWillEnterForeground) name:UIApplicationWillEnterForegroundNotification object:nil];
 
     // Color camera
 #if !TARGET_IPHONE_SIMULATOR
-    [self startAVCaptureSession];
+    [self setupColorCamera];
 #endif
 }
@@ -95,7 +164,7 @@ - (void)viewDidAppear:(BOOL)animated
         fromLaunch = false;
     }
 
-    float aspect = fabsf(self.view.bounds.size.width / self.view.bounds.size.height);
+    float aspect = std::abs(self.view.bounds.size.width / self.view.bounds.size.height);
     GLKMatrix4 projectionMatrix = GLKMatrix4MakePerspective(GLKMathDegreesToRadians(42.87436f), aspect, 0.1f, 100.0f);
     _animation->setInitProjectionRt(projectionMatrix);
     _animation->setMeshCenter(GLKVector3Make(0.0f, 0.0f, -0.6666f));
@@ -154,16 +223,35 @@ - (bool)connectAndStartStreaming
 
     if (didSucceed)
     {
-        STSensorInfo *sensorInfo = [_sensorController getSensorInfo:STREAM_CONFIG];
-        if (!sensorInfo) {
-            self.statusLabel.text = @"Error getting Structure Sensor Info.";
-            return false;
-        }
-
-        _depthToRgba = [[STDepthToRgba alloc] initWithSensorInfo:sensorInfo];
+        // There's no status about the sensor that we need to display anymore
+        _appStatus.sensorStatus = AppStatus::SensorStatusOk;
+        [self updateAppStatusMessage];
+
+        // Start the color camera, setting it up first if needed
+        [self startColorCamera];
+
+        // Set sensor stream quality
+        STStreamConfig streamConfig = _options.useHardwareRegisteredDepth ? STStreamConfigRegisteredDepth640x480 : STStreamConfigDepth640x480;
+
+        // Request that we receive depth frames with synchronized color pairs
         // After this call, we will start to receive frames through the delegate methods
-        [_sensorController startStreamingWithConfig:STREAM_CONFIG];
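+        // startStreamingWithOptions:error: replaces the older separate setFrameSyncConfig:
+        // and startStreamingWithConfig: calls; the stream config, frame-sync mode, and the
+        // fixed color lens position are now all passed in a single options dictionary.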
+        NSError* error = nil;
+        BOOL optionsAreValid = [_sensorController startStreamingWithOptions:@{kSTStreamConfigKey : @(streamConfig),
+                                                                              kSTFrameSyncConfigKey : @(STFrameSyncDepthAndRgb),
+                                                                              kSTColorCameraFixedLensPositionKey: @(_options.lensPosition)}
+                                                                       error:&error];
+
+        _depthToRgba = [[STDepthToRgba alloc] init];
+
+        if (!optionsAreValid)
+        {
+            NSLog(@"Error during streaming start: %s", [[error localizedDescription] UTF8String]);
+            self.statusLabel.text = @"Error during streaming start.";
+            return false;
+        }
 
         // Now that we've started streaming, hide the status label
         self.statusLabel.hidden = YES;
@@ -185,6 +273,82 @@
     return didSucceed;
 }
 
+////////////////////////////////////////////////////
+
+- (void)showAppStatusMessage:(NSString *)msg {
+    _appStatus.needsDisplayOfStatusMessage = true;
+    [self.view.layer removeAllAnimations];
+
+    [_statusLabel setText:msg];
+    [_statusLabel setHidden:NO];
+
+    // Progressively show the message label.
+    [self.view setUserInteractionEnabled:false];
+    [UIView animateWithDuration:0.5f animations:^{
+        _statusLabel.alpha = 1.0f;
+    } completion:nil];
+}
+
+- (void)hideAppStatusMessage {
+    _appStatus.needsDisplayOfStatusMessage = false;
+    [self.view.layer removeAllAnimations];
+
+    [UIView animateWithDuration:0.5f
+                     animations:^{
+                         _statusLabel.alpha = 0.0f;
+                     }
+                     completion:^(BOOL finished) {
+                         // Only hide the label for good if nobody called showAppStatusMessage
+                         // again while the animation was running.
+                         if (!_appStatus.needsDisplayOfStatusMessage)
+                         {
+                             [_statusLabel setHidden:YES];
+                             [self.view setUserInteractionEnabled:true];
+                         }
+                     }];
+}
+
+- (void)updateAppStatusMessage {
+    // Skip everything if we should not show app status messages (e.g. in viewing state).
+    if (_appStatus.statusMessageDisabled)
+    {
+        [self hideAppStatusMessage];
+        return;
+    }
+
+    // First show sensor issues, if any.
+    switch (_appStatus.sensorStatus)
+    {
+        case AppStatus::SensorStatusOk:
+        {
+            break;
+        }
+
+        case AppStatus::SensorStatusNeedsUserToConnect:
+        {
+            [self showAppStatusMessage:_appStatus.pleaseConnectSensorMessage];
+            return;
+        }
+
+        case AppStatus::SensorStatusNeedsUserToCharge:
+        {
+            [self showAppStatusMessage:_appStatus.pleaseChargeSensorMessage];
+            return;
+        }
+    }
+
+    // Then show color camera permission issues, if any.
+    if (!_appStatus.colorCameraIsAuthorized)
+    {
+        [self showAppStatusMessage:_appStatus.needColorCameraAccessMessage];
+        return;
+    }
+
+    // If we reach this point, there is no status to show.
+    [self hideAppStatusMessage];
+}
+
+
 - (void) setupGestureRecognizer
 {
     UIPinchGestureRecognizer *pinchScaleGesture = [[UIPinchGestureRecognizer alloc]
@@ -230,6 +394,9 @@
 - (void)sensorDidDisconnect
 {
     self.statusLabel.hidden = NO;
     self.statusLabel.text = @"Structure Sensor disconnected!";
+
+    // Stop the color camera when there isn't a connected Structure Sensor
+    [self stopColorCamera];
 }
 
 - (void)sensorDidConnect
@@ -255,6 +422,9 @@
 - (void)sensorDidStopStreaming:(STSensorControllerDidStopStreamingReason)reason
 {
     self.statusLabel.hidden = NO;
     self.statusLabel.text = @"Structure Sensor stopped streaming";
+
+    // Stop the color camera when there isn't a connected Structure Sensor
+    [self stopColorCamera];
 }
 
 - (void)sensorDidOutputDepthFrame:(STDepthFrame*)depthFrame
@@ -268,10 +438,10 @@
 // Tell the SDK we want framesync: [_ocSensorController setFrameSyncConfig:FRAME_SYNC_DEPTH_AND_RGB];
 // Give the SDK color frames as they come in: [_ocSensorController frameSyncNewColorImage:sampleBuffer];
 - (void)sensorDidOutputSynchronizedDepthFrame:(STDepthFrame*)depthFrame
-                                andColorFrame:(CMSampleBufferRef)sampleBuffer
+                                andColorFrame:(STColorFrame *)colorFrame
 {
     [self renderDepthFrame:depthFrame];
-    [self renderColorFrame:sampleBuffer];
+    [self renderColorFrame:colorFrame.sampleBuffer];
 
     [_renderer updatePointsWithDepth:_depthFrame image:_cameraImageView.image.CGImage];
 }
@@ -279,8 +449,8 @@ - (void)sensorDidOutputSynchronizedDepthFrame:(STDepthFrame*)depthFrame
 
 - (void)renderDepthFrame:(STDepthFrame*)depthFrame
 {
-    [_depthFrame updateFromDepthFrame:depthFrame];
-    uint8_t *rgbaData = [_depthToRgba convertDepthToRgba:_depthFrame];
+    _depthFrame = depthFrame;
+    uint8_t *rgbaData = [_depthToRgba convertDepthFrameToRgba:_depthFrame];
 
     CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
@@ -289,14 +459,14 @@ - (void)renderDepthFrame:(STDepthFrame*)depthFrame
     bitmapInfo |= kCGBitmapByteOrder16Big;
 
-    NSData *data = [NSData dataWithBytes:rgbaData length:depthFrame->width * depthFrame->height * sizeof(uint32_t)];
+    NSData *data = [NSData dataWithBytes:rgbaData length:depthFrame.width * depthFrame.height * sizeof(uint32_t)];
     CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
 
-    CGImageRef cgImage = CGImageCreate(depthFrame->width,
-                                       depthFrame->height,
+    CGImageRef cgImage = CGImageCreate(depthFrame.width,
+                                       depthFrame.height,
                                        8,
                                        32,
-                                       depthFrame->width * sizeof(uint32_t),
+                                       depthFrame.width * sizeof(uint32_t),
                                        colorSpace,
                                        bitmapInfo,
                                        provider,
@@ -355,89 +525,168 @@ - (void)renderColorFrame:(CMSampleBufferRef)sampleBuffer
     CGColorSpaceRelease(colorSpace);
 }
 
-#pragma mark - Camera
+#pragma mark - AVFoundation
+
+- (BOOL)queryCameraAuthorizationStatusAndNotifyUserIfNotGranted {
+    // This API was introduced in iOS 7, but in iOS 8 it's actually enforced.
+    if ([AVCaptureDevice respondsToSelector:@selector(authorizationStatusForMediaType:)])
+    {
+        AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
+
+        if (authStatus != AVAuthorizationStatusAuthorized)
+        {
+            NSLog(@"Not authorized to use the camera!");
+
+            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
+                                     completionHandler:^(BOOL granted)
+             {
+                 // This block fires on a separate thread, so we need to ensure any actions here
+                 // are sent to the right place.
+
+                 // If the request is granted, let's try again to start an AVFoundation session.
+                 // Otherwise, alert the user that things won't go well.
+                 if (granted)
+                 {
+                     dispatch_async(dispatch_get_main_queue(), ^(void) {
+                         [self startColorCamera];
+
+                         _appStatus.colorCameraIsAuthorized = true;
+                         [self updateAppStatusMessage];
+                     });
+                 }
+             }];
+
+            return false;
+        }
+    }
+
+    return true;
+}
 
-- (void)startAVCaptureSession
+- (void)setupColorCamera
 {
-    NSString* sessionPreset = CAMERA_PRESET;
+    // If already setup, skip it
+    if (_avsession)
+        return;
+
+    bool cameraAccessAuthorized = [self queryCameraAuthorizationStatusAndNotifyUserIfNotGranted];
 
-    //-- Setup Capture Session.
+    if (!cameraAccessAuthorized)
+    {
+        _appStatus.colorCameraIsAuthorized = false;
+        [self updateAppStatusMessage];
+        return;
+    }
+
+    // Use VGA color.
+    NSString *sessionPreset = AVCaptureSessionPreset640x480;
+
+    // Set up Capture Session.
     _avsession = [[AVCaptureSession alloc] init];
     [_avsession beginConfiguration];
 
-    //-- Set preset session size.
+    // Set preset session size.
     [_avsession setSessionPreset:sessionPreset];
 
-    //-- Creata a video device and input from that Device.  Add the input to the capture session.
-    AVCaptureDevice * videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
-    if(videoDevice == nil)
+    // Create a video device and input from that Device. Add the input to the capture session.
+    _videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
+    if (_videoDevice == nil)
         assert(0);
 
+    // Configure Focus, Exposure, and White Balance
     NSError *error;
-    [videoDevice lockForConfiguration:&error];
-
-    // Auto-focus Auto-exposure, auto-white balance
-    if ([[[UIDevice currentDevice] systemVersion] compare:@"7.0" options:NSNumericSearch] != NSOrderedAscending)
-        [videoDevice setAutoFocusRangeRestriction:AVCaptureAutoFocusRangeRestrictionFar];
-    [videoDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
-    [videoDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
-    [videoDevice setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
-
-    [videoDevice unlockForConfiguration];
+    // Use auto-exposure and auto-white balance, and lock the focus to a fixed lens position.
+    if ([_videoDevice lockForConfiguration:&error])
+    {
+        // Allow exposure to change
+        if ([_videoDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure])
+            [_videoDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
+
+        // Allow white balance to change
+        if ([_videoDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance])
+            [_videoDevice setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
+
+        // Lock the focus to the same fixed lens position that we pass to the SDK via
+        // kSTColorCameraFixedLensPositionKey, to get the best color/depth alignment.
+        [_videoDevice setFocusModeLockedWithLensPosition:_options.lensPosition completionHandler:nil];
+
+        [_videoDevice unlockForConfiguration];
+    }
 
-    //-- Add the device to the session.
-    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
-    if(error)
+    // Add the device to the session.
+    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:_videoDevice error:&error];
+    if (error)
+    {
+        NSLog(@"Cannot initialize AVCaptureDeviceInput");
         assert(0);
+    }
 
     [_avsession addInput:input]; // After this point, captureSession captureOptions are filled.
 
-    //-- Create the output for the capture session.
-    AVCaptureVideoDataOutput * dataOutput = [[AVCaptureVideoDataOutput alloc] init];
+    // Create the output for the capture session.
+    AVCaptureVideoDataOutput* dataOutput = [[AVCaptureVideoDataOutput alloc] init];
 
+    // We don't want to process late frames.
     [dataOutput setAlwaysDiscardsLateVideoFrames:YES];
 
-    //-- Set to YUV420.
-    [dataOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
-                                                             forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
+    // Use BGRA pixel format.
+    [dataOutput setVideoSettings:[NSDictionary
+                                  dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
+                                  forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
 
     // Set dispatch to be on the main thread so OpenGL can do things with the data
     [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
 
     [_avsession addOutput:dataOutput];
 
-    if ([[[UIDevice currentDevice] systemVersion] compare:@"7.0" options:NSNumericSearch] != NSOrderedAscending)
-    {
-        [videoDevice lockForConfiguration:&error];
-        [videoDevice setActiveVideoMaxFrameDuration:CMTimeMake(1, 30)];
-        [videoDevice setActiveVideoMinFrameDuration:CMTimeMake(1, 30)];
-        [videoDevice unlockForConfiguration];
-    }
-    else
+    if ([_videoDevice lockForConfiguration:&error])
     {
-        AVCaptureConnection *conn = [dataOutput connectionWithMediaType:AVMediaTypeVideo];
-
-        // Deprecated use is OK here because we're using the correct APIs on iOS 7 above when available
-        // If we're running before iOS 7, we still really want 30 fps!
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wdeprecated-declarations"
-        conn.videoMinFrameDuration = CMTimeMake(1, 30);
-        conn.videoMaxFrameDuration = CMTimeMake(1, 30);
-#pragma clang diagnostic pop
-
+        [_videoDevice setActiveVideoMaxFrameDuration:CMTimeMake(1, 30)];
+        [_videoDevice setActiveVideoMinFrameDuration:CMTimeMake(1, 30)];
+        [_videoDevice unlockForConfiguration];
     }
 
+    [_avsession commitConfiguration];
+}
+
+- (void)startColorCamera
+{
+    if (_avsession && [_avsession isRunning])
+        return;
+
+    // Re-setup so the focus stays locked even when coming back from the background.
+    if (_avsession == nil)
+        [self setupColorCamera];
+
+    // Start streaming color images.
     [_avsession startRunning];
 }
 
+- (void)stopColorCamera
+{
+    if ([_avsession isRunning])
+    {
+        // Stop the session
+        [_avsession stopRunning];
+    }
+
+    _avsession = nil;
+    _videoDevice = nil;
+}
 
 - (void) captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
        fromConnection:(AVCaptureConnection *)connection
 {
 #ifdef SCAN_DO_SYNC
     // Pass into the driver. The sampleBuffer will return later with a synchronized depth pair.
-    [_sensorController frameSyncNewColorImage:sampleBuffer];
+    [_sensorController frameSyncNewColorBuffer:sampleBuffer];
 #else
     [self renderColorFrame:sampleBuffer];
     [_renderer updatePointsWithDepth:nil image:_cameraImageView.image.CGImage];
diff --git a/ScanDepthRender/ScanDepthRender-Info.plist b/ScanDepthRender/ScanDepthRender-Info.plist
index c1d6252..84f0711 100644
--- a/ScanDepthRender/ScanDepthRender-Info.plist
+++ b/ScanDepthRender/ScanDepthRender-Info.plist
@@ -9,7 +9,7 @@
 	<key>CFBundleExecutable</key>
 	<string>${EXECUTABLE_NAME}</string>
 	<key>CFBundleIdentifier</key>
-	<string>com.9gel.${PRODUCT_NAME:rfc1034identifier}</string>
+	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
 	<key>CFBundleInfoDictionaryVersion</key>
 	<string>6.0</string>
 	<key>CFBundleName</key>
diff --git a/ScanDepthRenderTests/ScanDepthRenderTests-Info.plist b/ScanDepthRenderTests/ScanDepthRenderTests-Info.plist
index d06fa56..169b6f7 100644
--- a/ScanDepthRenderTests/ScanDepthRenderTests-Info.plist
+++ b/ScanDepthRenderTests/ScanDepthRenderTests-Info.plist
@@ -7,7 +7,7 @@
 	<key>CFBundleExecutable</key>
 	<string>${EXECUTABLE_NAME}</string>
 	<key>CFBundleIdentifier</key>
-	<string>com.9gel.${PRODUCT_NAME:rfc1034identifier}</string>
+	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
 	<key>CFBundleInfoDictionaryVersion</key>
 	<string>6.0</string>
 	<key>CFBundlePackageType</key>