Commit 959a9796 authored by Apple

Final Xcode 11 and Swift 5 updates.

parent 2b460751
......@@ -7,4 +7,15 @@
# Xcode - User files
xcuserdata/
*.xcworkspace
**/*.xcodeproj/project.xcworkspace/*
!**/*.xcodeproj/project.xcworkspace/xcshareddata
**/*.xcodeproj/project.xcworkspace/xcshareddata/*
!**/*.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings
**/*.playground/playground.xcworkspace/*
!**/*.playground/playground.xcworkspace/xcshareddata
**/*.playground/playground.xcworkspace/xcshareddata/*
!**/*.playground/playground.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings
-Copyright © 2018 Apple Inc.
+Copyright © 2019 Apple Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
......
......@@ -4,15 +4,15 @@ Visualize depth data in 2D and 3D from the TrueDepth camera.
## Overview
The TrueDepth camera provides depth data in real time that allows you to determine the distance of a pixel from the front-facing camera. This sample demonstrates how to use the AVFoundation framework’s capture API to read data from the TrueDepth camera, and how to display it in an intuitive fashion onscreen.
The sample shows two different views: a 2D view that distinguishes depth values by mapping depth to color, and a 3D view that renders data as a point cloud.
To see this sample app in action, build and run the project in Xcode on an iOS device running iOS 11 or later. Because Xcode doesn’t have access to the TrueDepth camera, this sample will not build or run in the Xcode simulator.
## Set Up a Capture Session
Set up an `AVCaptureSession` on a separate thread via the session queue. Initialize this session queue before configuring the camera for capture, like so:
``` swift
private let sessionQueue = DispatchQueue(label: "session queue", attributes: [], autoreleaseFrequency: .workItem)
......@@ -26,7 +26,7 @@ sessionQueue.async {
}
```
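For context, here is a minimal sketch of what that configuration might look like on the session queue: finding the TrueDepth camera and adding it as an input (the `session` property and the error handling are assumptions, not verbatim sample code):

``` swift
sessionQueue.async {
    // Sketch: locate the front-facing TrueDepth camera and wire it into the session.
    guard let device = AVCaptureDevice.default(.builtInTrueDepthCamera,
                                               for: .video,
                                               position: .front),
          let input = try? AVCaptureDeviceInput(device: device) else {
        return // no TrueDepth camera available
    }
    self.session.beginConfiguration()
    if self.session.canAddInput(input) {
        self.session.addInput(input)
    }
    self.session.commitConfiguration()
}
```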
Setting up the TrueDepth camera for depth capture follows many of the same steps as normal video capture. See [Setting Up a Capture Session](https://developer.apple.com/documentation/avfoundation/cameras_and_media_capture/setting_up_a_capture_session) for details on configuring a streaming capture session.
On top of normal setup, request depth data by declaring a separate output:
......@@ -69,20 +69,20 @@ do {
``` swift
}
```
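The `do` block above is truncated by the diff; a hedged sketch of the depth output it sets up (the property names follow the sample's conventions but are assumptions here):

``` swift
let depthDataOutput = AVCaptureDepthDataOutput()
if session.canAddOutput(depthDataOutput) {
    session.addOutput(depthDataOutput)
    // Deliver raw depth values; enable filtering instead for temporally smoothed data.
    depthDataOutput.isFilteringEnabled = false
    if let connection = depthDataOutput.connection(with: .depthData) {
        connection.isEnabled = true
    }
}
```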
Synchronize the normal RGB video data with depth data output. The first output in the `dataOutputs` array is the master output.
``` swift
outputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [videoDataOutput, depthDataOutput])
outputSynchronizer!.setDelegate(self, queue: dataOutputQueue)
```
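The synchronizer then delivers time-matched pairs through its delegate. A minimal sketch of that callback, assuming the `videoDataOutput` and `depthDataOutput` properties created above:

``` swift
func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer,
                            didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {
    // Pull the depth and video samples that share a timestamp; skip dropped frames.
    guard let syncedDepth = synchronizedDataCollection.synchronizedData(for: depthDataOutput)
            as? AVCaptureSynchronizedDepthData, !syncedDepth.depthDataWasDropped,
          let syncedVideo = synchronizedDataCollection.synchronizedData(for: videoDataOutput)
            as? AVCaptureSynchronizedSampleBufferData, !syncedVideo.sampleBufferWasDropped else {
        return
    }
    let depthData = syncedDepth.depthData       // AVDepthData for this frame
    let videoBuffer = syncedVideo.sampleBuffer  // the matching RGB frame
    // ... hand both to the 2D or 3D renderer ...
}
```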
The `CameraViewController` implementation creates and manages this session to interface with the camera. It also contains UI to toggle between the two viewing modes, 2D and 3D.
## Visualize Depth Data in 2D
The sample uses JET color coding to distinguish depth values, ranging from red (close) to blue (far). A slider controls the blending of the color code and the actual color values. Touching a pixel displays its depth value.
`DepthToJETConverter` performs the conversion. It separates the color spectrum into histogram bins, colors a Metal texture from depth values obtained in the image buffer, and renders that texture into the preview.
``` swift
var cvTextureOut: CVMetalTexture?
......@@ -96,14 +96,14 @@ guard let cvTexture = cvTextureOut, let texture = CVMetalTextureGetTexture(cvTex
```
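The truncated guard above unwraps a texture created through a `CVMetalTextureCache`. The full pattern looks roughly like this (the `textureCache` and `pixelBuffer` names are assumptions):

``` swift
var cvTextureOut: CVMetalTexture?
// Wrap the depth pixel buffer in a Metal texture; .r16Float matches DepthFloat16 data.
CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                          textureCache,
                                          pixelBuffer,
                                          nil,
                                          .r16Float,
                                          CVPixelBufferGetWidth(pixelBuffer),
                                          CVPixelBufferGetHeight(pixelBuffer),
                                          0,
                                          &cvTextureOut)
guard let cvTexture = cvTextureOut, let texture = CVMetalTextureGetTexture(cvTexture) else {
    print("Failed to create a Metal texture from the depth buffer")
    return
}
```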
## Visualize Depth Data in 3D
The sample’s 3D viewer renders data as a point cloud. Control the camera with the following gestures:
* Pinch to zoom.
* Pan to move the camera around the center.
* Rotate with two fingers to turn the camera angle.
* Double-tap the screen to reset the initial position.
The sample implements a 3D point cloud as a `PointCloudMetalView`. It uses a Metal vertex shader to control geometry and a Metal fragment shader to color individual vertices, keeping the depth texture and color texture separate:
``` objective-c
CVMetalTextureCacheRef _depthTextureCache;
......@@ -155,7 +155,7 @@ id<MTLTexture> colorTexture = CVMetalTextureGetTexture(cvColorTexture);
```
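The vertex shader unprojects each depth pixel into camera space using the pinhole model: the focal lengths sit on the diagonal of the camera intrinsic matrix and the principal point in its last column. The same math as a hypothetical Swift helper:

``` swift
import simd

// Sketch: unproject a depth-map pixel to a 3D camera-space point.
func unproject(pixel: SIMD2<Float>, depth: Float, intrinsics K: simd_float3x3) -> SIMD3<Float> {
    let x = (pixel.x - K[2][0]) * depth / K[0][0]   // (u - cx) * z / fx
    let y = (pixel.y - K[2][1]) * depth / K[1][1]   // (v - cy) * z / fy
    return SIMD3<Float>(x, y, depth)
}
```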
## Track Thermal State
Processing depth data from a live stream may cause the device to heat up. Keep tabs on the thermal state so you can alert the user if it exceeds a dangerous threshold.
``` swift
@objc
```
......
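The handler above is truncated by the diff; a minimal sketch of the monitoring pattern, assuming a hypothetical `showThermalAlert` helper:

``` swift
// Register for thermal state changes, then check the state when notified.
NotificationCenter.default.addObserver(self,
                                       selector: #selector(thermalStateChanged),
                                       name: ProcessInfo.thermalStateDidChangeNotification,
                                       object: nil)

@objc
func thermalStateChanged(notification: NSNotification) {
    let state = ProcessInfo.processInfo.thermalState
    if state == .serious || state == .critical {
        showThermalAlert()  // hypothetical helper that presents an alert to the user
    }
}
```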
......@@ -31,8 +31,6 @@
2633CF1C1E7C65D500FC80E1 /* DepthToJETConverter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DepthToJETConverter.swift; sourceTree = "<group>"; };
2672370D1E79BFBF003D2EAA /* DepthToJET.metal */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.metal; path = DepthToJET.metal; sourceTree = "<group>"; };
267ED84D1ED7965A00898078 /* TrueDepthStreamer-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "TrueDepthStreamer-Bridging-Header.h"; sourceTree = "<group>"; };
-26C3826026C380D000000001 /* SampleCode.xcconfig */ = {isa = PBXFileReference; name = SampleCode.xcconfig; path = Configuration/SampleCode.xcconfig; sourceTree = "<group>"; };
-28908F9028908E9000000001 /* LICENSE.txt */ = {isa = PBXFileReference; includeInIndex = 1; path = LICENSE.txt; sourceTree = "<group>"; };
6D20BC0F1FFE665100496684 /* PointCloudMetalView.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = PointCloudMetalView.mm; sourceTree = "<group>"; };
C3444A731FFE6110002D901D /* PointCloudMetalView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = PointCloudMetalView.h; sourceTree = "<group>"; };
C3444A741FFE6110002D901D /* AAPLTransforms.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = AAPLTransforms.mm; sourceTree = "<group>"; };
......@@ -40,6 +38,8 @@
C3444A781FFE6110002D901D /* AAPLTransforms.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AAPLTransforms.h; sourceTree = "<group>"; };
C3B256711FDE655100617DD7 /* HistogramCalculator.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = HistogramCalculator.m; sourceTree = "<group>"; };
C3B256721FDE655200617DD7 /* HistogramCalculator.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = HistogramCalculator.h; sourceTree = "<group>"; };
+CCCD7910CCCD7C9000000001 /* LICENSE.txt */ = {isa = PBXFileReference; includeInIndex = 1; path = LICENSE.txt; sourceTree = "<group>"; };
+CCE67AC0CCE54C5000000001 /* SampleCode.xcconfig */ = {isa = PBXFileReference; name = SampleCode.xcconfig; path = Configuration/SampleCode.xcconfig; sourceTree = "<group>"; };
E414FC6C1D5921FD0007C979 /* README.md */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = net.daringfireball.markdown; path = README.md; sourceTree = "<group>"; };
E422DFB71CEF894F0047D7A4 /* TrueDepthStreamer.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = TrueDepthStreamer.app; sourceTree = BUILT_PRODUCTS_DIR; };
E422DFBF1CEF894F0047D7A4 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
......@@ -75,39 +75,39 @@
path = Shaders;
sourceTree = "<group>";
};
-26C3860026C3856000000001 /* Configuration */ = {
+8F7DEDF83CEB2358D1CCC092 /* Configuration */ = {
isa = PBXGroup;
children = (
-26C3826026C380D000000001 /* SampleCode.xcconfig */,
);
name = Configuration;
sourceTree = "<group>";
};
-28908660289084B000000001 /* LICENSE */ = {
+C3B2FE6F1FFA8B9A00D8BC60 /* PointCloud */ = {
isa = PBXGroup;
children = (
-28908F9028908E9000000001 /* LICENSE.txt */,
+C3444A781FFE6110002D901D /* AAPLTransforms.h */,
+C3444A741FFE6110002D901D /* AAPLTransforms.mm */,
+C3444A731FFE6110002D901D /* PointCloudMetalView.h */,
+6D20BC0F1FFE665100496684 /* PointCloudMetalView.mm */,
);
-name = LICENSE;
-path = LICENSE;
+path = PointCloud;
sourceTree = "<group>";
};
-8F7DEDF83CEB2358D1CCC092 /* Configuration */ = {
+CCCD8E90CCCD7B8000000001 /* LICENSE */ = {
isa = PBXGroup;
children = (
+CCCD7910CCCD7C9000000001 /* LICENSE.txt */,
);
-name = Configuration;
+name = LICENSE;
+path = LICENSE;
sourceTree = "<group>";
};
-C3B2FE6F1FFA8B9A00D8BC60 /* PointCloud */ = {
+CCE52550CCE525B000000001 /* Configuration */ = {
isa = PBXGroup;
children = (
-C3444A781FFE6110002D901D /* AAPLTransforms.h */,
-C3444A741FFE6110002D901D /* AAPLTransforms.mm */,
-C3444A731FFE6110002D901D /* PointCloudMetalView.h */,
-6D20BC0F1FFE665100496684 /* PointCloudMetalView.mm */,
+CCE67AC0CCE54C5000000001 /* SampleCode.xcconfig */,
);
-path = PointCloud;
+name = Configuration;
sourceTree = "<group>";
};
E422DFAE1CEF894F0047D7A4 = {
......@@ -117,8 +117,8 @@
E422DFB91CEF894F0047D7A4 /* TrueDepthStreamer */,
E422DFB81CEF894F0047D7A4 /* Products */,
8F7DEDF83CEB2358D1CCC092 /* Configuration */,
-26C3860026C3856000000001 /* Configuration */,
-28908660289084B000000001 /* LICENSE */,
+CCE52550CCE525B000000001 /* Configuration */,
+CCCD8E90CCCD7B8000000001 /* LICENSE */,
);
sourceTree = "<group>";
};
......@@ -162,7 +162,7 @@
E422DFB31CEF894F0047D7A4 /* Sources */,
E422DFB41CEF894F0047D7A4 /* Frameworks */,
E422DFB51CEF894F0047D7A4 /* Resources */,
-C3CEF13F1FE8F4C000842A05,
+A6606580A660405000000001,
);
buildRules = (
);
......@@ -180,19 +180,19 @@
isa = PBXProject;
attributes = {
LastSwiftUpdateCheck = 0800;
-LastUpgradeCheck = 1000;
+LastUpgradeCheck = 1100;
ORGANIZATIONNAME = Apple;
TargetAttributes = {
E422DFB61CEF894F0047D7A4 = {
CreatedOnToolsVersion = 8.0;
-LastSwiftMigration = 0900;
+LastSwiftMigration = 1100;
ProvisioningStyle = Automatic;
};
};
};
buildConfigurationList = E422DFB21CEF894F0047D7A4 /* Build configuration list for PBXProject "TrueDepthStreamer" */;
compatibilityVersion = "Xcode 3.2";
-developmentRegion = English;
+developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
......@@ -266,10 +266,11 @@
/* Begin XCBuildConfiguration section */
E422DFC71CEF894F0047D7A4 /* Debug */ = {
isa = XCBuildConfiguration;
-baseConfigurationReference = 26C3826026C380D000000001 /* SampleCode.xcconfig */;
+baseConfigurationReference = CCE67AC0CCE54C5000000001 /* SampleCode.xcconfig */;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPRESSION = lossless;
+CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
......@@ -327,10 +328,11 @@
};
E422DFC81CEF894F0047D7A4 /* Release */ = {
isa = XCBuildConfiguration;
-baseConfigurationReference = 26C3826026C380D000000001 /* SampleCode.xcconfig */;
+baseConfigurationReference = CCE67AC0CCE54C5000000001 /* SampleCode.xcconfig */;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPRESSION = "respect-asset-catalog";
+CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
......@@ -382,7 +384,7 @@
};
E422DFCA1CEF894F0047D7A4 /* Debug */ = {
isa = XCBuildConfiguration;
-baseConfigurationReference = 26C3826026C380D000000001 /* SampleCode.xcconfig */;
+baseConfigurationReference = CCE67AC0CCE54C5000000001 /* SampleCode.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
......@@ -401,14 +403,14 @@
PROVISIONING_PROFILE_SPECIFIER = "";
SWIFT_OBJC_BRIDGING_HEADER = "TrueDepthStreamer/TrueDepthStreamer-Bridging-Header.h";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
-SWIFT_VERSION = 4.2;
+SWIFT_VERSION = 5.0;
TOOLCHAINS = default;
};
name = Debug;
};
E422DFCB1CEF894F0047D7A4 /* Release */ = {
isa = XCBuildConfiguration;
-baseConfigurationReference = 26C3826026C380D000000001 /* SampleCode.xcconfig */;
+baseConfigurationReference = CCE67AC0CCE54C5000000001 /* SampleCode.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
......@@ -425,7 +427,7 @@
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = "";
SWIFT_OBJC_BRIDGING_HEADER = "TrueDepthStreamer/TrueDepthStreamer-Bridging-Header.h";
-SWIFT_VERSION = 4.2;
+SWIFT_VERSION = 5.0;
TOOLCHAINS = default;
};
name = Release;
......
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>BuildSystemType</key>
<string>Latest</string>
</dict>
</plist>
......@@ -716,6 +716,9 @@ class CameraViewController: UIViewController, AVCaptureDataOutputSynchronizerDel
DispatchQueue.main.async {
self.touchDepth.text = ""
}
+            @unknown default:
+                print("Unknown gesture state.")
+                touchDetected = false
}
}
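The `@unknown default` cases added here are part of the Swift 5 migration: `UIGestureRecognizer.State` is a non-frozen enum, so an exhaustive switch should also handle cases added by future SDKs. In isolation the pattern looks like this (handler names illustrative):

``` swift
switch gesture.state {
case .began, .changed:
    updateTouchDepth()                 // hypothetical: sample the depth under the finger
case .ended, .cancelled, .failed, .possible:
    touchDetected = false
@unknown default:
    print("Unknown gesture state.")    // conservatively reset on future, unknown states
    touchDetected = false
}
```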
......@@ -944,6 +947,8 @@ extension PreviewMetalView.Rotation {
default: return nil
}
+        @unknown default:
+            fatalError("Unknown orientation. Can't continue.")
}
}
}
......
......@@ -28,13 +28,13 @@ kernel void depthToJET(texture2d<float, access::read> inputTexture [[ text
constant BGRAPixel *colorTable [[ buffer(2) ]],
uint2 gid [[ thread_position_in_grid ]])
{
    // Ensure we don't read or write outside of the texture
    if ((gid.x >= inputTexture.get_width()) || (gid.y >= inputTexture.get_height())) {
        return;
    }
// depthDataType is kCVPixelFormatType_DepthFloat16
    float depth = inputTexture.read(gid).x;
ushort histIndex = (ushort)(depth * params.binningFactor);
......@@ -46,6 +46,6 @@ kernel void depthToJET(texture2d<float, access::read> inputTexture [[ text
float colorIndex = histogram[histIndex];
BGRAPixel outputColor = colorTable[(int)colorIndex];
outputTexture.write(float4(outputColor.r / 255.0, outputColor.g / 255.0, outputColor.b / 255.0, 1.0), gid);
}
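Dispatching a kernel like `depthToJET` sizes the compute grid to cover the whole texture, which is why the bounds check at the top of the kernel is needed for partial edge threadgroups. A hypothetical Swift-side encoding (pipeline creation and the params/histogram/colorTable buffer bindings elided):

``` swift
let encoder = commandBuffer.makeComputeCommandEncoder()!
encoder.setComputePipelineState(jetPipelineState)  // assumed: pipeline compiled from depthToJET
encoder.setTexture(depthTexture, index: 0)
encoder.setTexture(jetTexture, index: 1)
let threadsPerGroup = MTLSize(width: 16, height: 16, depth: 1)
let groupCount = MTLSize(width: (jetTexture.width + 15) / 16,
                         height: (jetTexture.height + 15) / 16,
                         depth: 1)
encoder.dispatchThreadgroups(groupCount, threadsPerThreadgroup: threadsPerGroup)
encoder.endEncoding()
```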
......@@ -19,31 +19,31 @@ struct mixerParameters
float mixFactor;
};
-vertex VertexIO vertexMixer(device float2 *pPosition [[ buffer(0) ]],
-                            uint index [[ vertex_id ]])
+vertex VertexIO vertexMixer(const device float2 *pPosition [[ buffer(0) ]],
+                            uint index [[ vertex_id ]])
{
VertexIO outVertex;
outVertex.position.xy = pPosition[index];
outVertex.position.z = 0;
outVertex.position.w = 1.0;
    // Convert texture position to texture coordinates
outVertex.textureCoord.xy = 0.5 + float2(0.5, -0.5) * outVertex.position.xy;
return outVertex;
}
fragment half4 fragmentMixer(VertexIO inputFragment [[ stage_in ]],
                             texture2d<half> mixerInput0 [[ texture(0) ]],
                             texture2d<half> mixerInput1 [[ texture(1) ]],
                             const device mixerParameters& mixerParameters [[ buffer(0) ]],
                             sampler samplr [[ sampler(0) ]])
{
half4 input0 = mixerInput0.sample(samplr, inputFragment.textureCoord);
half4 input1 = mixerInput1.sample(samplr, inputFragment.textureCoord);
    half4 output = mix(input0, input1, half(mixerParameters.mixFactor));
return output;
}
......@@ -11,27 +11,27 @@ using namespace metal;
// Vertex input/output structure for passing results from vertex shader to fragment shader
struct VertexIO
{
    float4 position [[position]];
    float2 textureCoord [[user(texturecoord)]];
};
// Vertex shader for a textured quad
-vertex VertexIO vertexPassThrough(device packed_float4 *pPosition [[ buffer(0) ]],
-                                  device packed_float2 *pTexCoords [[ buffer(1) ]],
-                                  uint vid [[ vertex_id ]])
+vertex VertexIO vertexPassThrough(const device packed_float4 *pPosition [[ buffer(0) ]],
+                                  const device packed_float2 *pTexCoords [[ buffer(1) ]],
+                                  uint vid [[ vertex_id ]])
{
    VertexIO outVertex;
    outVertex.position = pPosition[vid];
    outVertex.textureCoord = pTexCoords[vid];
    return outVertex;
}
// Fragment shader for a textured quad
fragment half4 fragmentPassThrough(VertexIO inputFragment [[ stage_in ]],
                                   texture2d<half> inputTexture [[ texture(0) ]],
                                   sampler samplr [[ sampler(0) ]])
{
    return inputTexture.sample(samplr, inputFragment.textureCoord);
}
......@@ -32,7 +32,7 @@ vertexShaderPoints(uint vertexID [[ vertex_id ]],
// depthDataType is kCVPixelFormatType_DepthFloat16
float depth = depthTexture.read(pos).x * 1000.0f;
float xrw = (pos.x - cameraIntrinsics[2][0]) * depth / cameraIntrinsics[0][0];
float yrw = (pos.y - cameraIntrinsics[2][1]) * depth / cameraIntrinsics[1][1];
......@@ -55,7 +55,7 @@ fragment float4 fragmentShaderPoints(RasterizerDataColor in [[stage_in]],
{
constexpr sampler textureSampler (mag_filter::linear,
min_filter::linear);
const float4 colorSample = colorTexture.sample (textureSampler, in.coor);
return colorSample;
}
......