diff --git a/Examples/Examples.xcodeproj/project.pbxproj b/Examples/Examples.xcodeproj/project.pbxproj index 736916367..ea9fb25ea 100644 --- a/Examples/Examples.xcodeproj/project.pbxproj +++ b/Examples/Examples.xcodeproj/project.pbxproj @@ -7,6 +7,7 @@ objects = { /* Begin PBXBuildFile section */ + 1C40F3322B46118800C00ED5 /* WorldScaleGeoTrackingExampleView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1C40F3312B46118800C00ED5 /* WorldScaleGeoTrackingExampleView.swift */; }; 1CC376D42ABA0B3700A83300 /* TableTopExampleView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1CC376D32ABA0B3700A83300 /* TableTopExampleView.swift */; }; 4D19FCB52881C8F3002601E8 /* PopupExampleView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4D19FCB42881C8F3002601E8 /* PopupExampleView.swift */; }; 75230DAE28614369009AF501 /* UtilityNetworkTraceExampleView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 75230DAD28614369009AF501 /* UtilityNetworkTraceExampleView.swift */; }; @@ -46,6 +47,7 @@ /* End PBXCopyFilesBuildPhase section */ /* Begin PBXFileReference section */ + 1C40F3312B46118800C00ED5 /* WorldScaleGeoTrackingExampleView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorldScaleGeoTrackingExampleView.swift; sourceTree = "<group>"; }; 1CC376D32ABA0B3700A83300 /* TableTopExampleView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TableTopExampleView.swift; sourceTree = "<group>"; }; 4D19FCB42881C8F3002601E8 /* PopupExampleView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PopupExampleView.swift; sourceTree = "<group>"; }; 75230DAD28614369009AF501 /* UtilityNetworkTraceExampleView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UtilityNetworkTraceExampleView.swift; sourceTree = "<group>"; }; @@ -103,13 +105,14 @@ 75657E4727ABAC8400EE865B /* CompassExampleView.swift */, E4AA9315276BF5ED000E6289 /* FloatingPanelExampleView.swift */, 
E4624A24278CE815000D2A38 /* FloorFilterExampleView.swift */, + 882899FC2AB5099300A0BDC1 /* FlyoverExampleView.swift */, E4F9BC98265EFCAF001280FF /* OverviewMapExampleView.swift */, 4D19FCB42881C8F3002601E8 /* PopupExampleView.swift */, 75D41B2A27C6F21400624D7C /* ScalebarExampleView.swift */, E42BFBE82672BF9500159107 /* SearchExampleView.swift */, - 75230DAD28614369009AF501 /* UtilityNetworkTraceExampleView.swift */, - 882899FC2AB5099300A0BDC1 /* FlyoverExampleView.swift */, 1CC376D32ABA0B3700A83300 /* TableTopExampleView.swift */, + 75230DAD28614369009AF501 /* UtilityNetworkTraceExampleView.swift */, + 1C40F3312B46118800C00ED5 /* WorldScaleGeoTrackingExampleView.swift */, ); name = Examples; sourceTree = "<group>"; @@ -270,6 +273,7 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( + 1C40F3322B46118800C00ED5 /* WorldScaleGeoTrackingExampleView.swift in Sources */, 1CC376D42ABA0B3700A83300 /* TableTopExampleView.swift in Sources */, 752A4FC4294D268000A49479 /* MapDataModel.swift in Sources */, 752A4FC5294D268000A49479 /* SceneDataModel.swift in Sources */, diff --git a/Examples/Examples/WorldScaleGeoTrackingExampleView.swift b/Examples/Examples/WorldScaleGeoTrackingExampleView.swift new file mode 100644 index 000000000..5144353ac --- /dev/null +++ b/Examples/Examples/WorldScaleGeoTrackingExampleView.swift @@ -0,0 +1,101 @@ +// Copyright 2023 Esri +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import SwiftUI +import ArcGIS +import ArcGISToolkit +import CoreLocation + +/// An example that utilizes the `WorldScaleGeoTrackingSceneView` to show an augmented reality view +/// of your current location. Because this is an example that can be run from anywhere, +/// it places a red circle around your initial location which can be explored. +struct WorldScaleGeoTrackingExampleView: View { + @State private var scene: ArcGIS.Scene = { + // Creates an elevation source from Terrain3D REST service. + let elevationServiceURL = URL(string: "https://elevation3d.arcgis.com/arcgis/rest/services/WorldElevation3D/Terrain3D/ImageServer")! + let elevationSource = ArcGISTiledElevationSource(url: elevationServiceURL) + let surface = Surface() + surface.addElevationSource(elevationSource) + let scene = Scene() + scene.baseSurface = surface + scene.baseSurface.backgroundGrid.isVisible = false + scene.baseSurface.navigationConstraint = .unconstrained + scene.basemap = Basemap(style: .arcGISImagery) + scene.addOperationalLayer(parcelsLayer) + return scene + }() + + /// Basemap opacity. + @State private var opacity: Float = 1 + /// Graphics overlay to show a graphic around your initial location. + @State private var graphicsOverlay = GraphicsOverlay() + /// The location data source that is used to access the device location. + @State private var locationDataSource = SystemLocationDataSource() + + static var parcelsLayer: FeatureLayer { + let parcelsTable = ServiceFeatureTable(url: URL(string: "https://services.arcgis.com/aA3snZwJfFkVyDuP/ArcGIS/rest/services/Parcels_for_San_Bernardino_County/FeatureServer/0")!) 
+ let featureLayer = FeatureLayer(featureTable: parcelsTable) + featureLayer.renderer = SimpleRenderer(symbol: SimpleLineSymbol(color: .cyan, width: 3)) + return featureLayer + } + + var body: some View { + VStack { + WorldScaleGeoTrackingSceneView { proxy in + SceneView(scene: scene, graphicsOverlays: [graphicsOverlay]) + .onSingleTapGesture { screen, _ in + print("Identifying...") + Task.detached { + let results = try await proxy.identifyLayers(screenPoint: screen, tolerance: 20) + print("\(results.count) identify result(s).") + } + } + } + // A slider to adjust the basemap opacity. + Slider(value: $opacity, in: 0...1.0) + .padding(.horizontal) + } + .onChange(of: opacity) { opacity in + guard let basemap = scene.basemap else { return } + for layer in basemap.baseLayers { + layer.opacity = opacity + } + } + .task { + // Request when-in-use location authorization. + // This is necessary for 2 reasons: + // 1. Because we use location datasource to get the initial location in this example + // in order to display a ring around the initial location. + // 2. Because the `WorldScaleSceneView` utilizes a location datasource and that + // datasource will not start until authorized. 
+ let locationManager = CLLocationManager() + if locationManager.authorizationStatus == .notDetermined { + locationManager.requestWhenInUseAuthorization() + } + + do { + try await locationDataSource.start() + } catch { + print("Failed to start location datasource: \(error.localizedDescription)") + } + + // Retrieve initial location + guard let initialLocation = await locationDataSource.locations.first(where: { _ in true }) else { return } + + // Put a circle graphic around the initial location + let circle = GeometryEngine.geodeticBuffer(around: initialLocation.position, distance: 20, distanceUnit: .meters, maxDeviation: 1, curveType: .geodesic) + graphicsOverlay.addGraphic(Graphic(geometry: circle, symbol: SimpleLineSymbol(color: .red, width: 3))) + } + } +} diff --git a/Examples/ExamplesApp/Examples.swift b/Examples/ExamplesApp/Examples.swift index f5b6875da..cfbf37174 100644 --- a/Examples/ExamplesApp/Examples.swift +++ b/Examples/ExamplesApp/Examples.swift @@ -37,7 +37,8 @@ extension ExampleList { name: "Augmented Reality", examples: [ AnyExample("Flyover", content: FlyoverExampleView()), - AnyExample("Tabletop", content: TableTopExampleView()) + AnyExample("Tabletop", content: TableTopExampleView()), + AnyExample("World Scale", content: WorldScaleGeoTrackingExampleView()) ] ) diff --git a/Examples/ExamplesApp/Info.plist b/Examples/ExamplesApp/Info.plist index e53625c32..3b7d06a10 100644 --- a/Examples/ExamplesApp/Info.plist +++ b/Examples/ExamplesApp/Info.plist @@ -2,8 +2,8 @@ - <key>NSCameraUsageDescription</key> - <string>This app uses augmented reality to overlay imagery over your real-world environment. Camera access is required for this functionality.</string> + <key>NSLocationWhenInUseUsageDescription</key> + <string>This app uses your location to anchor virtual objects to real-world locations in an augmented reality experience.</string> 
CFBundleDevelopmentRegion $(DEVELOPMENT_LANGUAGE) CFBundleExecutable @@ -59,6 +59,8 @@ 1 LSRequiresIPhoneOS + NSCameraUsageDescription + This app uses augmented reality to overlay imagery over your real-world environment. Camera access is required for this functionality. UIApplicationSceneManifest UIApplicationSupportsMultipleScenes diff --git a/Sources/ArcGISToolkit/Components/Augmented Reality/TableTopSceneView.swift b/Sources/ArcGISToolkit/Components/Augmented Reality/TableTopSceneView.swift index 54a4862f8..22e71b36c 100644 --- a/Sources/ArcGISToolkit/Components/Augmented Reality/TableTopSceneView.swift +++ b/Sources/ArcGISToolkit/Components/Augmented Reality/TableTopSceneView.swift @@ -304,26 +304,6 @@ private extension SceneViewProxy { return initialTransformation } - - /// Sets the field of view for the scene view's camera for a given augmented reality frame. - /// - Parameters: - /// - frame: The current AR frame. - /// - orientation: The interface orientation. - func setFieldOfView(for frame: ARFrame, orientation: InterfaceOrientation) { - let camera = frame.camera - let intrinsics = camera.intrinsics - let imageResolution = camera.imageResolution - - setFieldOfViewFromLensIntrinsics( - xFocalLength: intrinsics[0][0], - yFocalLength: intrinsics[1][1], - xPrincipal: intrinsics[2][0], - yPrincipal: intrinsics[2][1], - xImageSize: Float(imageResolution.width), - yImageSize: Float(imageResolution.height), - interfaceOrientation: orientation - ) - } } private extension String { diff --git a/Sources/ArcGISToolkit/Components/Augmented Reality/WorldScaleGeoTrackingSceneView.swift b/Sources/ArcGISToolkit/Components/Augmented Reality/WorldScaleGeoTrackingSceneView.swift new file mode 100644 index 000000000..10301bb9e --- /dev/null +++ b/Sources/ArcGISToolkit/Components/Augmented Reality/WorldScaleGeoTrackingSceneView.swift @@ -0,0 +1,298 @@ +// Copyright 2023 Esri +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file 
except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import ARKit +import SwiftUI +import ArcGIS + +/// A scene view that provides an augmented reality world scale experience using geotracking. +public struct WorldScaleGeoTrackingSceneView: View { + /// The proxy for the ARSwiftUIView. + @State private var arViewProxy = ARSwiftUIViewProxy() + /// The camera controller that will be set on the scene view. + @State private var cameraController: TransformationMatrixCameraController + /// The current interface orientation. + @State private var interfaceOrientation: InterfaceOrientation? + /// The location datasource that is used to access the device location. + @State private var locationDataSource: LocationDataSource + /// A Boolean value indicating if the camera was initially set. + @State private var initialCameraIsSet = false + /// A Boolean value that indicates whether to hide the coaching overlay view. + private var coachingOverlayIsHidden: Bool = false + /// A Boolean value that indicates whether the coaching overlay view is active. + @State private var coachingOverlayIsActive: Bool = true + /// The current camera of the scene view. + @State private var currentCamera: Camera? + /// A Boolean value that indicates whether the calibration view is hidden. + private var calibrationViewIsHidden: Bool = false + /// The calibrated camera heading. + @State private var calibrationHeading: Double? + /// The closure that builds the scene view. + private let sceneViewBuilder: (SceneViewProxy) -> SceneView + /// The configuration for the AR session. 
+ private let configuration: ARConfiguration + /// The timestamp of the last received location. + @State private var lastLocationTimestamp: Date? + /// The current device location. + @State private var currentLocation: Location? + /// The current device heading. + @State private var currentHeading: Double? + /// The valid accuracy threshold for a location in meters. + private var validAccuracyThreshold: Double = 0 + + /// Creates a world scale scene view. + /// - Parameters: + /// - locationDataSource: The location datasource used to acquire the device's location. + /// - clippingDistance: Determines the clipping distance in meters around the camera. A value + /// of `nil` means that no data will be clipped. + /// - sceneView: A closure that builds the scene view to be overlayed on top of the + /// augmented reality video feed. + /// - Remark: The provided scene view will have certain properties overridden in order to + /// be effectively viewed in augmented reality. Properties such as the camera controller, + /// and view drawing mode. + public init( + locationDataSource: LocationDataSource = SystemLocationDataSource(), + clippingDistance: Double? 
= nil, + @ViewBuilder sceneView: @escaping (SceneViewProxy) -> SceneView + ) { + self.sceneViewBuilder = sceneView + + let cameraController = TransformationMatrixCameraController() + cameraController.translationFactor = 1 + cameraController.clippingDistance = clippingDistance + _cameraController = .init(initialValue: cameraController) + + if ARGeoTrackingConfiguration.isSupported { + configuration = ARGeoTrackingConfiguration() + } else { + configuration = ARWorldTrackingConfiguration() + configuration.worldAlignment = .gravityAndHeading + } + + _locationDataSource = .init(initialValue: locationDataSource) + } + + public var body: some View { + SceneViewReader { sceneViewProxy in + ZStack { + ARSwiftUIView(proxy: arViewProxy) + .onDidUpdateFrame { _, frame in + guard let interfaceOrientation, initialCameraIsSet else { return } + + sceneViewProxy.updateCamera( + frame: frame, + cameraController: cameraController, + orientation: interfaceOrientation, + initialTransformation: .identity + ) + sceneViewProxy.setFieldOfView( + for: frame, + orientation: interfaceOrientation + ) + } + + if initialCameraIsSet { + sceneViewBuilder(sceneViewProxy) + .cameraController(cameraController) + .attributionBarHidden(true) + .spaceEffect(.transparent) + .atmosphereEffect(.off) + .interactiveNavigationDisabled(true) + .onCameraChanged { camera in + self.currentCamera = camera + } + } + + if !coachingOverlayIsHidden { + ARCoachingOverlay(goal: .geoTracking) + .sessionProvider(arViewProxy) + .active(coachingOverlayIsActive) + .allowsHitTesting(false) + } + } + } + .observingInterfaceOrientation($interfaceOrientation) + .onAppear { + arViewProxy.session.run(configuration) + } + .onDisappear { + arViewProxy.session.pause() + Task { await locationDataSource.stop() } + } + .task { + do { + try await locationDataSource.start() + await withTaskGroup(of: Void.self) { group in + group.addTask { + for await location in locationDataSource.locations { + self.lastLocationTimestamp = 
location.timestamp + self.currentLocation = location + if let heading = currentHeading { + await updateSceneView(for: location, heading: heading) + } + } + } + group.addTask { + for await heading in locationDataSource.headings { + self.currentHeading = heading + if let location = self.currentLocation { + await updateSceneView(for: location, heading: heading) + } + } + } + } + } catch {} + } + .toolbar { + ToolbarItem(placement: .bottomBar) { + if !calibrationViewIsHidden { + calibrationView + } + } + } + .overlay(alignment: .top) { + accuracyView + .multilineTextAlignment(.center) + .frame(maxWidth: .infinity, alignment: .center) + .padding(8) + .background(.regularMaterial, ignoresSafeAreaEdges: .horizontal) + } + } + + /// If necessary, updates the scene view's camera controller for a new location coming + /// from the location datasource. + @MainActor + private func updateSceneView(for location: Location, heading: Double) { + // Do not use cached location more than 10 seconds old. + guard abs(lastLocationTimestamp?.timeIntervalSinceNow ?? 0) < 10 else { return } + + // Make sure that horizontal and vertical accuracy are valid. + guard location.horizontalAccuracy > validAccuracyThreshold, + location.verticalAccuracy > validAccuracyThreshold else { return } + + // Make sure either the initial camera is not set, or we need to update the camera. + guard (!initialCameraIsSet || shouldUpdateCamera(for: location)) else { return } + + // Add some of the vertical accuracy to the z value of the position, that way if the + // GPS location is not accurate, we won't end up below the earth's surface. + let altitude = (location.position.z ?? 0) + location.verticalAccuracy + + cameraController.originCamera = Camera( + latitude: location.position.y, + longitude: location.position.x, + altitude: altitude, + heading: calibrationHeading ?? heading, + pitch: 90, + roll: 0 + ) + + // We have to do this or the error gets bigger and bigger. 
+ cameraController.transformationMatrix = .identity + arViewProxy.session.run(configuration, options: .resetTracking) + + // If the initial camera is not set, then we set the flag here to true + // and deactivate the coaching overlay. + if !initialCameraIsSet { + coachingOverlayIsActive = false + withAnimation { + initialCameraIsSet = true + } + } + } + + /// Returns a Boolean value indicating if the camera should be updated for a location + /// coming in from the location datasource. + func shouldUpdateCamera(for location: Location) -> Bool { + // Ensure the incoming location can be projected into the current camera's spatial reference. + guard let currentCamera, + let spatialReference = currentCamera.location.spatialReference, + let currentPosition = GeometryEngine.project(location.position, into: spatialReference) + else { return false } + + // Measure the distance between the location datasource's reported location + // and the camera's current location. + guard let result = GeometryEngine.geodeticDistance( + from: currentCamera.location, + to: currentPosition, + distanceUnit: .meters, + azimuthUnit: nil, + curveType: .geodesic + ) else { + return false + } + + // If the location becomes off by over a certain threshold, then update the camera location. + let threshold = 2.0 + return result.distance.value > threshold ? true : false + } + + /// Sets the visibility of the coaching overlay view for the AR experience. + /// - Parameter hidden: A Boolean value that indicates whether to hide the + /// coaching overlay view. + public func coachingOverlayHidden(_ hidden: Bool) -> Self { + var view = self + view.coachingOverlayIsHidden = hidden + return view + } + + /// Sets the visibility of the calibration view for the AR experience. + /// - Parameter hidden: A Boolean value that indicates whether to hide the + /// calibration view. 
+ public func calibrationViewHidden(_ hidden: Bool) -> Self { + var view = self + view.calibrationViewIsHidden = hidden + return view + } + + /// Updates the heading of the scene view camera controller. + /// - Parameter heading: The camera heading. + func updateHeading(_ heading: Double) { + cameraController.originCamera = cameraController.originCamera.rotatedTo( + heading: heading, + pitch: cameraController.originCamera.pitch, + roll: cameraController.originCamera.roll + ) + } + + var calibrationView: some View { + HStack { + Button { + let heading = cameraController.originCamera.heading + 1 + updateHeading(heading) + calibrationHeading = heading + } label: { + Image(systemName: "plus") + } + + Text("heading: \(calibrationHeading?.rounded() ?? cameraController.originCamera.heading.rounded(.towardZero), format: .number)") + + Button { + let heading = cameraController.originCamera.heading - 1 + updateHeading(heading) + calibrationHeading = heading + } label: { + Image(systemName: "minus") + } + } + } + + var accuracyView: some View { + VStack { + if let currentLocation { + Text("horizontalAccuracy: \(currentLocation.horizontalAccuracy, format: .number)") + Text("verticalAccuracy: \(currentLocation.verticalAccuracy, format: .number)") + } + } + } +} diff --git a/Sources/ArcGISToolkit/Extensions/ArcGIS/SceneViewProxy.swift b/Sources/ArcGISToolkit/Extensions/ArcGIS/SceneViewProxy.swift index 6d8f862ad..d669b030c 100644 --- a/Sources/ArcGISToolkit/Extensions/ArcGIS/SceneViewProxy.swift +++ b/Sources/ArcGISToolkit/Extensions/ArcGIS/SceneViewProxy.swift @@ -45,6 +45,26 @@ extension SceneViewProxy { cameraController.transformationMatrix = transformationMatrix } } + + /// Sets the field of view for the scene view's camera for a given augmented reality frame. + /// - Parameters: + /// - frame: The current AR frame. + /// - orientation: The interface orientation. 
+ func setFieldOfView(for frame: ARFrame, orientation: InterfaceOrientation) { + let camera = frame.camera + let intrinsics = camera.intrinsics + let imageResolution = camera.imageResolution + + setFieldOfViewFromLensIntrinsics( + xFocalLength: intrinsics[0][0], + yFocalLength: intrinsics[1][1], + xPrincipal: intrinsics[2][0], + yPrincipal: intrinsics[2][1], + xImageSize: Float(imageResolution.width), + yImageSize: Float(imageResolution.height), + interfaceOrientation: orientation + ) + } } private extension ARCamera {