From a7915e031c10993603dd3dd92819dca72343b818 Mon Sep 17 00:00:00 2001 From: Martin Date: Thu, 26 Jun 2025 16:24:11 -0700 Subject: [PATCH 1/4] Added feature to share scans via ios Share Sheet --- StrayScanner.xcodeproj/project.pbxproj | 6 ++ StrayScanner/Helpers/ShareUtility.swift | 60 ++++++++++++++++ StrayScanner/Views/InformationView.swift | 11 +-- StrayScanner/Views/SessionDetail.swift | 92 ++++++++++++++++++++++-- 4 files changed, 160 insertions(+), 9 deletions(-) create mode 100644 StrayScanner/Helpers/ShareUtility.swift diff --git a/StrayScanner.xcodeproj/project.pbxproj b/StrayScanner.xcodeproj/project.pbxproj index e8258ae..af99333 100644 --- a/StrayScanner.xcodeproj/project.pbxproj +++ b/StrayScanner.xcodeproj/project.pbxproj @@ -66,6 +66,8 @@ 38E969CB2572608E00054CC4 /* NewSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 38E969CA2572608E00054CC4 /* NewSession.swift */; }; 38FB730F2572A9FA007D9CB0 /* RecordSessionViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 38FB730E2572A9FA007D9CB0 /* RecordSessionViewController.swift */; }; 38FB73162572AF63007D9CB0 /* Shaders.metal in Sources */ = {isa = PBXBuildFile; fileRef = 38FB73152572AF63007D9CB0 /* Shaders.metal */; }; + 709259192E0B79AB00A7B62E /* ShareUtility.swift in Sources */ = {isa = PBXBuildFile; fileRef = 709259182E0B79AB00A7B62E /* ShareUtility.swift */; }; + 7092591A2E0B79AB00A7B62E /* ShareUtility.swift in Sources */ = {isa = PBXBuildFile; fileRef = 709259182E0B79AB00A7B62E /* ShareUtility.swift */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -119,6 +121,7 @@ 38FB73152572AF63007D9CB0 /* Shaders.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = Shaders.metal; sourceTree = ""; }; 38FB731F2573EABB007D9CB0 /* ShaderTypes.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ShaderTypes.h; sourceTree = ""; }; 38FB73242573ECE2007D9CB0 /* BridgeHeader.h */ = {isa = PBXFileReference; 
lastKnownFileType = sourcecode.c.h; name = BridgeHeader.h; path = StrayScanner/BridgeHeader.h; sourceTree = ""; }; + 709259182E0B79AB00A7B62E /* ShareUtility.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ShareUtility.swift; sourceTree = ""; }; AC52749A3B5AED09C9753120 /* Pods-StrayScanner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-StrayScanner.debug.xcconfig"; path = "Target Support Files/Pods-StrayScanner/Pods-StrayScanner.debug.xcconfig"; sourceTree = ""; }; DBC054EDDA47FB8A717AA671 /* Pods_StrayScanner.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_StrayScanner.framework; sourceTree = BUILT_PRODUCTS_DIR; }; /* End PBXFileReference section */ @@ -244,6 +247,7 @@ 38694C6B26D2C66F00546EA1 /* Helpers */ = { isa = PBXGroup; children = ( + 709259182E0B79AB00A7B62E /* ShareUtility.swift */, 38694C6C26D2C66F00546EA1 /* DatasetEncoder.swift */, 38694C6D26D2C66F00546EA1 /* ConfidenceEncoder.swift */, 38694C6E26D2C66F00546EA1 /* VideoEncoder.swift */, @@ -513,6 +517,7 @@ 38694C7626D2C66F00546EA1 /* ConfidenceEncoder.swift in Sources */, 386E277825991163007D023B /* RecordButton.swift in Sources */, 385999BF25616F2B00F3F681 /* SceneDelegate.swift in Sources */, + 709259192E0B79AB00A7B62E /* ShareUtility.swift in Sources */, 38694C8026D2C66F00546EA1 /* DepthEncoder.swift in Sources */, 38C17B13259BE1DA006B3FDA /* Recording+CoreDataProperties.swift in Sources */, 38C17B12259BE1DA006B3FDA /* Recording+CoreDataClass.swift in Sources */, @@ -543,6 +548,7 @@ 3863FE6A25FCBB0E00C1DA4F /* RecordButton.swift in Sources */, 3807421027BBD07D003194C1 /* PngEncoder.mm in Sources */, 3863FE6C25FCBB0E00C1DA4F /* SceneDelegate.swift in Sources */, + 7092591A2E0B79AB00A7B62E /* ShareUtility.swift in Sources */, 38694C8126D2C66F00546EA1 /* DepthEncoder.swift in Sources */, 3863FE6E25FCBB0E00C1DA4F /* 
Recording+CoreDataProperties.swift in Sources */, 3863FE6F25FCBB0E00C1DA4F /* Recording+CoreDataClass.swift in Sources */, diff --git a/StrayScanner/Helpers/ShareUtility.swift b/StrayScanner/Helpers/ShareUtility.swift new file mode 100644 index 0000000..8a35c27 --- /dev/null +++ b/StrayScanner/Helpers/ShareUtility.swift @@ -0,0 +1,60 @@ +// +// ShareUtility.swift +// StrayScanner +// +// Created by Claude on 6/24/25. +// + +import Foundation +import Compression + +/// Utility class for creating shareable archives from recording datasets +class ShareUtility { + + /// Creates a shareable ZIP archive from a recording's dataset + /// - Parameter recording: The recording to create a ZIP archive for + /// - Returns: URL of the created ZIP file + static func createShareableArchive(for recording: Recording) async throws -> URL { + guard let sourceDirectory = recording.directoryPath() else { + throw NSError(domain: "ShareError", code: 1, userInfo: [NSLocalizedDescriptionKey: "Unable to get recording directory path"]) + } + + let tempDirectory = FileManager.default.temporaryDirectory + let archiveName = "\(recording.name ?? "Recording")_\(recording.id?.uuidString.prefix(8) ?? "unknown").zip" + let archiveURL = tempDirectory.appendingPathComponent(archiveName) + + // Remove existing archive if it exists + try? FileManager.default.removeItem(at: archiveURL) + + return try await withCheckedThrowingContinuation { continuation in + DispatchQueue.global(qos: .userInitiated).async { + do { + try createZipArchive(sourceDirectory: sourceDirectory, destinationURL: archiveURL) + continuation.resume(returning: archiveURL) + } catch { + continuation.resume(throwing: error) + } + } + } + } + + private static func createZipArchive(sourceDirectory: URL, destinationURL: URL) throws { + let coordinator = NSFileCoordinator() + var error: NSError? 
+ var copyError: Error? + coordinator.coordinate(readingItemAt: sourceDirectory, options: [.forUploading], error: &error) { (zipURL) in + do { + _ = zipURL.startAccessingSecurityScopedResource() + defer { zipURL.stopAccessingSecurityScopedResource() } + + try FileManager.default.copyItem(at: zipURL, to: destinationURL) + } catch { + copyError = error + } + } + + if let thrown = copyError ?? (error as Error?) { + throw thrown + } + } +} diff --git a/StrayScanner/Views/InformationView.swift b/StrayScanner/Views/InformationView.swift index ca25c53..2169bec 100644 --- a/StrayScanner/Views/InformationView.swift +++ b/StrayScanner/Views/InformationView.swift @@ -26,13 +26,16 @@ This app lets you record video and depth datasets using the camera and LIDAR sca heading("Transfering Datasets To Your Desktop Computer") bodyText(""" -The recorded datasets can be exported by connecting your device to it with the lightning cable. +The recorded datasets can be exported in several ways: -On Mac, you can access the files through Finder. In the sidebar, select your device. Under the "Files" tab, you should see an entry for Stray Scanner. Expand it, then drag the folders to the desired location. There is one folder per dataset, each named after a random alphanumerical hash. +1. Share directly from the app: Tap the "Share" button inside any recording to export it as a TAR archive via AirDrop, email, or save to Files. -On Windows, you can access the files through iTunes. +2. Connect your device with the lightning cable: + • On Mac, access files through Finder sidebar > your device > "Files" tab > Stray Scanner + • On Windows, access files through iTunes + • Drag folders to your desired location (one folder per dataset) -Alternatively, you can access the data in the Files app under "Browse > On My iPhone > Stray Scanner" and export them to another app or move them to your iCloud drive. +3. Use the Files app: Browse > On My iPhone > Stray Scanner, then export to another app or iCloud drive.
""") } Group { diff --git a/StrayScanner/Views/SessionDetail.swift b/StrayScanner/Views/SessionDetail.swift index 2723554..4144f64 100644 --- a/StrayScanner/Views/SessionDetail.swift +++ b/StrayScanner/Views/SessionDetail.swift @@ -9,6 +9,7 @@ import SwiftUI import AVKit import CoreData +import Foundation class SessionDetailViewModel: ObservableObject { private var dataContext: NSManagedObjectContext? @@ -45,6 +46,10 @@ struct SessionDetailView: View { @ObservedObject var viewModel = SessionDetailViewModel() var recording: Recording @Environment(\.presentationMode) var presentationMode: Binding + @State private var showingShareSheet = false + @State private var tempPackageURL: URL? + @State private var isCreatingPackage = false + @State private var player: AVPlayer? let defaultUrl = URL(fileURLWithPath: "") @@ -55,16 +60,52 @@ struct SessionDetailView: View { Color("BackgroundColor") .edgesIgnoringSafeArea(.all) VStack { - let player = AVPlayer(url: recording.absoluteRgbPath() ?? defaultUrl) - VideoPlayer(player: player) + VideoPlayer(player: player ?? AVPlayer(url: defaultUrl)) .frame(width: width, height: height) .padding(.horizontal, 0.0) - Button(action: deleteItem) { - Text("Delete").foregroundColor(Color("DangerColor")) + .onAppear { + if player == nil { + player = AVPlayer(url: recording.absoluteRgbPath() ?? defaultUrl) + } + } + + HStack(spacing: 20) { + Button(action: shareItem) { + HStack { + if isCreatingPackage { + ProgressView() + .scaleEffect(0.8) + } else { + Image(systemName: "square.and.arrow.up") + } + Text(isCreatingPackage ? "Preparing..." : "Share") + .fixedSize() + } + .foregroundColor(isCreatingPackage ? 
.gray : .blue) + .frame(minWidth: 100) + } + .disabled(isCreatingPackage) + + Button(action: deleteItem) { + Text("Delete").foregroundColor(Color("DangerColor")) + } } + .padding(.top, 20) } .navigationBarTitle(viewModel.title(recording: recording)) .background(Color("BackgroundColor")) + .sheet(isPresented: $showingShareSheet) { + if let packageURL = tempPackageURL { + ShareSheet(activityItems: [packageURL]) { activityType, completed, returnedItems, activityError in + DispatchQueue.main.async { + // Clean up temporary package after sharing + try? FileManager.default.removeItem(at: packageURL) + tempPackageURL = nil + showingShareSheet = false + } + } + } + } } } @@ -72,9 +113,50 @@ struct SessionDetailView: View { viewModel.delete(recording: recording) self.presentationMode.wrappedValue.dismiss() } + + func shareItem() { + isCreatingPackage = true + + Task { + do { + let packageURL = try await ShareUtility.createShareableArchive(for: recording) + await MainActor.run { + tempPackageURL = packageURL + isCreatingPackage = false + showingShareSheet = true + } + } catch { + await MainActor.run { + isCreatingPackage = false + print("Failed to create package: \(error)") + } + } + } + } } - +struct ShareSheet: UIViewControllerRepresentable { + let activityItems: [Any] + let applicationActivities: [UIActivity]? = nil + let completionWithItemsHandler: UIActivityViewController.CompletionWithItemsHandler? + + init(activityItems: [Any], completionHandler: UIActivityViewController.CompletionWithItemsHandler? 
= nil) { + self.activityItems = activityItems + self.completionWithItemsHandler = completionHandler + } + + func makeUIViewController(context: UIViewControllerRepresentableContext) -> UIActivityViewController { + let controller = UIActivityViewController( + activityItems: activityItems, + applicationActivities: applicationActivities + ) + controller.completionWithItemsHandler = completionWithItemsHandler + return controller + } + + func updateUIViewController(_ uiViewController: UIActivityViewController, context: UIViewControllerRepresentableContext) { + } +} struct SessionDetailView_Previews: PreviewProvider { static var recording: Recording = { () -> Recording in From f653e45f1dcc28075e99da6486b932922579db08 Mon Sep 17 00:00:00 2001 From: Martin Date: Thu, 26 Jun 2025 17:46:57 -0700 Subject: [PATCH 2/4] Minor project ref cleanup, corrected outdated info TAR->ZIP --- StrayScanner.xcodeproj/project.pbxproj | 2 -- StrayScanner/Views/InformationView.swift | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/StrayScanner.xcodeproj/project.pbxproj b/StrayScanner.xcodeproj/project.pbxproj index af99333..567c3c9 100644 --- a/StrayScanner.xcodeproj/project.pbxproj +++ b/StrayScanner.xcodeproj/project.pbxproj @@ -67,7 +67,6 @@ 38FB730F2572A9FA007D9CB0 /* RecordSessionViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 38FB730E2572A9FA007D9CB0 /* RecordSessionViewController.swift */; }; 38FB73162572AF63007D9CB0 /* Shaders.metal in Sources */ = {isa = PBXBuildFile; fileRef = 38FB73152572AF63007D9CB0 /* Shaders.metal */; }; 709259192E0B79AB00A7B62E /* ShareUtility.swift in Sources */ = {isa = PBXBuildFile; fileRef = 709259182E0B79AB00A7B62E /* ShareUtility.swift */; }; - 7092591A2E0B79AB00A7B62E /* ShareUtility.swift in Sources */ = {isa = PBXBuildFile; fileRef = 709259182E0B79AB00A7B62E /* ShareUtility.swift */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -548,7 +547,6 @@ 3863FE6A25FCBB0E00C1DA4F /* 
RecordButton.swift in Sources */, 3807421027BBD07D003194C1 /* PngEncoder.mm in Sources */, 3863FE6C25FCBB0E00C1DA4F /* SceneDelegate.swift in Sources */, - 7092591A2E0B79AB00A7B62E /* ShareUtility.swift in Sources */, 38694C8126D2C66F00546EA1 /* DepthEncoder.swift in Sources */, 3863FE6E25FCBB0E00C1DA4F /* Recording+CoreDataProperties.swift in Sources */, 3863FE6F25FCBB0E00C1DA4F /* Recording+CoreDataClass.swift in Sources */, diff --git a/StrayScanner/Views/InformationView.swift b/StrayScanner/Views/InformationView.swift index 2169bec..03eebfb 100644 --- a/StrayScanner/Views/InformationView.swift +++ b/StrayScanner/Views/InformationView.swift @@ -28,7 +28,7 @@ This app lets you record video and depth datasets using the camera and LIDAR sca bodyText(""" The recorded datasets can be exported in several ways: -1. Share directly from the app: Tap the "Share" button inside any recording to export it as a TAR archive via AirDrop, email, or save to Files. +1. Share directly from the app: Tap the "Share" button inside any recording to export it as a ZIP archive via AirDrop, email, or save to Files. 2. 
Connect your device with the lightning cable: • On Mac, access files through Finder sidebar > your device > "Files" tab > Stray Scanner From 0e39cc05d0108791b82083c7a8b50ca38375ea03 Mon Sep 17 00:00:00 2001 From: Martin Date: Thu, 26 Jun 2025 22:04:10 -0700 Subject: [PATCH 3/4] Adaptive recording mode (capture on pose threshold) and settings --- StrayScanner.xcodeproj/project.pbxproj | 18 ++++ .../Constants/SettingsConstants.swift | 22 +++++ .../RecordSessionViewController.swift | 28 ++++-- StrayScanner/Helpers/DatasetEncoder.swift | 71 ++++++++++++++ StrayScanner/Views/SessionList.swift | 13 +++ StrayScanner/Views/SettingsView.swift | 92 +++++++++++++++++++ 6 files changed, 237 insertions(+), 7 deletions(-) create mode 100644 StrayScanner/Constants/SettingsConstants.swift create mode 100644 StrayScanner/Views/SettingsView.swift diff --git a/StrayScanner.xcodeproj/project.pbxproj b/StrayScanner.xcodeproj/project.pbxproj index 567c3c9..1dd70f8 100644 --- a/StrayScanner.xcodeproj/project.pbxproj +++ b/StrayScanner.xcodeproj/project.pbxproj @@ -66,6 +66,8 @@ 38E969CB2572608E00054CC4 /* NewSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 38E969CA2572608E00054CC4 /* NewSession.swift */; }; 38FB730F2572A9FA007D9CB0 /* RecordSessionViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 38FB730E2572A9FA007D9CB0 /* RecordSessionViewController.swift */; }; 38FB73162572AF63007D9CB0 /* Shaders.metal in Sources */ = {isa = PBXBuildFile; fileRef = 38FB73152572AF63007D9CB0 /* Shaders.metal */; }; + 70849EC02E0E46B1007E1315 /* SettingsView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 70849EBF2E0E46B1007E1315 /* SettingsView.swift */; }; + 70849EC42E0E4CE2007E1315 /* SettingsConstants.swift in Sources */ = {isa = PBXBuildFile; fileRef = 70849EC22E0E4CE2007E1315 /* SettingsConstants.swift */; }; 709259192E0B79AB00A7B62E /* ShareUtility.swift in Sources */ = {isa = PBXBuildFile; fileRef = 709259182E0B79AB00A7B62E /* ShareUtility.swift */; }; /* End 
PBXBuildFile section */ @@ -120,6 +122,8 @@ 38FB73152572AF63007D9CB0 /* Shaders.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = Shaders.metal; sourceTree = ""; }; 38FB731F2573EABB007D9CB0 /* ShaderTypes.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ShaderTypes.h; sourceTree = ""; }; 38FB73242573ECE2007D9CB0 /* BridgeHeader.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = BridgeHeader.h; path = StrayScanner/BridgeHeader.h; sourceTree = ""; }; + 70849EBF2E0E46B1007E1315 /* SettingsView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SettingsView.swift; sourceTree = ""; }; + 70849EC22E0E4CE2007E1315 /* SettingsConstants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SettingsConstants.swift; sourceTree = ""; }; 709259182E0B79AB00A7B62E /* ShareUtility.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ShareUtility.swift; sourceTree = ""; }; AC52749A3B5AED09C9753120 /* Pods-StrayScanner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-StrayScanner.debug.xcconfig"; path = "Target Support Files/Pods-StrayScanner/Pods-StrayScanner.debug.xcconfig"; sourceTree = ""; }; DBC054EDDA47FB8A717AA671 /* Pods_StrayScanner.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_StrayScanner.framework; sourceTree = BUILT_PRODUCTS_DIR; }; @@ -181,6 +185,7 @@ 38045D562561A6D20004F23C /* Views */ = { isa = PBXGroup; children = ( + 70849EBF2E0E46B1007E1315 /* SettingsView.swift */, 38045D572561A7370004F23C /* SessionRow.swift */, 38045D592561AC000004F23C /* SessionList.swift */, 38E969CA2572608E00054CC4 /* NewSession.swift */, @@ -218,6 +223,7 @@ children = ( 38694C6B26D2C66F00546EA1 /* Helpers */, 387B576825EA55AC00132903 /* CCode */, + 70849EC32E0E4CE2007E1315 /* Constants */, 
38C17AE4259BCD16006B3FDA /* UI */, 38FB73142572AF50007D9CB0 /* Shaders */, 38FB730D2572A91D007D9CB0 /* Controllers */, @@ -313,6 +319,14 @@ name = Frameworks; sourceTree = ""; }; + 70849EC32E0E4CE2007E1315 /* Constants */ = { + isa = PBXGroup; + children = ( + 70849EC22E0E4CE2007E1315 /* SettingsConstants.swift */, + ); + path = Constants; + sourceTree = ""; + }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ @@ -511,12 +525,14 @@ 385999BD25616F2B00F3F681 /* AppDelegate.swift in Sources */, 38C17B23259D1C80006B3FDA /* SessionDetail.swift in Sources */, 385999C225616F2B00F3F681 /* Stray_Scanner.xcdatamodeld in Sources */, + 70849EC12E0E46B1007E1315 /* SettingsView.swift in Sources */, 38045D5A2561AC000004F23C /* SessionList.swift in Sources */, 38694C8226D2C66F00546EA1 /* AppDaemon.swift in Sources */, 38694C7626D2C66F00546EA1 /* ConfidenceEncoder.swift in Sources */, 386E277825991163007D023B /* RecordButton.swift in Sources */, 385999BF25616F2B00F3F681 /* SceneDelegate.swift in Sources */, 709259192E0B79AB00A7B62E /* ShareUtility.swift in Sources */, + 70849EC42E0E4CE2007E1315 /* SettingsConstants.swift in Sources */, 38694C8026D2C66F00546EA1 /* DepthEncoder.swift in Sources */, 38C17B13259BE1DA006B3FDA /* Recording+CoreDataProperties.swift in Sources */, 38C17B12259BE1DA006B3FDA /* Recording+CoreDataClass.swift in Sources */, @@ -536,6 +552,7 @@ 3863FE6325FCBB0E00C1DA4F /* SessionRow.swift in Sources */, 38694C7526D2C66F00546EA1 /* DatasetEncoder.swift in Sources */, 38694C7D26D2C66F00546EA1 /* CameraRenderer.swift in Sources */, + 70849EC02E0E46B1007E1315 /* SettingsView.swift in Sources */, 38694C7B26D2C66F00546EA1 /* IMUEncoder.swift in Sources */, 3863FE6425FCBB0E00C1DA4F /* Shaders.metal in Sources */, 3863FE6525FCBB0E00C1DA4F /* AppDelegate.swift in Sources */, @@ -545,6 +562,7 @@ 38694C8326D2C66F00546EA1 /* AppDaemon.swift in Sources */, 38694C7726D2C66F00546EA1 /* ConfidenceEncoder.swift in Sources */, 3863FE6A25FCBB0E00C1DA4F /* 
RecordButton.swift in Sources */, + 70849EC52E0E4CE2007E1315 /* SettingsConstants.swift in Sources */, 3807421027BBD07D003194C1 /* PngEncoder.mm in Sources */, 3863FE6C25FCBB0E00C1DA4F /* SceneDelegate.swift in Sources */, 38694C8126D2C66F00546EA1 /* DepthEncoder.swift in Sources */, diff --git a/StrayScanner/Constants/SettingsConstants.swift b/StrayScanner/Constants/SettingsConstants.swift new file mode 100644 index 0000000..54985ce --- /dev/null +++ b/StrayScanner/Constants/SettingsConstants.swift @@ -0,0 +1,22 @@ +// +// SettingsConstants.swift +// StrayScanner +// +// Constants for settings keys and default values +// + +import Foundation + +// MARK: - UserDefaults Keys +let FpsUserDefaultsKey: String = "FPS" +let AdaptiveThresholdPositionKey: String = "AdaptiveThresholdPosition" +let AdaptiveThresholdAngleKey: String = "AdaptiveThresholdAngle" + +// MARK: - FPS Settings +let FpsDividers: [Int] = [1, 2, 4, 12, 60] +let AvailableFpsSettings: [Int] = FpsDividers.map { Int(60 / $0) } +let AdaptiveModeIndex: Int = -1 // Special index for adaptive mode + +// MARK: - Adaptive Mode Defaults +let DefaultAdaptiveThresholdPosition: Double = 0.15 // 15cm +let DefaultAdaptiveThresholdAngle: Double = 15.0 // 15 degrees \ No newline at end of file diff --git a/StrayScanner/Controllers/RecordSessionViewController.swift b/StrayScanner/Controllers/RecordSessionViewController.swift index 503542c..6dd23c1 100644 --- a/StrayScanner/Controllers/RecordSessionViewController.swift +++ b/StrayScanner/Controllers/RecordSessionViewController.swift @@ -13,9 +13,7 @@ import ARKit import CoreData import CoreMotion -let FpsDividers: [Int] = [1, 2, 4, 12, 60] -let AvailableFpsSettings: [Int] = FpsDividers.map { Int(60 / $0) } -let FpsUserDefaultsKey: String = "FPS" +// Settings constants are now in SettingsConstants.swift class MetalView : UIView { override class var layerClass: AnyClass { @@ -164,7 +162,10 @@ class RecordSessionViewController : UIViewController, ARSessionDelegate { 
self.updateTime() } startRawIMU() - datasetEncoder = DatasetEncoder(arConfiguration: arConfiguration!, fpsDivider: FpsDividers[chosenFpsSetting]) + + // Use fpsDivider of 1 for adaptive mode (adaptive logic handles frame skipping) + let fpsDivider = chosenFpsSetting == AdaptiveModeIndex ? 1 : FpsDividers[chosenFpsSetting] + datasetEncoder = DatasetEncoder(arConfiguration: arConfiguration!, fpsDivider: fpsDivider) startRawIMU() } @@ -246,7 +247,15 @@ class RecordSessionViewController : UIViewController, ARSessionDelegate { } @IBAction func fpsButtonTapped() { - chosenFpsSetting = (chosenFpsSetting + 1) % AvailableFpsSettings.count + // Always cycle through FPS settings and adaptive mode + if chosenFpsSetting == AdaptiveModeIndex { + chosenFpsSetting = 0 + } else if chosenFpsSetting == AvailableFpsSettings.count - 1 { + chosenFpsSetting = AdaptiveModeIndex + } else { + chosenFpsSetting = chosenFpsSetting + 1 + } + updateFpsSetting() UserDefaults.standard.set(chosenFpsSetting, forKey: FpsUserDefaultsKey) } @@ -267,8 +276,13 @@ class RecordSessionViewController : UIViewController, ARSessionDelegate { } private func updateFpsSetting() { - let fps = AvailableFpsSettings[chosenFpsSetting] - let buttonLabel: String = "\(fps) fps" + let buttonLabel: String + if chosenFpsSetting == AdaptiveModeIndex { + buttonLabel = "Adaptive" + } else { + let fps = AvailableFpsSettings[chosenFpsSetting] + buttonLabel = "\(fps) fps" + } fpsButton.setTitle(buttonLabel, for: UIControl.State.normal) } diff --git a/StrayScanner/Helpers/DatasetEncoder.swift b/StrayScanner/Helpers/DatasetEncoder.swift index c131ed9..b1a7936 100644 --- a/StrayScanner/Helpers/DatasetEncoder.swift +++ b/StrayScanner/Helpers/DatasetEncoder.swift @@ -10,6 +10,7 @@ import Foundation import ARKit import CryptoKit import CoreMotion +import UIKit class DatasetEncoder { enum Status { @@ -39,12 +40,41 @@ class DatasetEncoder { private var latestAccelerometerData: (timestamp: Double, data: simd_double3)? 
private var latestGyroscopeData: (timestamp: Double, data: simd_double3)? + + // Adaptive mode properties + private let adaptiveModeEnabled: Bool + private let positionThreshold: Float + private let angleThresholdCos: Float // Cosine of angle threshold for efficient comparison + private var lastSavedTransform: simd_float4x4? + private let hapticGenerator = UIImpactFeedbackGenerator(style: .light) init(arConfiguration: ARWorldTrackingConfiguration, fpsDivider: Int = 1) { self.frameInterval = fpsDivider self.queue = DispatchQueue(label: "encoderQueue") + // Check if we're in adaptive mode (indicated by the FPS button selection) + let currentFpsSetting = UserDefaults.standard.integer(forKey: FpsUserDefaultsKey) + self.adaptiveModeEnabled = (currentFpsSetting == AdaptiveModeIndex) + + // Load adaptive mode thresholds with defaults + let posThreshold = UserDefaults.standard.double(forKey: AdaptiveThresholdPositionKey) + let angleThresholdDegrees = UserDefaults.standard.double(forKey: AdaptiveThresholdAngleKey) + self.positionThreshold = Float(posThreshold > 0 ? posThreshold : DefaultAdaptiveThresholdPosition) + + // Convert angle threshold to cosine for efficient comparison + let angleThresholdRadians = Float(angleThresholdDegrees > 0 ? 
angleThresholdDegrees : DefaultAdaptiveThresholdAngle) * Float.pi / 180.0 + self.angleThresholdCos = cos(angleThresholdRadians) // Direct angle for forward vector comparison + + // Prepare haptic generator + if self.adaptiveModeEnabled { + hapticGenerator.prepare() + let angleDegreesForDebug = acos(self.angleThresholdCos) * 180.0 / Float.pi + print("Adaptive mode enabled: pos threshold=\(self.positionThreshold*100)cm, angle threshold=\(angleDegreesForDebug)° (forward vector)") + } else { + print("Adaptive mode disabled") + } + let width = arConfiguration.videoFormat.imageResolution.width let height = arConfiguration.videoFormat.imageResolution.height var theId: UUID = UUID() @@ -67,9 +97,50 @@ class DatasetEncoder { let totalFrames: Int = currentFrame let frameNumber: Int = savedFrames currentFrame = currentFrame + 1 + + // Check if we should skip this frame based on frame interval if (currentFrame % frameInterval != 0) { return } + + // If adaptive mode is enabled, check if pose has changed significantly + if adaptiveModeEnabled { + let currentTransform = frame.camera.transform + + if let lastTransform = lastSavedTransform { + // Calculate position change + let lastPos = simd_float3(lastTransform.columns.3.x, lastTransform.columns.3.y, lastTransform.columns.3.z) + let currentPos = simd_float3(currentTransform.columns.3.x, currentTransform.columns.3.y, currentTransform.columns.3.z) + let positionDelta = simd_distance(lastPos, currentPos) + + // Calculate forward vector dot product for rotation comparison + // Forward is -Z in ARKit's coordinate system + let lastForward = -simd_float3(lastTransform.columns.2.x, lastTransform.columns.2.y, lastTransform.columns.2.z) + let currentForward = -simd_float3(currentTransform.columns.2.x, currentTransform.columns.2.y, currentTransform.columns.2.z) + let dotProduct = simd_dot(lastForward, currentForward) + + // Debug logging (remove in production) +// if adaptiveModeEnabled { +// let angleDelta = acos(min(dotProduct, 1.0)) * 
180.0 / Float.pi +// let angleThresholdDegrees = acos(angleThresholdCos) * 180.0 / Float.pi +// print("Adaptive mode: pos=\(positionDelta*100)cm, angle=\(angleDelta)°, thresholds: pos=\(positionThreshold*100)cm, angle=\(angleThresholdDegrees)°") +// } + + // Skip frame if changes are below thresholds + // Note: dotProduct > angleThresholdCos means angle < threshold (cosine decreases as angle increases) + if positionDelta < positionThreshold && dotProduct > angleThresholdCos { + return + } + } + + // Update last saved transform + lastSavedTransform = currentTransform + + // Trigger haptic feedback for adaptive mode capture + DispatchQueue.main.async { + self.hapticGenerator.impactOccurred() + } + } dispatchGroup.enter() queue.async { if let sceneDepth = frame.sceneDepth { diff --git a/StrayScanner/Views/SessionList.swift b/StrayScanner/Views/SessionList.swift index 3e1125e..99b2aaf 100644 --- a/StrayScanner/Views/SessionList.swift +++ b/StrayScanner/Views/SessionList.swift @@ -46,6 +46,7 @@ class SessionListViewModel: ObservableObject { struct SessionList: View { @ObservedObject var viewModel = SessionListViewModel() @State private var showingInfo = false + @State private var showingSettings = false init() { UITableView.appearance().backgroundColor = UIColor(named: "BackgroundColor") @@ -65,6 +66,18 @@ struct SessionList: View { .multilineTextAlignment(.center) .padding([.top, .leading], 15.0) Spacer() + Button(action: { + showingSettings.toggle() + }, label: { + Image(systemName: "gearshape") + .resizable() + .frame(width: 25, height: 25, alignment: .center) + .padding(.top, 17) + .padding(.trailing, 10) + .foregroundColor(Color("TextColor")) + }).sheet(isPresented: $showingSettings) { + SettingsView() + } Button(action: { showingInfo.toggle() }, label: { diff --git a/StrayScanner/Views/SettingsView.swift b/StrayScanner/Views/SettingsView.swift new file mode 100644 index 0000000..04a83c7 --- /dev/null +++ b/StrayScanner/Views/SettingsView.swift @@ -0,0 +1,92 @@ +// 
+// SettingsView.swift +// StrayScanner +// +// Settings view for configuring scan parameters +// + +import SwiftUI + +struct SettingsView: View { + @AppStorage(FpsUserDefaultsKey) private var fpsSettingIndex: Int = 0 + @AppStorage(AdaptiveThresholdPositionKey) private var adaptiveThresholdPosition: Double = DefaultAdaptiveThresholdPosition + @AppStorage(AdaptiveThresholdAngleKey) private var adaptiveThresholdAngle: Double = DefaultAdaptiveThresholdAngle + + @Environment(\.presentationMode) var presentationMode + + init() { + // Ensure default values are stored in UserDefaults + if UserDefaults.standard.object(forKey: AdaptiveThresholdPositionKey) == nil { + UserDefaults.standard.set(DefaultAdaptiveThresholdPosition, forKey: AdaptiveThresholdPositionKey) + } + if UserDefaults.standard.object(forKey: AdaptiveThresholdAngleKey) == nil { + UserDefaults.standard.set(DefaultAdaptiveThresholdAngle, forKey: AdaptiveThresholdAngleKey) + } + } + + var body: some View { + NavigationView { + Form { + Section(header: Text("Adaptive Mode")) { + VStack(alignment: .leading, spacing: 10) { + Text("Adaptive mode only captures frames when the camera pose changes significantly. Select 'Adaptive' from the frame rate button during recording to use.") + .font(.caption) + .foregroundColor(.secondary) + .padding(.bottom, 5) + + VStack(alignment: .leading) { + Text("Position Threshold: \(String(format: "%.1f", adaptiveThresholdPosition * 100)) cm") + .font(.caption) + Slider(value: $adaptiveThresholdPosition, in: 0.01...0.5, step: 0.01) + } + + VStack(alignment: .leading) { + Text("Rotation Threshold: \(String(format: "%.1f", adaptiveThresholdAngle))°") + .font(.caption) + Slider(value: $adaptiveThresholdAngle, in: 1.0...90.0, step: 1.0) + } + } + .padding(.top, 5) + } + + Section(header: Text("About")) { + if let version = Bundle.main.infoDictionary?["CFBundleVersion"] as? 
String { + HStack { + Text("Version") + Spacer() + Text(version) + } + } + HStack { + Text("IMU Sample Rate") + Spacer() + Text("~100 Hz") + .foregroundColor(.secondary) + } + + HStack { + Text("Max Video Frame Rate") + Spacer() + Text("60 fps") + .foregroundColor(.secondary) + } + } + } + .navigationTitle("Settings") + .navigationBarTitleDisplayMode(.inline) + .toolbar { + ToolbarItem(placement: .navigationBarTrailing) { + Button("Done") { + presentationMode.wrappedValue.dismiss() + } + } + } + } + } +} + +struct SettingsView_Previews: PreviewProvider { + static var previews: some View { + SettingsView() + } +} From c56423f9a9b2d457c7b53ecf4fd40b19f6d9919b Mon Sep 17 00:00:00 2001 From: Martin Date: Thu, 3 Jul 2025 20:15:39 -0700 Subject: [PATCH 4/4] Fix memory management and frame numbering in DatasetEncoder MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add weak self capture to prevent retain cycles in async frame processing - Extract ARFrame data before async dispatch to release frames quickly - Fix frame number inconsistency (was using totalFrames instead of frameNumber for RGB encoder) - Add defer block to ensure dispatch group balance even if self is nil 🤖 Generated with [Claude Code](https://claude.ai/code) Co-Authored-By: Claude --- StrayScanner/Helpers/DatasetEncoder.swift | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/StrayScanner/Helpers/DatasetEncoder.swift b/StrayScanner/Helpers/DatasetEncoder.swift index b1a7936..40c0453 100644 --- a/StrayScanner/Helpers/DatasetEncoder.swift +++ b/StrayScanner/Helpers/DatasetEncoder.swift @@ -141,9 +141,23 @@ class DatasetEncoder { self.hapticGenerator.impactOccurred() } } + + // Extract what we need from the frame before async dispatch + let capturedImage = frame.capturedImage + let sceneDepth = frame.sceneDepth + let frameTimestamp = frame.timestamp + let frameTransform = frame.camera.transform + dispatchGroup.enter() - 
queue.async { - if let sceneDepth = frame.sceneDepth { + queue.async { [weak self] in + defer { + // Always balance enter/leave, even if self is nil + self?.dispatchGroup.leave() + } + + guard let self = self else { return } + + if let sceneDepth = sceneDepth { self.depthEncoder.encodeFrame(frame: sceneDepth.depthMap, frameNumber: frameNumber) if let confidence = sceneDepth.confidenceMap { self.confidenceEncoder.encodeFrame(frame: confidence, frameNumber: frameNumber) @@ -153,10 +167,10 @@ class DatasetEncoder { } else { print("warning: scene depth missing.") } - self.rgbEncoder.add(frame: VideoEncoderInput(buffer: frame.capturedImage, time: frame.timestamp), currentFrame: totalFrames) + + self.rgbEncoder.add(frame: VideoEncoderInput(buffer: capturedImage, time: frameTimestamp), currentFrame: frameNumber) self.odometryEncoder.add(frame: frame, currentFrame: frameNumber) self.lastFrame = frame - self.dispatchGroup.leave() } savedFrames = savedFrames + 1 }