Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 22 additions & 0 deletions StrayScanner.xcodeproj/project.pbxproj
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,9 @@
38E969CB2572608E00054CC4 /* NewSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 38E969CA2572608E00054CC4 /* NewSession.swift */; };
38FB730F2572A9FA007D9CB0 /* RecordSessionViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 38FB730E2572A9FA007D9CB0 /* RecordSessionViewController.swift */; };
38FB73162572AF63007D9CB0 /* Shaders.metal in Sources */ = {isa = PBXBuildFile; fileRef = 38FB73152572AF63007D9CB0 /* Shaders.metal */; };
70849EC02E0E46B1007E1315 /* SettingsView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 70849EBF2E0E46B1007E1315 /* SettingsView.swift */; };
70849EC42E0E4CE2007E1315 /* SettingsConstants.swift in Sources */ = {isa = PBXBuildFile; fileRef = 70849EC22E0E4CE2007E1315 /* SettingsConstants.swift */; };
709259192E0B79AB00A7B62E /* ShareUtility.swift in Sources */ = {isa = PBXBuildFile; fileRef = 709259182E0B79AB00A7B62E /* ShareUtility.swift */; };
/* End PBXBuildFile section */

/* Begin PBXContainerItemProxy section */
Expand Down Expand Up @@ -119,6 +122,9 @@
38FB73152572AF63007D9CB0 /* Shaders.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = Shaders.metal; sourceTree = "<group>"; };
38FB731F2573EABB007D9CB0 /* ShaderTypes.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ShaderTypes.h; sourceTree = "<group>"; };
38FB73242573ECE2007D9CB0 /* BridgeHeader.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = BridgeHeader.h; path = StrayScanner/BridgeHeader.h; sourceTree = "<group>"; };
70849EBF2E0E46B1007E1315 /* SettingsView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SettingsView.swift; sourceTree = "<group>"; };
70849EC22E0E4CE2007E1315 /* SettingsConstants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SettingsConstants.swift; sourceTree = "<group>"; };
709259182E0B79AB00A7B62E /* ShareUtility.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ShareUtility.swift; sourceTree = "<group>"; };
AC52749A3B5AED09C9753120 /* Pods-StrayScanner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-StrayScanner.debug.xcconfig"; path = "Target Support Files/Pods-StrayScanner/Pods-StrayScanner.debug.xcconfig"; sourceTree = "<group>"; };
DBC054EDDA47FB8A717AA671 /* Pods_StrayScanner.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_StrayScanner.framework; sourceTree = BUILT_PRODUCTS_DIR; };
/* End PBXFileReference section */
Expand Down Expand Up @@ -179,6 +185,7 @@
38045D562561A6D20004F23C /* Views */ = {
isa = PBXGroup;
children = (
70849EBF2E0E46B1007E1315 /* SettingsView.swift */,
38045D572561A7370004F23C /* SessionRow.swift */,
38045D592561AC000004F23C /* SessionList.swift */,
38E969CA2572608E00054CC4 /* NewSession.swift */,
Expand Down Expand Up @@ -216,6 +223,7 @@
children = (
38694C6B26D2C66F00546EA1 /* Helpers */,
387B576825EA55AC00132903 /* CCode */,
70849EC32E0E4CE2007E1315 /* Constants */,
38C17AE4259BCD16006B3FDA /* UI */,
38FB73142572AF50007D9CB0 /* Shaders */,
38FB730D2572A91D007D9CB0 /* Controllers */,
Expand Down Expand Up @@ -244,6 +252,7 @@
38694C6B26D2C66F00546EA1 /* Helpers */ = {
isa = PBXGroup;
children = (
709259182E0B79AB00A7B62E /* ShareUtility.swift */,
38694C6C26D2C66F00546EA1 /* DatasetEncoder.swift */,
38694C6D26D2C66F00546EA1 /* ConfidenceEncoder.swift */,
38694C6E26D2C66F00546EA1 /* VideoEncoder.swift */,
Expand Down Expand Up @@ -310,6 +319,14 @@
name = Frameworks;
sourceTree = "<group>";
};
70849EC32E0E4CE2007E1315 /* Constants */ = {
isa = PBXGroup;
children = (
70849EC22E0E4CE2007E1315 /* SettingsConstants.swift */,
);
path = Constants;
sourceTree = "<group>";
};
/* End PBXGroup section */

/* Begin PBXNativeTarget section */
Expand Down Expand Up @@ -508,11 +525,14 @@
385999BD25616F2B00F3F681 /* AppDelegate.swift in Sources */,
38C17B23259D1C80006B3FDA /* SessionDetail.swift in Sources */,
385999C225616F2B00F3F681 /* Stray_Scanner.xcdatamodeld in Sources */,
70849EC12E0E46B1007E1315 /* SettingsView.swift in Sources */,
38045D5A2561AC000004F23C /* SessionList.swift in Sources */,
38694C8226D2C66F00546EA1 /* AppDaemon.swift in Sources */,
38694C7626D2C66F00546EA1 /* ConfidenceEncoder.swift in Sources */,
386E277825991163007D023B /* RecordButton.swift in Sources */,
385999BF25616F2B00F3F681 /* SceneDelegate.swift in Sources */,
709259192E0B79AB00A7B62E /* ShareUtility.swift in Sources */,
70849EC42E0E4CE2007E1315 /* SettingsConstants.swift in Sources */,
38694C8026D2C66F00546EA1 /* DepthEncoder.swift in Sources */,
38C17B13259BE1DA006B3FDA /* Recording+CoreDataProperties.swift in Sources */,
38C17B12259BE1DA006B3FDA /* Recording+CoreDataClass.swift in Sources */,
Expand All @@ -532,6 +552,7 @@
3863FE6325FCBB0E00C1DA4F /* SessionRow.swift in Sources */,
38694C7526D2C66F00546EA1 /* DatasetEncoder.swift in Sources */,
38694C7D26D2C66F00546EA1 /* CameraRenderer.swift in Sources */,
70849EC02E0E46B1007E1315 /* SettingsView.swift in Sources */,
38694C7B26D2C66F00546EA1 /* IMUEncoder.swift in Sources */,
3863FE6425FCBB0E00C1DA4F /* Shaders.metal in Sources */,
3863FE6525FCBB0E00C1DA4F /* AppDelegate.swift in Sources */,
Expand All @@ -541,6 +562,7 @@
38694C8326D2C66F00546EA1 /* AppDaemon.swift in Sources */,
38694C7726D2C66F00546EA1 /* ConfidenceEncoder.swift in Sources */,
3863FE6A25FCBB0E00C1DA4F /* RecordButton.swift in Sources */,
70849EC52E0E4CE2007E1315 /* SettingsConstants.swift in Sources */,
3807421027BBD07D003194C1 /* PngEncoder.mm in Sources */,
3863FE6C25FCBB0E00C1DA4F /* SceneDelegate.swift in Sources */,
38694C8126D2C66F00546EA1 /* DepthEncoder.swift in Sources */,
Expand Down
22 changes: 22 additions & 0 deletions StrayScanner/Constants/SettingsConstants.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
//
// SettingsConstants.swift
// StrayScanner
//
// Shared constants: settings persistence keys and their default values.
//

import Foundation

// MARK: - UserDefaults Keys

/// Key under which the selected FPS setting index is persisted.
let FpsUserDefaultsKey: String = "FPS"
/// Key for the adaptive-mode position threshold (stored in meters).
let AdaptiveThresholdPositionKey: String = "AdaptiveThresholdPosition"
/// Key for the adaptive-mode angle threshold (stored in degrees).
let AdaptiveThresholdAngleKey: String = "AdaptiveThresholdAngle"

// MARK: - FPS Settings

/// Divisors applied to the 60 Hz capture rate; each entry yields one selectable FPS option.
let FpsDividers: [Int] = [1, 2, 4, 12, 60]
/// Selectable frame rates derived from `FpsDividers` (60, 30, 15, 5, 1 fps).
let AvailableFpsSettings: [Int] = FpsDividers.map { 60 / $0 }
/// Sentinel index meaning "adaptive mode" rather than a fixed FPS entry.
let AdaptiveModeIndex: Int = -1

// MARK: - Adaptive Mode Defaults

/// Default minimum camera translation (meters) before a new frame is saved. (15 cm)
let DefaultAdaptiveThresholdPosition: Double = 0.15
/// Default minimum camera rotation (degrees) before a new frame is saved.
let DefaultAdaptiveThresholdAngle: Double = 15.0
28 changes: 21 additions & 7 deletions StrayScanner/Controllers/RecordSessionViewController.swift
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,7 @@ import ARKit
import CoreData
import CoreMotion

let FpsDividers: [Int] = [1, 2, 4, 12, 60]
let AvailableFpsSettings: [Int] = FpsDividers.map { Int(60 / $0) }
let FpsUserDefaultsKey: String = "FPS"
// Settings constants are now in SettingsConstants.swift

class MetalView : UIView {
override class var layerClass: AnyClass {
Expand Down Expand Up @@ -164,7 +162,10 @@ class RecordSessionViewController : UIViewController, ARSessionDelegate {
self.updateTime()
}
startRawIMU()
datasetEncoder = DatasetEncoder(arConfiguration: arConfiguration!, fpsDivider: FpsDividers[chosenFpsSetting])

// Use fpsDivider of 1 for adaptive mode (adaptive logic handles frame skipping)
let fpsDivider = chosenFpsSetting == AdaptiveModeIndex ? 1 : FpsDividers[chosenFpsSetting]
datasetEncoder = DatasetEncoder(arConfiguration: arConfiguration!, fpsDivider: fpsDivider)
startRawIMU()
}

Expand Down Expand Up @@ -246,7 +247,15 @@ class RecordSessionViewController : UIViewController, ARSessionDelegate {
}

@IBAction func fpsButtonTapped() {
chosenFpsSetting = (chosenFpsSetting + 1) % AvailableFpsSettings.count
// Always cycle through FPS settings and adaptive mode
if chosenFpsSetting == AdaptiveModeIndex {
chosenFpsSetting = 0
} else if chosenFpsSetting == AvailableFpsSettings.count - 1 {
chosenFpsSetting = AdaptiveModeIndex
} else {
chosenFpsSetting = chosenFpsSetting + 1
}

updateFpsSetting()
UserDefaults.standard.set(chosenFpsSetting, forKey: FpsUserDefaultsKey)
}
Expand All @@ -267,8 +276,13 @@ class RecordSessionViewController : UIViewController, ARSessionDelegate {
}

private func updateFpsSetting() {
let fps = AvailableFpsSettings[chosenFpsSetting]
let buttonLabel: String = "\(fps) fps"
let buttonLabel: String
if chosenFpsSetting == AdaptiveModeIndex {
buttonLabel = "Adaptive"
} else {
let fps = AvailableFpsSettings[chosenFpsSetting]
buttonLabel = "\(fps) fps"
}
fpsButton.setTitle(buttonLabel, for: UIControl.State.normal)
}

Expand Down
93 changes: 89 additions & 4 deletions StrayScanner/Helpers/DatasetEncoder.swift
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import Foundation
import ARKit
import CryptoKit
import CoreMotion
import UIKit

class DatasetEncoder {
enum Status {
Expand Down Expand Up @@ -39,12 +40,41 @@ class DatasetEncoder {

private var latestAccelerometerData: (timestamp: Double, data: simd_double3)?
private var latestGyroscopeData: (timestamp: Double, data: simd_double3)?

// Adaptive mode properties
private let adaptiveModeEnabled: Bool
private let positionThreshold: Float
private let angleThresholdCos: Float // Cosine of angle threshold for efficient comparison
private var lastSavedTransform: simd_float4x4?
private let hapticGenerator = UIImpactFeedbackGenerator(style: .light)


init(arConfiguration: ARWorldTrackingConfiguration, fpsDivider: Int = 1) {
self.frameInterval = fpsDivider
self.queue = DispatchQueue(label: "encoderQueue")

// Check if we're in adaptive mode (indicated by the FPS button selection)
let currentFpsSetting = UserDefaults.standard.integer(forKey: FpsUserDefaultsKey)
self.adaptiveModeEnabled = (currentFpsSetting == AdaptiveModeIndex)

// Load adaptive mode thresholds with defaults
let posThreshold = UserDefaults.standard.double(forKey: AdaptiveThresholdPositionKey)
let angleThresholdDegrees = UserDefaults.standard.double(forKey: AdaptiveThresholdAngleKey)
self.positionThreshold = Float(posThreshold > 0 ? posThreshold : DefaultAdaptiveThresholdPosition)

// Convert angle threshold to cosine for efficient comparison
let angleThresholdRadians = Float(angleThresholdDegrees > 0 ? angleThresholdDegrees : DefaultAdaptiveThresholdAngle) * Float.pi / 180.0
self.angleThresholdCos = cos(angleThresholdRadians) // Direct angle for forward vector comparison

// Prepare haptic generator
if self.adaptiveModeEnabled {
hapticGenerator.prepare()
let angleDegreesForDebug = acos(self.angleThresholdCos) * 180.0 / Float.pi
print("Adaptive mode enabled: pos threshold=\(self.positionThreshold*100)cm, angle threshold=\(angleDegreesForDebug)° (forward vector)")
} else {
print("Adaptive mode disabled")
}

let width = arConfiguration.videoFormat.imageResolution.width
let height = arConfiguration.videoFormat.imageResolution.height
var theId: UUID = UUID()
Expand All @@ -67,12 +97,67 @@ class DatasetEncoder {
let totalFrames: Int = currentFrame
let frameNumber: Int = savedFrames
currentFrame = currentFrame + 1

// Check if we should skip this frame based on frame interval
if (currentFrame % frameInterval != 0) {
return
}

// If adaptive mode is enabled, check if pose has changed significantly
if adaptiveModeEnabled {
let currentTransform = frame.camera.transform

if let lastTransform = lastSavedTransform {
// Calculate position change
let lastPos = simd_float3(lastTransform.columns.3.x, lastTransform.columns.3.y, lastTransform.columns.3.z)
let currentPos = simd_float3(currentTransform.columns.3.x, currentTransform.columns.3.y, currentTransform.columns.3.z)
let positionDelta = simd_distance(lastPos, currentPos)

// Calculate forward vector dot product for rotation comparison
// Forward is -Z in ARKit's coordinate system
let lastForward = -simd_float3(lastTransform.columns.2.x, lastTransform.columns.2.y, lastTransform.columns.2.z)
let currentForward = -simd_float3(currentTransform.columns.2.x, currentTransform.columns.2.y, currentTransform.columns.2.z)
let dotProduct = simd_dot(lastForward, currentForward)

// Debug logging (remove in production)
// if adaptiveModeEnabled {
// let angleDelta = acos(min(dotProduct, 1.0)) * 180.0 / Float.pi
// let angleThresholdDegrees = acos(angleThresholdCos) * 180.0 / Float.pi
// print("Adaptive mode: pos=\(positionDelta*100)cm, angle=\(angleDelta)°, thresholds: pos=\(positionThreshold*100)cm, angle=\(angleThresholdDegrees)°")
// }

// Skip frame if changes are below thresholds
// Note: dotProduct > angleThresholdCos means angle < threshold (cosine decreases as angle increases)
if positionDelta < positionThreshold && dotProduct > angleThresholdCos {
return
}
}

// Update last saved transform
lastSavedTransform = currentTransform

// Trigger haptic feedback for adaptive mode capture
DispatchQueue.main.async {
self.hapticGenerator.impactOccurred()
}
}

// Extract what we need from the frame before async dispatch
let capturedImage = frame.capturedImage
let sceneDepth = frame.sceneDepth
let frameTimestamp = frame.timestamp
let frameTransform = frame.camera.transform

dispatchGroup.enter()
queue.async {
if let sceneDepth = frame.sceneDepth {
queue.async { [weak self] in
defer {
// Always balance enter/leave, even if self is nil
self?.dispatchGroup.leave()
}

guard let self = self else { return }

if let sceneDepth = sceneDepth {
self.depthEncoder.encodeFrame(frame: sceneDepth.depthMap, frameNumber: frameNumber)
if let confidence = sceneDepth.confidenceMap {
self.confidenceEncoder.encodeFrame(frame: confidence, frameNumber: frameNumber)
Expand All @@ -82,10 +167,10 @@ class DatasetEncoder {
} else {
print("warning: scene depth missing.")
}
self.rgbEncoder.add(frame: VideoEncoderInput(buffer: frame.capturedImage, time: frame.timestamp), currentFrame: totalFrames)

self.rgbEncoder.add(frame: VideoEncoderInput(buffer: capturedImage, time: frameTimestamp), currentFrame: frameNumber)
self.odometryEncoder.add(frame: frame, currentFrame: frameNumber)
self.lastFrame = frame
self.dispatchGroup.leave()
}
savedFrames = savedFrames + 1
}
Expand Down
60 changes: 60 additions & 0 deletions StrayScanner/Helpers/ShareUtility.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
//
// ShareUtility.swift
// StrayScanner
//
// Created by Claude on 6/24/25.
//

import Foundation
import Compression

/// Utility class for creating shareable archives from recording datasets
class ShareUtility {

/// Creates a shareable ZIP archive from a recording's dataset
/// - Parameter recording: The recording to create a ZIP archive for
/// - Returns: URL of the created ZIP file
static func createShareableArchive(for recording: Recording) async throws -> URL {
guard let sourceDirectory = recording.directoryPath() else {
throw NSError(domain: "ShareError", code: 1, userInfo: [NSLocalizedDescriptionKey: "Unable to get recording directory path"])
}

let tempDirectory = FileManager.default.temporaryDirectory
let archiveName = "\(recording.name ?? "Recording")_\(recording.id?.uuidString.prefix(8) ?? "unknown").zip"
let archiveURL = tempDirectory.appendingPathComponent(archiveName)

// Remove existing archive if it exists
try? FileManager.default.removeItem(at: archiveURL)

return try await withCheckedThrowingContinuation { continuation in
DispatchQueue.global(qos: .userInitiated).async {
do {
try createZipArchive(sourceDirectory: sourceDirectory, destinationURL: archiveURL)
continuation.resume(returning: archiveURL)
} catch {
continuation.resume(throwing: error)
}
}
}
}

private static func createZipArchive(sourceDirectory: URL, destinationURL: URL) throws {
let coordinator = NSFileCoordinator()
var error: NSError?

coordinator.coordinate(readingItemAt: sourceDirectory, options: [.forUploading], error: &error) { (zipURL) in
do {
_ = zipURL.startAccessingSecurityScopedResource()
defer { zipURL.stopAccessingSecurityScopedResource() }

try FileManager.default.copyItem(at: zipURL, to: destinationURL)
} catch {
print("Failed to create zip: \(error)")
}
}

if let error = error {
throw error
}
}
}
11 changes: 7 additions & 4 deletions StrayScanner/Views/InformationView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -26,13 +26,16 @@ This app lets you record video and depth datasets using the camera and LIDAR sca
heading("Transferring Datasets To Your Desktop Computer")

bodyText("""
The recorded datasets can be exported by connecting your device to it with the lightning cable.
The recorded datasets can be exported in several ways:

On Mac, you can access the files through Finder. In the sidebar, select your device. Under the "Files" tab, you should see an entry for Stray Scanner. Expand it, then drag the folders to the desired location. There is one folder per dataset, each named after a random alphanumerical hash.
1. Share directly from the app: Tap the "Share" button inside any recording to export it as a ZIP archive via AirDrop, email, or save to Files.

On Windows, you can access the files through iTunes.
2. Connect your device with the lightning cable:
• On Mac, access files through Finder sidebar > your device > "Files" tab > Stray Scanner
• On Windows, access files through iTunes
• Drag folders to your desired location (one folder per dataset)

Alternatively, you can access the data in the Files app under "Browse > On My iPhone > Stray Scanner" and export them to another app or move them to your iCloud drive.
3. Use the Files app: Browse > On My iPhone > Stray Scanner, then export to another app or iCloud drive.
""")
}
Group {
Expand Down
Loading