diff --git a/GPUImage2.podspec b/GPUImage2.podspec new file mode 100644 index 00000000..6ce15189 --- /dev/null +++ b/GPUImage2.podspec @@ -0,0 +1,20 @@ +Pod::Spec.new do |s| + s.name = 'GPUImage2' + s.version = '0.1.0' + s.license = 'BSD' + s.summary = 'An open source iOS framework for GPU-based image and video processing.' + s.homepage = 'https://github.com/BradLarson/GPUImage2' + s.author = { 'Brad Larson' => 'contact@sunsetlakesoftware.com' } + + s.source = { :git => 'https://github.com/RoCry/GPUImage2' } + + s.source_files = 'framework/Source/**/*.{h,m,swift}' + s.resources = 'framework/Source/Operations/Shaders/*.{fsh}' + s.requires_arc = true + s.xcconfig = { 'CLANG_MODULES_AUTOLINK' => 'YES', 'OTHER_SWIFT_FLAGS' => "$(inherited) -DGLES"} + + s.ios.deployment_target = '10.0' + s.ios.exclude_files = 'framework/Source/Mac', 'framework/Source/Linux', 'framework/Source/Operations/Shaders/ConvertedShaders_GL.swift' + s.frameworks = ['OpenGLES', 'CoreMedia', 'QuartzCore', 'AVFoundation'] + +end diff --git a/README.md b/README.md index 89c2d95d..c231f7d7 100755 --- a/README.md +++ b/README.md @@ -152,7 +152,7 @@ There are a few different ways to approach filtering an image. The easiest are t ```swift let testImage = UIImage(named:"WID-small.jpg")! let toonFilter = SmoothToonFilter() -let filteredImage = testImage.filterWithOperation(toonFilter) +let filteredImage = try! testImage.filterWithOperation(toonFilter) ``` for a more complex pipeline: @@ -161,7 +161,7 @@ for a more complex pipeline: let testImage = UIImage(named:"WID-small.jpg")! let toonFilter = SmoothToonFilter() let luminanceFilter = Luminance() -let filteredImage = testImage.filterWithPipeline{input, output in +let filteredImage = try! testImage.filterWithPipeline{input, output in input --> toonFilter --> luminanceFilter --> output } ``` @@ -173,7 +173,7 @@ Both of these convenience methods wrap several operations. 
To feed a picture int ```swift let toonFilter = SmoothToonFilter() let testImage = UIImage(named:"WID-small.jpg")! -let pictureInput = PictureInput(image:testImage) +let pictureInput = try! PictureInput(image:testImage) let pictureOutput = PictureOutput() pictureOutput.imageAvailableCallback = {image in // Do something with image @@ -186,24 +186,116 @@ In the above, the imageAvailableCallback will be triggered right at the processI ### Filtering and re-encoding a movie ### -To filter an existing movie file, you can write code like the following: +To filter and playback an existing movie file, you can write code like the following: ```swift do { - let bundleURL = Bundle.main.resourceURL! - let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! - movie = try MovieInput(url:movieURL, playAtActualSpeed:true) + let bundleURL = Bundle.main.resourceURL! + let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! + + let audioDecodeSettings = [AVFormatIDKey:kAudioFormatLinearPCM] + + movie = try MovieInput(url:movieURL, playAtActualSpeed:true, loop:true, audioSettings:audioDecodeSettings) + speaker = SpeakerOutput() + movie.audioEncodingTarget = speaker + filter = SaturationAdjustment() movie --> filter --> renderView + movie.start() + speaker.start() } catch { - fatalError("Could not initialize rendering pipeline: \(error)") + print("Couldn't process movie with error: \(error)") } ``` where renderView is an instance of RenderView that you've placed somewhere in your view hierarchy. The above loads a movie named "sample_iPod.m4v" from the application's bundle, creates a saturation filter, and directs movie frames to be processed through the saturation filter on their way to the screen. start() initiates the movie playback. +To filter an existing movie file and save the result to a new movie file you can write code like the following: + + +```swift +let bundleURL = Bundle.main.resourceURL! 
+// The movie you want to reencode +let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! + +let documentsDir = FileManager().urls(for:.documentDirectory, in:.userDomainMask).first! +// The location you want to save the new video +let exportedURL = URL(string:"test.mp4", relativeTo:documentsDir)! + +let asset = AVURLAsset(url:movieURL, options:[AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)]) + +guard let videoTrack = asset.tracks(withMediaType:.video).first else { return } +let audioTrack = asset.tracks(withMediaType:.audio).first + +// If you would like passthrough audio instead, set both audioDecodingSettings and audioEncodingSettings to nil +let audioDecodingSettings:[String:Any] = [AVFormatIDKey:kAudioFormatLinearPCM] // Noncompressed audio samples + +do { + movieInput = try MovieInput(asset:asset, videoComposition:nil, playAtActualSpeed:false, loop:false, audioSettings:audioDecodingSettings) +} +catch { + print("ERROR: Unable to setup MovieInput with error: \(error)") + return +} + +try? 
FileManager().removeItem(at: exportedURL) + +let videoEncodingSettings:[String:Any] = [ + AVVideoCompressionPropertiesKey: [ + AVVideoExpectedSourceFrameRateKey:videoTrack.nominalFrameRate, + AVVideoAverageBitRateKey:videoTrack.estimatedDataRate, + AVVideoProfileLevelKey:AVVideoProfileLevelH264HighAutoLevel, + AVVideoH264EntropyModeKey:AVVideoH264EntropyModeCABAC, + AVVideoAllowFrameReorderingKey:videoTrack.requiresFrameReordering], + AVVideoCodecKey:AVVideoCodecH264] + +var acl = AudioChannelLayout() +memset(&acl, 0, MemoryLayout.size) +acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo +let audioEncodingSettings:[String:Any] = [ + AVFormatIDKey:kAudioFormatMPEG4AAC, + AVNumberOfChannelsKey:2, + AVSampleRateKey:AVAudioSession.sharedInstance().sampleRate, + AVChannelLayoutKey:NSData(bytes:&acl, length:MemoryLayout.size), + AVEncoderBitRateKey:96000 +] + +do { + movieOutput = try MovieOutput(URL:exportedURL, size:Size(width:Float(videoTrack.naturalSize.width), height:Float(videoTrack.naturalSize.height)), fileType:AVFileType.mp4.rawValue, liveVideo:false, videoSettings:videoEncodingSettings, videoNaturalTimeScale:videoTrack.naturalTimeScale, audioSettings:audioEncodingSettings) +} +catch { + print("ERROR: Unable to setup MovieOutput with error: \(error)") + return +} + +filter = SaturationAdjustment() + +if(audioTrack != nil) { movieInput.audioEncodingTarget = movieOutput } +movieInput.synchronizedMovieOutput = movieOutput +movieInput --> filter --> movieOutput + +movieInput.completion = { + self.movieOutput.finishRecording { + self.movieInput.audioEncodingTarget = nil + self.movieInput.synchronizedMovieOutput = nil + print("Encoding finished") + } +} + +movieOutput.startRecording() { started, error in + if(!started) { + print("ERROR: MovieOutput unable to start writing with error: \(String(describing: error))") + return + } + self.movieInput.start() + print("Encoding started") +} +``` + + The above loads a movie named "sample_iPod.m4v" from the application's 
bundle, creates a saturation filter, and directs movie frames to be processed through the saturation filter on their way to the new file. In addition it writes the audio in AAC format to the new file. + ### Writing a custom image processing operation ### The framework uses a series of protocols to define types that can output images to be processed, take in an image for processing, or do both. These are the ImageSource, ImageConsumer, and ImageProcessingOperation protocols, respectively. Any type can comply to these, but typically classes are used. diff --git a/examples/Linux-OpenGL/SimpleVideoFilter/Source/main.swift b/examples/Linux-OpenGL/SimpleVideoFilter/Source/main.swift index 85703e56..7f753bd5 100755 --- a/examples/Linux-OpenGL/SimpleVideoFilter/Source/main.swift +++ b/examples/Linux-OpenGL/SimpleVideoFilter/Source/main.swift @@ -1,11 +1,11 @@ import GPUImage // For now, GLUT initialization is done in the render window, so that must come first in sequence -let renderWindow = GLUTRenderWindow(width:1280, height:720, title:"Simple Video Filter") -let camera = V4LCamera(size:Size(width:1280.0, height:720.0)) +let renderWindow = GLUTRenderWindow(width: 1280, height: 720, title: "Simple Video Filter") +let camera = V4LCamera(size: Size(width: 1280.0, height: 720.0)) let edgeDetection = SobelEdgeDetection() camera --> edgeDetection --> renderWindow camera.startCapture() -renderWindow.loopWithFunction(camera.grabFrame) \ No newline at end of file +renderWindow.loopWithFunction(camera.grabFrame) diff --git a/examples/Linux-RPi/SimpleVideoFilter/Source/main.swift b/examples/Linux-RPi/SimpleVideoFilter/Source/main.swift index f33fa9ea..16f4a639 100755 --- a/examples/Linux-RPi/SimpleVideoFilter/Source/main.swift +++ b/examples/Linux-RPi/SimpleVideoFilter/Source/main.swift @@ -1,15 +1,15 @@ import GPUImage // For now, rendering requires the window to be created first -let renderWindow = RPiRenderWindow(width:1280, height:720) -let camera = 
V4LCamera(size:Size(width:1280.0, height:720.0)) +let renderWindow = RPiRenderWindow(width: 1280, height: 720) +let camera = V4LCamera(size: Size(width: 1280.0, height: 720.0)) let edgeDetection = SobelEdgeDetection() camera --> edgeDetection --> renderWindow -var terminate:Int = 0 +var terminate: Int = 0 camera.startCapture() -while (terminate == 0) { +while terminate == 0 { camera.grabFrame() -} \ No newline at end of file +} diff --git a/examples/Mac/FilterShowcase/FilterShowcase/AppDelegate.swift b/examples/Mac/FilterShowcase/FilterShowcase/AppDelegate.swift index 19f72e6a..4cc95db4 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/AppDelegate.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/AppDelegate.swift @@ -2,14 +2,12 @@ import Cocoa @NSApplicationMain class AppDelegate: NSObject, NSApplicationDelegate { - @IBOutlet weak var window: NSWindow! - var windowController:FilterShowcaseWindowController? + var windowController: FilterShowcaseWindowController? func applicationDidFinishLaunching(_ aNotification: Notification) { - self.windowController = FilterShowcaseWindowController(windowNibName:"FilterShowcaseWindowController") + self.windowController = FilterShowcaseWindowController(windowNibName: "FilterShowcaseWindowController") self.windowController?.showWindow(self) } } - diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperationTypes.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperationTypes.swift index bd479310..21841e2a 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperationTypes.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperationTypes.swift @@ -3,7 +3,7 @@ import GPUImage enum FilterSliderSetting { case disabled - case enabled(minimumValue:Float, maximumValue:Float, initialValue:Float) + case enabled(minimumValue: Float, maximumValue: Float, initialValue: Float) } typealias FilterSetupFunction = (Camera, ImageProcessingOperation, RenderView) -> ImageSource? 
@@ -11,33 +11,33 @@ typealias FilterSetupFunction = (Camera, ImageProcessingOperation, RenderView) - enum FilterOperationType { case singleInput case blend - case custom(filterSetupFunction:FilterSetupFunction) + case custom(filterSetupFunction: FilterSetupFunction) } protocol FilterOperationInterface { var filter: ImageProcessingOperation { get } - var secondInput:ImageSource? { get } + var secondInput: ImageSource? { get } var listName: String { get } var titleName: String { get } - var sliderConfiguration: FilterSliderSetting { get } - var filterOperationType: FilterOperationType { get } + var sliderConfiguration: FilterSliderSetting { get } + var filterOperationType: FilterOperationType { get } - func configureCustomFilter(_ secondInput:ImageSource?) - func updateBasedOnSliderValue(_ sliderValue:Float) + func configureCustomFilter(_ secondInput: ImageSource?) + func updateBasedOnSliderValue(_ sliderValue: Float) } class FilterOperation: FilterOperationInterface { - lazy var internalFilter:FilterClass = { + lazy var internalFilter: FilterClass = { return self.filterCreationFunction() }() let filterCreationFunction:() -> FilterClass - var secondInput:ImageSource? - let listName:String - let titleName:String - let sliderConfiguration:FilterSliderSetting - let filterOperationType:FilterOperationType - let sliderUpdateCallback: ((FilterClass, Float) -> ())? - init(filter:@escaping () -> FilterClass, listName: String, titleName: String, sliderConfiguration: FilterSliderSetting, sliderUpdateCallback:((FilterClass, Float) -> ())?, filterOperationType: FilterOperationType) { + var secondInput: ImageSource? + let listName: String + let titleName: String + let sliderConfiguration: FilterSliderSetting + let filterOperationType: FilterOperationType + let sliderUpdateCallback: ((FilterClass, Float) -> Void)? 
+ init(filter:@escaping () -> FilterClass, listName: String, titleName: String, sliderConfiguration: FilterSliderSetting, sliderUpdateCallback: ((FilterClass, Float) -> Void)?, filterOperationType: FilterOperationType) { self.listName = listName self.titleName = titleName self.sliderConfiguration = sliderConfiguration @@ -50,12 +50,11 @@ class FilterOperation: FilterOperationInt return internalFilter } - func configureCustomFilter(_ secondInput:ImageSource?) { + func configureCustomFilter(_ secondInput: ImageSource?) { self.secondInput = secondInput } - func updateBasedOnSliderValue(_ sliderValue:Float) { + func updateBasedOnSliderValue(_ sliderValue: Float) { sliderUpdateCallback?(internalFilter, sliderValue) } } - diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift index 234cdfd7..182a00c9 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift @@ -1,150 +1,187 @@ import GPUImage import QuartzCore -let filterOperations: Array = [ - FilterOperation ( - filter:{SaturationAdjustment()}, - listName:"Saturation", - titleName:"Saturation", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in +let filterOperations: [FilterOperationInterface] = [ + FilterOperation( + filter: { AlphaBlend() }, + listName: "Highlights Blur", + titleName: "Gaussian Blur Lumi>0.6(alpha)", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.8), + sliderUpdateCallback: {filter, sliderValue in + filter.mix = sliderValue + }, + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in + let blendFilter = filter as! 
AlphaBlend + blendFilter.removeAllSources() + + let gaussianBlur = GaussianBlur(blurRadiusInPixels: 10, luminanceThreshold: 0.6) + camera --> blendFilter + camera --> gaussianBlur --> blendFilter --> outputView + return blendFilter + }) + ), + FilterOperation( + filter: { AlphaBlend() }, + listName: "Soft Focus", + titleName: "Gaussian Blur + Alpha Blend", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.5), + sliderUpdateCallback: {filter, sliderValue in + filter.mix = sliderValue + }, + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in + let blendFilter = filter as! AlphaBlend + blendFilter.removeAllSources() + + let gaussianBlur = GaussianBlur(blurRadiusInPixels: 10, luminanceThreshold: 0.6) + gaussianBlur.blurRadiusInPixels = 10 + camera --> blendFilter + camera --> gaussianBlur --> blendFilter --> outputView + return blendFilter + }) + ), + FilterOperation( + filter: { SaturationAdjustment() }, + listName: "Saturation", + titleName: "Saturation", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 2.0, initialValue: 1.0), + sliderUpdateCallback: {filter, sliderValue in filter.saturation = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ContrastAdjustment()}, - listName:"Contrast", - titleName:"Contrast", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:4.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { ContrastAdjustment() }, + listName: "Contrast", + titleName: "Contrast", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 4.0, initialValue: 1.0), + sliderUpdateCallback: {filter, sliderValue in filter.contrast = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{BrightnessAdjustment()}, - listName:"Brightness", - titleName:"Brightness", - sliderConfiguration:.enabled(minimumValue:-1.0, 
maximumValue:1.0, initialValue:0.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { BrightnessAdjustment() }, + listName: "Brightness", + titleName: "Brightness", + sliderConfiguration: .enabled(minimumValue: -1.0, maximumValue: 1.0, initialValue: 0.0), + sliderUpdateCallback: {filter, sliderValue in filter.brightness = sliderValue }, - filterOperationType:.singleInput - ), - FilterOperation( - filter:{LevelsAdjustment()}, - listName:"Levels", - titleName:"Levels", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.0), - sliderUpdateCallback: {(filter, sliderValue) in - filter.minimum = Color(red:Float(sliderValue), green:Float(sliderValue), blue:Float(sliderValue)) - filter.middle = Color(red:1.0, green:1.0, blue:1.0) - filter.maximum = Color(red:1.0, green:1.0, blue:1.0) - filter.minOutput = Color(red:0.0, green:0.0, blue:0.0) - filter.maxOutput = Color(red:1.0, green:1.0, blue:1.0) + filterOperationType: .singleInput + ), + FilterOperation( + filter: { LevelsAdjustment() }, + listName: "Levels", + titleName: "Levels", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.0), + sliderUpdateCallback: {filter, sliderValue in + filter.minimum = Color(red: Float(sliderValue), green: Float(sliderValue), blue: Float(sliderValue)) + filter.middle = Color(red: 1.0, green: 1.0, blue: 1.0) + filter.maximum = Color(red: 1.0, green: 1.0, blue: 1.0) + filter.minOutput = Color(red: 0.0, green: 0.0, blue: 0.0) + filter.maxOutput = Color(red: 1.0, green: 1.0, blue: 1.0) }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ExposureAdjustment()}, - listName:"Exposure", - titleName:"Exposure", - sliderConfiguration:.enabled(minimumValue:-4.0, maximumValue:4.0, initialValue:0.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { ExposureAdjustment() }, + listName: "Exposure", + titleName: "Exposure", + sliderConfiguration: 
.enabled(minimumValue: -4.0, maximumValue: 4.0, initialValue: 0.0), + sliderUpdateCallback: {filter, sliderValue in filter.exposure = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{RGBAdjustment()}, - listName:"RGB", - titleName:"RGB", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { RGBAdjustment() }, + listName: "RGB", + titleName: "RGB", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 2.0, initialValue: 1.0), + sliderUpdateCallback: {filter, sliderValue in filter.green = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{HueAdjustment()}, - listName:"Hue", - titleName:"Hue", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:360.0, initialValue:90.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { HueAdjustment() }, + listName: "Hue", + titleName: "Hue", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 360.0, initialValue: 90.0), + sliderUpdateCallback: {filter, sliderValue in filter.hue = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{WhiteBalance()}, - listName:"White balance", - titleName:"White Balance", - sliderConfiguration:.enabled(minimumValue:2500.0, maximumValue:7500.0, initialValue:5000.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { WhiteBalance() }, + listName: "White balance", + titleName: "White Balance", + sliderConfiguration: .enabled(minimumValue: 2500.0, maximumValue: 7500.0, initialValue: 5000.0), + sliderUpdateCallback: {filter, sliderValue in filter.temperature = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{MonochromeFilter()}, - listName:"Monochrome", - titleName:"Monochrome", - 
sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { MonochromeFilter() }, + listName: "Monochrome", + titleName: "Monochrome", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 1.0), + sliderUpdateCallback: {filter, sliderValue in filter.intensity = sliderValue }, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! MonochromeFilter camera --> castFilter --> outputView - castFilter.color = Color(red:0.0, green:0.0, blue:1.0, alpha:1.0) + castFilter.color = Color(red: 0.0, green: 0.0, blue: 1.0, alpha: 1.0) return nil }) ), FilterOperation( - filter:{FalseColor()}, - listName:"False color", - titleName:"False Color", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.singleInput + filter: { FalseColor() }, + listName: "False color", + titleName: "False Color", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .singleInput ), FilterOperation( - filter:{Sharpen()}, - listName:"Sharpen", - titleName:"Sharpen", - sliderConfiguration:.enabled(minimumValue:-1.0, maximumValue:4.0, initialValue:0.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { Sharpen() }, + listName: "Sharpen", + titleName: "Sharpen", + sliderConfiguration: .enabled(minimumValue: -1.0, maximumValue: 4.0, initialValue: 0.0), + sliderUpdateCallback: {filter, sliderValue in filter.sharpness = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{UnsharpMask()}, - listName:"Unsharp mask", - titleName:"Unsharp Mask", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:5.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { UnsharpMask() }, + listName: "Unsharp 
mask", + titleName: "Unsharp Mask", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 5.0, initialValue: 1.0), + sliderUpdateCallback: {filter, sliderValue in filter.intensity = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{TransformOperation()}, - listName:"Transform (2-D)", - titleName:"Transform (2-D)", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:6.28, initialValue:0.75), - sliderUpdateCallback:{(filter, sliderValue) in - filter.transform = Matrix4x4(CGAffineTransform(rotationAngle:CGFloat(sliderValue))) + filter: { TransformOperation() }, + listName: "Transform (2-D)", + titleName: "Transform (2-D)", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 6.28, initialValue: 0.75), + sliderUpdateCallback: {filter, sliderValue in + filter.transform = Matrix4x4(CGAffineTransform(rotationAngle: CGFloat(sliderValue))) }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{TransformOperation()}, - listName:"Transform (3-D)", - titleName:"Transform (3-D)", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:6.28, initialValue:0.75), - sliderUpdateCallback:{(filter, sliderValue) in + filter: { TransformOperation() }, + listName: "Transform (3-D)", + titleName: "Transform (3-D)", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 6.28, initialValue: 0.75), + sliderUpdateCallback: {filter, sliderValue in var perspectiveTransform = CATransform3DIdentity perspectiveTransform.m34 = 0.4 perspectiveTransform.m33 = 0.4 @@ -152,27 +189,37 @@ let filterOperations: Array = [ perspectiveTransform = CATransform3DRotate(perspectiveTransform, CGFloat(sliderValue), 0.0, 1.0, 0.0) filter.transform = Matrix4x4(perspectiveTransform) }, - filterOperationType:.singleInput + filterOperationType: .singleInput + ), + FilterOperation( + filter: { Crop() }, + listName: "Crop", + titleName: "Crop", + 
sliderConfiguration: .enabled(minimumValue: 240.0, maximumValue: 480.0, initialValue: 240.0), + sliderUpdateCallback: {filter, sliderValue in + filter.cropSizeInPixels = Size(width: 480.0, height: sliderValue) + }, + filterOperationType: .singleInput ), FilterOperation( - filter:{Crop()}, - listName:"Crop", - titleName:"Crop", - sliderConfiguration:.enabled(minimumValue:240.0, maximumValue:480.0, initialValue:240.0), - sliderUpdateCallback:{(filter, sliderValue) in - filter.cropSizeInPixels = Size(width:480.0, height:sliderValue) + filter: { ResizeCrop() }, + listName: "ResizeCrop", + titleName: "ResizeCrop", + sliderConfiguration: .enabled(minimumValue: 240.0, maximumValue: 480.0, initialValue: 240.0), + sliderUpdateCallback: {filter, sliderValue in + filter.cropSizeInPixels = Size(width: 480.0, height: sliderValue) }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Luminance()}, - listName:"Masking", - titleName:"Mask Example", - sliderConfiguration:.disabled, + filter: { Luminance() }, + listName: "Masking", + titleName: "Mask Example", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! Luminance - let maskImage = PictureInput(imageName:"Mask.png") + let maskImage = try! 
PictureInput(imageName: "Mask.png") castFilter.drawUnmodifiedImageOutsideOfMask = false castFilter.mask = maskImage maskImage.processImage() @@ -181,128 +228,128 @@ let filterOperations: Array = [ }) ), FilterOperation( - filter:{GammaAdjustment()}, - listName:"Gamma", - titleName:"Gamma", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:3.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { GammaAdjustment() }, + listName: "Gamma", + titleName: "Gamma", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 3.0, initialValue: 1.0), + sliderUpdateCallback: {filter, sliderValue in filter.gamma = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), // TODO : Tone curve FilterOperation( - filter:{HighlightsAndShadows()}, - listName:"Highlights and shadows", - titleName:"Highlights and Shadows", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { HighlightsAndShadows() }, + listName: "Highlights and shadows", + titleName: "Highlights and Shadows", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 1.0), + sliderUpdateCallback: {filter, sliderValue in filter.highlights = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Haze()}, - listName:"Haze / UV", - titleName:"Haze / UV", - sliderConfiguration:.enabled(minimumValue:-0.2, maximumValue:0.2, initialValue:0.2), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { Haze() }, + listName: "Haze / UV", + titleName: "Haze / UV", + sliderConfiguration: .enabled(minimumValue: -0.2, maximumValue: 0.2, initialValue: 0.2), + sliderUpdateCallback: {filter, sliderValue in filter.distance = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{SepiaToneFilter()}, - listName:"Sepia 
tone", - titleName:"Sepia Tone", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { SepiaToneFilter() }, + listName: "Sepia tone", + titleName: "Sepia Tone", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 1.0), + sliderUpdateCallback: {filter, sliderValue in filter.intensity = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{AmatorkaFilter()}, - listName:"Amatorka (Lookup)", - titleName:"Amatorka (Lookup)", - sliderConfiguration:.disabled, + filter: { AmatorkaFilter() }, + listName: "Amatorka (Lookup)", + titleName: "Amatorka (Lookup)", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{MissEtikateFilter()}, - listName:"Miss Etikate (Lookup)", - titleName:"Miss Etikate (Lookup)", - sliderConfiguration:.disabled, + filter: { MissEtikateFilter() }, + listName: "Miss Etikate (Lookup)", + titleName: "Miss Etikate (Lookup)", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{SoftElegance()}, - listName:"Soft elegance (Lookup)", - titleName:"Soft Elegance (Lookup)", - sliderConfiguration:.disabled, + filter: { SoftElegance() }, + listName: "Soft elegance (Lookup)", + titleName: "Soft Elegance (Lookup)", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ColorInversion()}, - listName:"Color invert", - titleName:"Color Invert", - sliderConfiguration:.disabled, + filter: { ColorInversion() }, + listName: "Color invert", + titleName: "Color Invert", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + 
filterOperationType: .singleInput ), FilterOperation( - filter:{Solarize()}, - listName:"Solarize", - titleName:"Solarize", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { Solarize() }, + listName: "Solarize", + titleName: "Solarize", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.5), + sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Vibrance()}, - listName:"Vibrance", - titleName:"Vibrance", - sliderConfiguration:.enabled(minimumValue:-1.2, maximumValue:1.2, initialValue:0.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { Vibrance() }, + listName: "Vibrance", + titleName: "Vibrance", + sliderConfiguration: .enabled(minimumValue: -1.2, maximumValue: 1.2, initialValue: 0.0), + sliderUpdateCallback: {filter, sliderValue in filter.vibrance = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{HighlightAndShadowTint()}, - listName:"Highlight and shadow tint", - titleName:"Highlight / Shadow Tint", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { HighlightAndShadowTint() }, + listName: "Highlight and shadow tint", + titleName: "Highlight / Shadow Tint", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.0), + sliderUpdateCallback: {filter, sliderValue in filter.shadowTintIntensity = sliderValue }, - filterOperationType:.singleInput - ), - FilterOperation ( - filter:{Luminance()}, - listName:"Luminance", - titleName:"Luminance", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.singleInput - ), - FilterOperation( - filter:{Histogram(type:.rgb)}, - 
listName:"Histogram", - titleName:"Histogram", - sliderConfiguration:.enabled(minimumValue:4.0, maximumValue:32.0, initialValue:16.0), - sliderUpdateCallback: {(filter, sliderValue) in + filterOperationType: .singleInput + ), + FilterOperation( + filter: { Luminance() }, + listName: "Luminance", + titleName: "Luminance", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .singleInput + ), + FilterOperation( + filter: { Histogram(type: .rgb) }, + listName: "Histogram", + titleName: "Histogram", + sliderConfiguration: .enabled(minimumValue: 4.0, maximumValue: 32.0, initialValue: 16.0), + sliderUpdateCallback: {filter, sliderValue in filter.downsamplingFactor = UInt(round(sliderValue)) }, - filterOperationType:.custom(filterSetupFunction: {(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! Histogram let histogramGraph = HistogramDisplay() - histogramGraph.overriddenOutputSize = Size(width:256.0, height:330.0) + histogramGraph.overriddenOutputSize = Size(width: 256.0, height: 330.0) let blendFilter = AlphaBlend() blendFilter.mix = 0.75 camera --> blendFilter @@ -311,23 +358,23 @@ let filterOperations: Array = [ return blendFilter }) ), - FilterOperation ( - filter:{HistogramEqualization(type:.rgb)}, - listName:"Histogram equalization", - titleName:"Histogram Equalization", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.singleInput + FilterOperation( + filter: { HistogramEqualization(type: .rgb) }, + listName: "Histogram equalization", + titleName: "Histogram Equalization", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .singleInput ), FilterOperation( - filter:{AverageColorExtractor()}, - listName:"Average color", - titleName:"Average Color", - sliderConfiguration:.disabled, + filter: { AverageColorExtractor() }, + listName: "Average color", + titleName: "Average 
Color", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! AverageColorExtractor - let colorGenerator = SolidColorGenerator(size:outputView.sizeInPixels) + let colorGenerator = SolidColorGenerator(size: outputView.sizeInPixels) castFilter.extractedColorCallback = {color in colorGenerator.renderColor(color) @@ -338,17 +385,17 @@ let filterOperations: Array = [ }) ), FilterOperation( - filter:{AverageLuminanceExtractor()}, - listName:"Average luminosity", - titleName:"Average Luminosity", - sliderConfiguration:.disabled, + filter: { AverageLuminanceExtractor() }, + listName: "Average luminosity", + titleName: "Average Luminosity", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! 
AverageLuminanceExtractor - let colorGenerator = SolidColorGenerator(size:outputView.sizeInPixels) + let colorGenerator = SolidColorGenerator(size: outputView.sizeInPixels) castFilter.extractedLuminanceCallback = {luminosity in - colorGenerator.renderColor(Color(red:luminosity, green:luminosity, blue:luminosity)) + colorGenerator.renderColor(Color(red: luminosity, green: luminosity, blue: luminosity)) } camera --> castFilter @@ -357,161 +404,161 @@ let filterOperations: Array = [ }) ), FilterOperation( - filter:{LuminanceThreshold()}, - listName:"Luminance threshold", - titleName:"Luminance Threshold", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { LuminanceThreshold() }, + listName: "Luminance threshold", + titleName: "Luminance Threshold", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.5), + sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{AdaptiveThreshold()}, - listName:"Adaptive threshold", - titleName:"Adaptive Threshold", - sliderConfiguration:.enabled(minimumValue:1.0, maximumValue:20.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { AdaptiveThreshold() }, + listName: "Adaptive threshold", + titleName: "Adaptive Threshold", + sliderConfiguration: .enabled(minimumValue: 1.0, maximumValue: 20.0, initialValue: 1.0), + sliderUpdateCallback: {filter, sliderValue in filter.blurRadiusInPixels = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{AverageLuminanceThreshold()}, - listName:"Average luminance threshold", - titleName:"Avg. Lum. 
Threshold", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { AverageLuminanceThreshold() }, + listName: "Average luminance threshold", + titleName: "Avg. Lum. Threshold", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 2.0, initialValue: 1.0), + sliderUpdateCallback: {filter, sliderValue in filter.thresholdMultiplier = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Pixellate()}, - listName:"Pixellate", - titleName:"Pixellate", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:0.3, initialValue:0.05), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { Pixellate() }, + listName: "Pixellate", + titleName: "Pixellate", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 0.3, initialValue: 0.05), + sliderUpdateCallback: {filter, sliderValue in filter.fractionalWidthOfAPixel = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{PolarPixellate()}, - listName:"Polar pixellate", - titleName:"Polar Pixellate", - sliderConfiguration:.enabled(minimumValue:-0.1, maximumValue:0.1, initialValue:0.05), - sliderUpdateCallback: {(filter, sliderValue) in - filter.pixelSize = Size(width:sliderValue, height:sliderValue) + filter: { PolarPixellate() }, + listName: "Polar pixellate", + titleName: "Polar Pixellate", + sliderConfiguration: .enabled(minimumValue: -0.1, maximumValue: 0.1, initialValue: 0.05), + sliderUpdateCallback: {filter, sliderValue in + filter.pixelSize = Size(width: sliderValue, height: sliderValue) }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Pixellate()}, - listName:"Masked Pixellate", - titleName:"Masked Pixellate", - sliderConfiguration:.disabled, + filter: { Pixellate() }, + listName: "Masked Pixellate", + titleName: "Masked 
Pixellate", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! Pixellate castFilter.fractionalWidthOfAPixel = 0.05 // TODO: Find a way to not hardcode these values #if os(iOS) - let circleGenerator = CircleGenerator(size:Size(width:480, height:640)) + let circleGenerator = CircleGenerator(size: Size(width: 480, height: 640)) #else - let circleGenerator = CircleGenerator(size:Size(width:1280, height:720)) + let circleGenerator = CircleGenerator(size: Size(width: 1280, height: 720)) #endif castFilter.mask = circleGenerator - circleGenerator.renderCircleOfRadius(0.25, center:Position.center, circleColor:Color.white, backgroundColor:Color.transparent) + circleGenerator.renderCircleOfRadius(0.25, center: Position.center, circleColor: Color.white, backgroundColor: Color.transparent) camera --> castFilter --> outputView return nil }) ), FilterOperation( - filter:{PolkaDot()}, - listName:"Polka dot", - titleName:"Polka Dot", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:0.3, initialValue:0.05), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { PolkaDot() }, + listName: "Polka dot", + titleName: "Polka Dot", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 0.3, initialValue: 0.05), + sliderUpdateCallback: {filter, sliderValue in filter.fractionalWidthOfAPixel = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Halftone()}, - listName:"Halftone", - titleName:"Halftone", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:0.05, initialValue:0.01), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { Halftone() }, + listName: "Halftone", + titleName: "Halftone", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 0.05, initialValue: 
0.01), + sliderUpdateCallback: {filter, sliderValue in filter.fractionalWidthOfAPixel = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Crosshatch()}, - listName:"Crosshatch", - titleName:"Crosshatch", - sliderConfiguration:.enabled(minimumValue:0.01, maximumValue:0.06, initialValue:0.03), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { Crosshatch() }, + listName: "Crosshatch", + titleName: "Crosshatch", + sliderConfiguration: .enabled(minimumValue: 0.01, maximumValue: 0.06, initialValue: 0.03), + sliderUpdateCallback: {filter, sliderValue in filter.crossHatchSpacing = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{SobelEdgeDetection()}, - listName:"Sobel edge detection", - titleName:"Sobel Edge Detection", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.25), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { SobelEdgeDetection() }, + listName: "Sobel edge detection", + titleName: "Sobel Edge Detection", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.25), + sliderUpdateCallback: {filter, sliderValue in filter.edgeStrength = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{PrewittEdgeDetection()}, - listName:"Prewitt edge detection", - titleName:"Prewitt Edge Detection", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { PrewittEdgeDetection() }, + listName: "Prewitt edge detection", + titleName: "Prewitt Edge Detection", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 1.0), + sliderUpdateCallback: {filter, sliderValue in filter.edgeStrength = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), 
FilterOperation( - filter:{CannyEdgeDetection()}, - listName:"Canny edge detection", - titleName:"Canny Edge Detection", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:4.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { CannyEdgeDetection() }, + listName: "Canny edge detection", + titleName: "Canny Edge Detection", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 4.0, initialValue: 1.0), + sliderUpdateCallback: {filter, sliderValue in filter.blurRadiusInPixels = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ThresholdSobelEdgeDetection()}, - listName:"Threshold edge detection", - titleName:"Threshold Edge Detection", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.25), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { ThresholdSobelEdgeDetection() }, + listName: "Threshold edge detection", + titleName: "Threshold Edge Detection", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.25), + sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{HarrisCornerDetector()}, - listName:"Harris corner detector", - titleName:"Harris Corner Detector", - sliderConfiguration:.enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { HarrisCornerDetector() }, + listName: "Harris corner detector", + titleName: "Harris Corner Detector", + sliderConfiguration: .enabled(minimumValue: 0.01, maximumValue: 0.70, initialValue: 0.20), + sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let 
castFilter = filter as! HarrisCornerDetector // TODO: Get this more dynamically sized #if os(iOS) - let crosshairGenerator = CrosshairGenerator(size:Size(width:480, height:640)) + let crosshairGenerator = CrosshairGenerator(size: Size(width: 480, height: 640)) #else - let crosshairGenerator = CrosshairGenerator(size:Size(width:1280, height:720)) + let crosshairGenerator = CrosshairGenerator(size: Size(width: 1280, height: 720)) #endif crosshairGenerator.crosshairWidth = 15.0 @@ -529,20 +576,20 @@ let filterOperations: Array = [ }) ), FilterOperation( - filter:{NobleCornerDetector()}, - listName:"Noble corner detector", - titleName:"Noble Corner Detector", - sliderConfiguration:.enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { NobleCornerDetector() }, + listName: "Noble corner detector", + titleName: "Noble Corner Detector", + sliderConfiguration: .enabled(minimumValue: 0.01, maximumValue: 0.70, initialValue: 0.20), + sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! 
NobleCornerDetector // TODO: Get this more dynamically sized #if os(iOS) - let crosshairGenerator = CrosshairGenerator(size:Size(width:480, height:640)) + let crosshairGenerator = CrosshairGenerator(size: Size(width: 480, height: 640)) #else - let crosshairGenerator = CrosshairGenerator(size:Size(width:1280, height:720)) + let crosshairGenerator = CrosshairGenerator(size: Size(width: 1280, height: 720)) #endif crosshairGenerator.crosshairWidth = 15.0 @@ -560,20 +607,20 @@ let filterOperations: Array = [ }) ), FilterOperation( - filter:{ShiTomasiFeatureDetector()}, - listName:"Shi-Tomasi feature detector", - titleName:"Shi-Tomasi Feature Detector", - sliderConfiguration:.enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { ShiTomasiFeatureDetector() }, + listName: "Shi-Tomasi feature detector", + titleName: "Shi-Tomasi Feature Detector", + sliderConfiguration: .enabled(minimumValue: 0.01, maximumValue: 0.70, initialValue: 0.20), + sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! 
ShiTomasiFeatureDetector // TODO: Get this more dynamically sized #if os(iOS) - let crosshairGenerator = CrosshairGenerator(size:Size(width:480, height:640)) + let crosshairGenerator = CrosshairGenerator(size: Size(width: 480, height: 640)) #else - let crosshairGenerator = CrosshairGenerator(size:Size(width:1280, height:720)) + let crosshairGenerator = CrosshairGenerator(size: Size(width: 1280, height: 720)) #endif crosshairGenerator.crosshairWidth = 15.0 @@ -592,112 +639,112 @@ let filterOperations: Array = [ ), // TODO: Hough transform line detector FilterOperation( - filter:{ColourFASTFeatureDetection()}, - listName:"ColourFAST feature detection", - titleName:"ColourFAST Features", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.singleInput + filter: { ColourFASTFeatureDetection() }, + listName: "ColourFAST feature detection", + titleName: "ColourFAST Features", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .singleInput ), FilterOperation( - filter:{LowPassFilter()}, - listName:"Low pass", - titleName:"Low Pass", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { LowPassFilter() }, + listName: "Low pass", + titleName: "Low Pass", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.5), + sliderUpdateCallback: {filter, sliderValue in filter.strength = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{HighPassFilter()}, - listName:"High pass", - titleName:"High Pass", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { HighPassFilter() }, + listName: "High pass", + titleName: "High Pass", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.5), + sliderUpdateCallback: 
{filter, sliderValue in filter.strength = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), // TODO: Motion detector FilterOperation( - filter:{SketchFilter()}, - listName:"Sketch", - titleName:"Sketch", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { SketchFilter() }, + listName: "Sketch", + titleName: "Sketch", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.5), + sliderUpdateCallback: {filter, sliderValue in filter.edgeStrength = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ThresholdSketchFilter()}, - listName:"Threshold Sketch", - titleName:"Threshold Sketch", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.25), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { ThresholdSketchFilter() }, + listName: "Threshold Sketch", + titleName: "Threshold Sketch", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.25), + sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ToonFilter()}, - listName:"Toon", - titleName:"Toon", - sliderConfiguration:.disabled, + filter: { ToonFilter() }, + listName: "Toon", + titleName: "Toon", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{SmoothToonFilter()}, - listName:"Smooth toon", - titleName:"Smooth Toon", - sliderConfiguration:.enabled(minimumValue:1.0, maximumValue:6.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { SmoothToonFilter() }, + listName: "Smooth toon", + titleName: "Smooth Toon", + sliderConfiguration: .enabled(minimumValue: 
1.0, maximumValue: 6.0, initialValue: 1.0), + sliderUpdateCallback: {filter, sliderValue in filter.blurRadiusInPixels = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{TiltShift()}, - listName:"Tilt shift", - titleName:"Tilt Shift", - sliderConfiguration:.enabled(minimumValue:0.2, maximumValue:0.8, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { TiltShift() }, + listName: "Tilt shift", + titleName: "Tilt Shift", + sliderConfiguration: .enabled(minimumValue: 0.2, maximumValue: 0.8, initialValue: 0.5), + sliderUpdateCallback: {filter, sliderValue in filter.topFocusLevel = sliderValue - 0.1 filter.bottomFocusLevel = sliderValue + 0.1 }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{CGAColorspaceFilter()}, - listName:"CGA colorspace", - titleName:"CGA Colorspace", - sliderConfiguration:.disabled, + filter: { CGAColorspaceFilter() }, + listName: "CGA colorspace", + titleName: "CGA Colorspace", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Posterize()}, - listName:"Posterize", - titleName:"Posterize", - sliderConfiguration:.enabled(minimumValue:1.0, maximumValue:20.0, initialValue:10.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { Posterize() }, + listName: "Posterize", + titleName: "Posterize", + sliderConfiguration: .enabled(minimumValue: 1.0, maximumValue: 20.0, initialValue: 10.0), + sliderUpdateCallback: {filter, sliderValue in filter.colorLevels = round(sliderValue) }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Convolution3x3()}, - listName:"3x3 convolution", - titleName:"3x3 convolution", - sliderConfiguration:.disabled, + filter: { Convolution3x3() }, + listName: "3x3 convolution", + titleName: "3x3 convolution", + 
sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! Convolution3x3 - castFilter.convolutionKernel = Matrix3x3(rowMajorValues:[ + castFilter.convolutionKernel = Matrix3x3(rowMajorValues: [ -1.0, 0.0, 1.0, -2.0, 0.0, 2.0, -1.0, 0.0, 1.0]) @@ -708,38 +755,38 @@ let filterOperations: Array = [ }) ), FilterOperation( - filter:{EmbossFilter()}, - listName:"Emboss", - titleName:"Emboss", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:5.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { EmbossFilter() }, + listName: "Emboss", + titleName: "Emboss", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 5.0, initialValue: 1.0), + sliderUpdateCallback: {filter, sliderValue in filter.intensity = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Laplacian()}, - listName:"Laplacian", - titleName:"Laplacian", - sliderConfiguration:.disabled, + filter: { Laplacian() }, + listName: "Laplacian", + titleName: "Laplacian", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ChromaKeying()}, - listName:"Chroma key", - titleName:"Chroma Key", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.00, initialValue:0.40), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { ChromaKeying() }, + listName: "Chroma key", + titleName: "Chroma Key", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.00, initialValue: 0.40), + sliderUpdateCallback: {filter, sliderValue in filter.thresholdSensitivity = sliderValue }, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: 
.custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! ChromaKeying let blendFilter = AlphaBlend() blendFilter.mix = 1.0 - let inputImage = PictureInput(imageName:blendImageName) + let inputImage = try! PictureInput(imageName: blendImageName) inputImage --> blendFilter camera --> castFilter --> blendFilter --> outputView @@ -748,139 +795,139 @@ let filterOperations: Array = [ }) ), FilterOperation( - filter:{KuwaharaFilter()}, - listName:"Kuwahara", - titleName:"Kuwahara", - sliderConfiguration:.enabled(minimumValue:3.0, maximumValue:9.0, initialValue:3.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { KuwaharaFilter() }, + listName: "Kuwahara", + titleName: "Kuwahara", + sliderConfiguration: .enabled(minimumValue: 3.0, maximumValue: 9.0, initialValue: 3.0), + sliderUpdateCallback: {filter, sliderValue in filter.radius = Int(round(sliderValue)) }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{KuwaharaRadius3Filter()}, - listName:"Kuwahara (radius 3)", - titleName:"Kuwahara (Radius 3)", - sliderConfiguration:.disabled, + filter: { KuwaharaRadius3Filter() }, + listName: "Kuwahara (radius 3)", + titleName: "Kuwahara (Radius 3)", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Vignette()}, - listName:"Vignette", - titleName:"Vignette", - sliderConfiguration:.enabled(minimumValue:0.5, maximumValue:0.9, initialValue:0.75), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { Vignette() }, + listName: "Vignette", + titleName: "Vignette", + sliderConfiguration: .enabled(minimumValue: 0.5, maximumValue: 0.9, initialValue: 0.75), + sliderUpdateCallback: {filter, sliderValue in filter.end = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{GaussianBlur()}, - listName:"Gaussian blur", - 
titleName:"Gaussian Blur", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:40.0, initialValue:2.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { GaussianBlur() }, + listName: "Gaussian blur", + titleName: "Gaussian Blur", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 40.0, initialValue: 2.0), + sliderUpdateCallback: {filter, sliderValue in filter.blurRadiusInPixels = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{BoxBlur()}, - listName:"Box blur", - titleName:"Box Blur", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:40.0, initialValue:2.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { BoxBlur() }, + listName: "Box blur", + titleName: "Box Blur", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 40.0, initialValue: 2.0), + sliderUpdateCallback: {filter, sliderValue in filter.blurRadiusInPixels = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{MedianFilter()}, - listName:"Median", - titleName:"Median", - sliderConfiguration:.disabled, + filter: { MedianFilter() }, + listName: "Median", + titleName: "Median", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{BilateralBlur()}, - listName:"Bilateral blur", - titleName:"Bilateral Blur", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:10.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { BilateralBlur() }, + listName: "Bilateral blur", + titleName: "Bilateral Blur", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 10.0, initialValue: 1.0), + sliderUpdateCallback: {filter, sliderValue in filter.distanceNormalizationFactor = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), 
FilterOperation( - filter:{MotionBlur()}, - listName:"Motion blur", - titleName:"Motion Blur", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:180.0, initialValue:0.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { MotionBlur() }, + listName: "Motion blur", + titleName: "Motion Blur", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 180.0, initialValue: 0.0), + sliderUpdateCallback: {filter, sliderValue in filter.blurAngle = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ZoomBlur()}, - listName:"Zoom blur", - titleName:"Zoom Blur", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:2.5, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { ZoomBlur() }, + listName: "Zoom blur", + titleName: "Zoom Blur", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 2.5, initialValue: 1.0), + sliderUpdateCallback: {filter, sliderValue in filter.blurSize = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( // TODO: Make this only partially applied to the view - filter:{iOSBlur()}, - listName:"iOS 7 blur", - titleName:"iOS 7 Blur", - sliderConfiguration:.disabled, + filter: { iOSBlur() }, + listName: "iOS 7 blur", + titleName: "iOS 7 Blur", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{SwirlDistortion()}, - listName:"Swirl", - titleName:"Swirl", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { SwirlDistortion() }, + listName: "Swirl", + titleName: "Swirl", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 2.0, initialValue: 1.0), + sliderUpdateCallback: {filter, sliderValue in filter.angle = sliderValue }, - 
filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{BulgeDistortion()}, - listName:"Bulge", - titleName:"Bulge", - sliderConfiguration:.enabled(minimumValue:-1.0, maximumValue:1.0, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { BulgeDistortion() }, + listName: "Bulge", + titleName: "Bulge", + sliderConfiguration: .enabled(minimumValue: -1.0, maximumValue: 1.0, initialValue: 0.5), + sliderUpdateCallback: {filter, sliderValue in // filter.scale = sliderValue filter.center = Position(0.5, sliderValue) }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{PinchDistortion()}, - listName:"Pinch", - titleName:"Pinch", - sliderConfiguration:.enabled(minimumValue:-2.0, maximumValue:2.0, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { PinchDistortion() }, + listName: "Pinch", + titleName: "Pinch", + sliderConfiguration: .enabled(minimumValue: -2.0, maximumValue: 2.0, initialValue: 0.5), + sliderUpdateCallback: {filter, sliderValue in filter.scale = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{SphereRefraction()}, - listName:"Sphere refraction", - titleName:"Sphere Refraction", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.15), - sliderUpdateCallback:{(filter, sliderValue) in + filter: { SphereRefraction() }, + listName: "Sphere refraction", + titleName: "Sphere Refraction", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.15), + sliderUpdateCallback: {filter, sliderValue in filter.radius = sliderValue }, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! 
SphereRefraction // Provide a blurred image for a cool-looking background @@ -897,14 +944,14 @@ let filterOperations: Array = [ }) ), FilterOperation( - filter:{GlassSphereRefraction()}, - listName:"Glass sphere", - titleName:"Glass Sphere", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.15), - sliderUpdateCallback:{(filter, sliderValue) in + filter: { GlassSphereRefraction() }, + listName: "Glass sphere", + titleName: "Glass Sphere", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.15), + sliderUpdateCallback: {filter, sliderValue in filter.radius = sliderValue }, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! GlassSphereRefraction // Provide a blurred image for a cool-looking background @@ -920,245 +967,245 @@ let filterOperations: Array = [ return blendFilter }) ), - FilterOperation ( - filter:{StretchDistortion()}, - listName:"Stretch", - titleName:"Stretch", - sliderConfiguration:.disabled, + FilterOperation( + filter: { StretchDistortion() }, + listName: "Stretch", + titleName: "Stretch", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Dilation()}, - listName:"Dilation", - titleName:"Dilation", - sliderConfiguration:.disabled, + filter: { Dilation() }, + listName: "Dilation", + titleName: "Dilation", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Erosion()}, - listName:"Erosion", - titleName:"Erosion", - sliderConfiguration:.disabled, + filter: { Erosion() }, + listName: "Erosion", + titleName: "Erosion", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + 
filterOperationType: .singleInput ), FilterOperation( - filter:{OpeningFilter()}, - listName:"Opening", - titleName:"Opening", - sliderConfiguration:.disabled, + filter: { OpeningFilter() }, + listName: "Opening", + titleName: "Opening", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ClosingFilter()}, - listName:"Closing", - titleName:"Closing", - sliderConfiguration:.disabled, + filter: { ClosingFilter() }, + listName: "Closing", + titleName: "Closing", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), // TODO: Perlin noise // TODO: JFAVoronoi // TODO: Mosaic FilterOperation( - filter:{LocalBinaryPattern()}, - listName:"Local binary pattern", - titleName:"Local Binary Pattern", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.singleInput + filter: { LocalBinaryPattern() }, + listName: "Local binary pattern", + titleName: "Local Binary Pattern", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .singleInput ), FilterOperation( - filter:{ColorLocalBinaryPattern()}, - listName:"Local binary pattern (color)", - titleName:"Local Binary Pattern (Color)", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.singleInput + filter: { ColorLocalBinaryPattern() }, + listName: "Local binary pattern (color)", + titleName: "Local Binary Pattern (Color)", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .singleInput ), FilterOperation( - filter:{DissolveBlend()}, - listName:"Dissolve blend", - titleName:"Dissolve Blend", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { DissolveBlend() }, + listName: "Dissolve blend", + titleName: "Dissolve Blend", + 
sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.5), + sliderUpdateCallback: {filter, sliderValue in filter.mix = sliderValue }, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{ChromaKeyBlend()}, - listName:"Chroma key blend (green)", - titleName:"Chroma Key (Green)", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.4), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { ChromaKeyBlend() }, + listName: "Chroma key blend (green)", + titleName: "Chroma Key (Green)", + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.4), + sliderUpdateCallback: {filter, sliderValue in filter.thresholdSensitivity = sliderValue }, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{AddBlend()}, - listName:"Add blend", - titleName:"Add Blend", - sliderConfiguration:.disabled, + filter: { AddBlend() }, + listName: "Add blend", + titleName: "Add Blend", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{DivideBlend()}, - listName:"Divide blend", - titleName:"Divide Blend", - sliderConfiguration:.disabled, + filter: { DivideBlend() }, + listName: "Divide blend", + titleName: "Divide Blend", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{MultiplyBlend()}, - listName:"Multiply blend", - titleName:"Multiply Blend", - sliderConfiguration:.disabled, + filter: { MultiplyBlend() }, + listName: "Multiply blend", + titleName: "Multiply Blend", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{OverlayBlend()}, - listName:"Overlay blend", - titleName:"Overlay Blend", - sliderConfiguration:.disabled, + filter: { 
OverlayBlend() }, + listName: "Overlay blend", + titleName: "Overlay Blend", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{LightenBlend()}, - listName:"Lighten blend", - titleName:"Lighten Blend", - sliderConfiguration:.disabled, + filter: { LightenBlend() }, + listName: "Lighten blend", + titleName: "Lighten Blend", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{DarkenBlend()}, - listName:"Darken blend", - titleName:"Darken Blend", - sliderConfiguration:.disabled, + filter: { DarkenBlend() }, + listName: "Darken blend", + titleName: "Darken Blend", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{ColorBurnBlend()}, - listName:"Color burn blend", - titleName:"Color Burn Blend", - sliderConfiguration:.disabled, + filter: { ColorBurnBlend() }, + listName: "Color burn blend", + titleName: "Color Burn Blend", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{ColorDodgeBlend()}, - listName:"Color dodge blend", - titleName:"Color Dodge Blend", - sliderConfiguration:.disabled, + filter: { ColorDodgeBlend() }, + listName: "Color dodge blend", + titleName: "Color Dodge Blend", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{LinearBurnBlend()}, - listName:"Linear burn blend", - titleName:"Linear Burn Blend", - sliderConfiguration:.disabled, + filter: { LinearBurnBlend() }, + listName: "Linear burn blend", + titleName: "Linear Burn Blend", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.blend + filterOperationType: .blend ), 
FilterOperation( - filter:{ScreenBlend()}, - listName:"Screen blend", - titleName:"Screen Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { ScreenBlend() }, + listName: "Screen blend", + titleName: "Screen Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{DifferenceBlend()}, - listName:"Difference blend", - titleName:"Difference Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { DifferenceBlend() }, + listName: "Difference blend", + titleName: "Difference Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{SubtractBlend()}, - listName:"Subtract blend", - titleName:"Subtract Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { SubtractBlend() }, + listName: "Subtract blend", + titleName: "Subtract Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{ExclusionBlend()}, - listName:"Exclusion blend", - titleName:"Exclusion Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { ExclusionBlend() }, + listName: "Exclusion blend", + titleName: "Exclusion Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{HardLightBlend()}, - listName:"Hard light blend", - titleName:"Hard Light Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { HardLightBlend() }, + listName: "Hard light blend", + titleName: "Hard Light Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{SoftLightBlend()}, - listName:"Soft light 
blend", - titleName:"Soft Light Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { SoftLightBlend() }, + listName: "Soft light blend", + titleName: "Soft Light Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{ColorBlend()}, - listName:"Color blend", - titleName:"Color Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { ColorBlend() }, + listName: "Color blend", + titleName: "Color Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{HueBlend()}, - listName:"Hue blend", - titleName:"Hue Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { HueBlend() }, + listName: "Hue blend", + titleName: "Hue Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{SaturationBlend()}, - listName:"Saturation blend", - titleName:"Saturation Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { SaturationBlend() }, + listName: "Saturation blend", + titleName: "Saturation Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{LuminosityBlend()}, - listName:"Luminosity blend", - titleName:"Luminosity Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { LuminosityBlend() }, + listName: "Luminosity blend", + titleName: "Luminosity Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{NormalBlend()}, - listName:"Normal blend", - titleName:"Normal Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - 
filterOperationType:.blend - ), + filter: { NormalBlend() }, + listName: "Normal blend", + titleName: "Normal Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend + ) // TODO: Poisson blend ] diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift index a65d8091..9d3bad95 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift @@ -5,14 +5,13 @@ import AVFoundation let blendImageName = "Lambeau.jpg" class FilterShowcaseWindowController: NSWindowController { - @IBOutlet var filterView: RenderView! @IBOutlet weak var filterSlider: NSSlider! - dynamic var currentSliderValue:Float = 0.5 { + dynamic var currentSliderValue: Float = 0.5 { willSet(newSliderValue) { - switch (currentFilterOperation!.sliderConfiguration) { + switch currentFilterOperation!.sliderConfiguration { case .enabled: currentFilterOperation!.updateBasedOnSliderValue(newSliderValue) case .disabled: break } @@ -20,9 +19,9 @@ class FilterShowcaseWindowController: NSWindowController { } var currentFilterOperation: FilterOperationInterface? - var videoCamera:Camera! - lazy var blendImage:PictureInput = { - return PictureInput(imageName:blendImageName) + var videoCamera: Camera! 
+ lazy var blendImage: PictureInput = { + return PictureInput(imageName: blendImageName) }() var currentlySelectedRow = 1 @@ -30,7 +29,7 @@ class FilterShowcaseWindowController: NSWindowController { super.windowDidLoad() do { - videoCamera = try Camera(sessionPreset:AVCaptureSessionPreset1280x720) + videoCamera = try Camera(sessionPreset: AVCaptureSessionPreset1280x720) videoCamera.runBenchmark = true videoCamera.startCapture() } catch { @@ -39,8 +38,8 @@ class FilterShowcaseWindowController: NSWindowController { self.changeSelectedRow(0) } - func changeSelectedRow(_ row:Int) { - guard (currentlySelectedRow != row) else { return } + func changeSelectedRow(_ row: Int) { + guard currentlySelectedRow != row else { return } currentlySelectedRow = row // Clean up everything from the previous filter selection first @@ -60,7 +59,7 @@ class FilterShowcaseWindowController: NSWindowController { self.blendImage.addTarget((currentFilterOperation!.filter)) currentFilterOperation!.filter.addTarget(filterView!) self.blendImage.processImage() - case let .custom(filterSetupFunction:setupFunction): + case let .custom(filterSetupFunction: setupFunction): currentFilterOperation!.configureCustomFilter(setupFunction(videoCamera!, currentFilterOperation!.filter, filterView!)) } @@ -81,12 +80,12 @@ class FilterShowcaseWindowController: NSWindowController { // MARK: - // MARK: Table view delegate and datasource methods - func numberOfRowsInTableView(_ aTableView:NSTableView!) -> Int { + func numberOfRowsInTableView(_ aTableView: NSTableView!) -> Int { return filterOperations.count } - func tableView(_ aTableView:NSTableView!, objectValueForTableColumn aTableColumn:NSTableColumn!, row rowIndex:Int) -> AnyObject! { - let filterInList:FilterOperationInterface = filterOperations[rowIndex] + func tableView(_ aTableView: NSTableView!, objectValueForTableColumn aTableColumn: NSTableColumn!, row rowIndex: Int) -> AnyObject! 
{ + let filterInList: FilterOperationInterface = filterOperations[rowIndex] return filterInList.listName as NSString } diff --git a/examples/Mac/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift b/examples/Mac/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift index 3586c02f..6b36170e 100755 --- a/examples/Mac/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift +++ b/examples/Mac/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift @@ -3,12 +3,11 @@ import GPUImage @NSApplicationMain class AppDelegate: NSObject, NSApplicationDelegate { - @IBOutlet weak var window: NSWindow! @IBOutlet weak var renderView: RenderView! - var image:PictureInput! - var filter:SaturationAdjustment! + var image: PictureInput! + var filter: SaturationAdjustment! dynamic var filterValue = 1.0 { didSet { @@ -18,8 +17,8 @@ class AppDelegate: NSObject, NSApplicationDelegate { } func applicationDidFinishLaunching(_ aNotification: Notification) { - let inputImage = NSImage(named:"Lambeau.jpg")! - image = PictureInput(image:inputImage) + let inputImage = NSImage(named: "Lambeau.jpg")! + image = PictureInput(image: inputImage) filter = SaturationAdjustment() @@ -27,4 +26,3 @@ class AppDelegate: NSObject, NSApplicationDelegate { image.processImage() } } - diff --git a/examples/Mac/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift b/examples/Mac/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift index 188cc4f1..99bd646c 100644 --- a/examples/Mac/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift +++ b/examples/Mac/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift @@ -3,12 +3,11 @@ import GPUImage @NSApplicationMain class AppDelegate: NSObject, NSApplicationDelegate { - @IBOutlet weak var window: NSWindow! @IBOutlet weak var renderView: RenderView! - var movie:MovieInput! - var filter:Pixellate! + var movie: MovieInput! + var filter: Pixellate! 
dynamic var filterValue = 0.05 { didSet { @@ -18,10 +17,10 @@ class AppDelegate: NSObject, NSApplicationDelegate { func applicationDidFinishLaunching(_ aNotification: Notification) { let bundleURL = Bundle.main.resourceURL! - let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! + let movieURL = URL(string: "sample_iPod.m4v", relativeTo: bundleURL)! do { - movie = try MovieInput(url:movieURL, playAtActualSpeed:true) + movie = try MovieInput(url: movieURL, playAtActualSpeed: true) filter = Pixellate() movie --> filter --> renderView movie.runBenchmark = true @@ -31,4 +30,3 @@ class AppDelegate: NSObject, NSApplicationDelegate { } } } - diff --git a/examples/Mac/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift b/examples/Mac/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift index 875309df..c87e16d2 100755 --- a/examples/Mac/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift +++ b/examples/Mac/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift @@ -7,10 +7,10 @@ class AppDelegate: NSObject, NSApplicationDelegate { @IBOutlet weak var window: NSWindow! @IBOutlet weak var renderView: RenderView! - var camera:Camera! - var filter:Pixellate! + var camera: Camera! + var filter: Pixellate! 
- dynamic var filterSetting:Float = 0.01 { + dynamic var filterSetting: Float = 0.01 { didSet { filter.fractionalWidthOfAPixel = filterSetting } @@ -22,13 +22,13 @@ class AppDelegate: NSObject, NSApplicationDelegate { let okayButton = imageSavingDialog.runModal() if okayButton == NSModalResponseOK { - filter.saveNextFrameToURL(imageSavingDialog.url!, format:.png) + filter.saveNextFrameToURL(imageSavingDialog.url!, format: .png) } } func applicationDidFinishLaunching(_ aNotification: Notification) { do { - camera = try Camera(sessionPreset:AVCaptureSessionPreset640x480) + camera = try Camera(sessionPreset: AVCaptureSessionPreset640x480) filter = Pixellate() camera --> filter --> renderView diff --git a/examples/Mac/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift b/examples/Mac/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift index cbf2f6c7..18fad03a 100644 --- a/examples/Mac/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift +++ b/examples/Mac/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift @@ -4,18 +4,17 @@ import AVFoundation @NSApplicationMain class AppDelegate: NSObject, NSApplicationDelegate { + @IBOutlet weak var window: NSWindow! + @IBOutlet var renderView: RenderView! - @IBOutlet weak var window:NSWindow! - @IBOutlet var renderView:RenderView! - - var camera:Camera! - var filter:SmoothToonFilter! - var movieOutput:MovieOutput? + var camera: Camera! + var filter: SmoothToonFilter! + var movieOutput: MovieOutput? 
var isRecording = false func applicationDidFinishLaunching(_ aNotification: Notification) { do { - camera = try Camera(sessionPreset:AVCaptureSessionPreset640x480) + camera = try Camera(sessionPreset: AVCaptureSessionPreset640x480) filter = SmoothToonFilter() camera --> filter --> renderView @@ -31,7 +30,7 @@ class AppDelegate: NSObject, NSApplicationDelegate { } @IBAction func record(_ sender: AnyObject) { - if (!isRecording) { + if !isRecording { let movieSavingDialog = NSSavePanel() movieSavingDialog.allowedFileTypes = ["mp4"] let okayButton = movieSavingDialog.runModal() @@ -40,7 +39,7 @@ class AppDelegate: NSObject, NSApplicationDelegate { do { self.isRecording = true // movieOutput = try MovieOutput(URL:movieSavingDialog.url!, size:Size(width:1280, height:720), liveVideo:true) - movieOutput = try MovieOutput(URL:movieSavingDialog.url!, size:Size(width:640, height:480), liveVideo:true) + movieOutput = try MovieOutput(URL: movieSavingDialog.url!, size: Size(width: 640, height: 480), liveVideo: true) // camera.audioEncodingTarget = movieOutput filter --> movieOutput! movieOutput!.startRecording() @@ -50,7 +49,7 @@ class AppDelegate: NSObject, NSApplicationDelegate { } } } else { - movieOutput?.finishRecording{ + movieOutput?.finishRecording { self.isRecording = false DispatchQueue.main.async { (sender as! 
NSButton).title = "Record" @@ -62,4 +61,3 @@ class AppDelegate: NSObject, NSApplicationDelegate { } } - diff --git a/examples/SharedAssets/Assets-iOS.xcassets/AppIcon.appiconset/Contents.json b/examples/SharedAssets/Assets-iOS.xcassets/AppIcon.appiconset/Contents.json index 1ce2f457..71356ed4 100644 --- a/examples/SharedAssets/Assets-iOS.xcassets/AppIcon.appiconset/Contents.json +++ b/examples/SharedAssets/Assets-iOS.xcassets/AppIcon.appiconset/Contents.json @@ -1,5 +1,15 @@ { "images" : [ + { + "idiom" : "iphone", + "size" : "20x20", + "scale" : "2x" + }, + { + "idiom" : "iphone", + "size" : "20x20", + "scale" : "3x" + }, { "size" : "29x29", "idiom" : "iphone", @@ -36,6 +46,16 @@ "filename" : "Icon-180.png", "scale" : "3x" }, + { + "idiom" : "ipad", + "size" : "20x20", + "scale" : "1x" + }, + { + "idiom" : "ipad", + "size" : "20x20", + "scale" : "2x" + }, { "size" : "29x29", "idiom" : "ipad", @@ -77,6 +97,11 @@ "idiom" : "ipad", "filename" : "Icon-167.png", "scale" : "2x" + }, + { + "idiom" : "ios-marketing", + "size" : "1024x1024", + "scale" : "1x" } ], "info" : { diff --git a/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj b/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj index 5358e00c..1c954c57 100644 --- a/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj +++ b/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj @@ -157,7 +157,7 @@ isa = PBXGroup; children = ( BC9E364D1E525A3200B8604F /* GPUImage.framework */, - BC9E364F1E525A3200B8604F /* GPUImage.xctest */, + BC9E364F1E525A3200B8604F /* GPUImageTests_macOS.xctest */, BC9E36511E525A3200B8604F /* GPUImage.framework */, BC9E36531E525A3200B8604F /* GPUImageTests_iOS.xctest */, ); @@ -209,6 +209,7 @@ TargetAttributes = { BC0037B6195CA11B00B9D651 = { CreatedOnToolsVersion = 6.0; + DevelopmentTeam = BCUYJQB9VH; LastSwiftMigration = 0800; ProvisioningStyle = Automatic; }; @@ -219,6 +220,7 @@ developmentRegion = English; 
hasScannedForEncodings = 0; knownRegions = ( + English, en, Base, ); @@ -246,10 +248,10 @@ remoteRef = BC9E364C1E525A3200B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; - BC9E364F1E525A3200B8604F /* GPUImage.xctest */ = { + BC9E364F1E525A3200B8604F /* GPUImageTests_macOS.xctest */ = { isa = PBXReferenceProxy; fileType = wrapper.cfbundle; - path = GPUImage.xctest; + path = GPUImageTests_macOS.xctest; remoteRef = BC9E364E1E525A3200B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; @@ -420,12 +422,13 @@ buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; - DEVELOPMENT_TEAM = ""; + DEVELOPMENT_TEAM = BCUYJQB9VH; INFOPLIST_FILE = FilterShowcaseSwift/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = "com.sunsetlakesoftware.${PRODUCT_NAME:rfc1034identifier}"; PRODUCT_NAME = FilterShowcase; - SWIFT_VERSION = 3.0; + SWIFT_VERSION = 4.0; }; name = Debug; }; @@ -434,13 +437,14 @@ buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; - DEVELOPMENT_TEAM = ""; + DEVELOPMENT_TEAM = BCUYJQB9VH; INFOPLIST_FILE = FilterShowcaseSwift/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = "com.sunsetlakesoftware.${PRODUCT_NAME:rfc1034identifier}"; PRODUCT_NAME = FilterShowcase; SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; - SWIFT_VERSION = 3.0; + SWIFT_VERSION = 4.0; }; name = Release; }; diff --git a/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/xcshareddata/xcschemes/FilterShowcase.xcscheme b/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/xcshareddata/xcschemes/FilterShowcase.xcscheme new file mode 100644 index 00000000..87d1eb0e --- /dev/null +++ 
b/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/xcshareddata/xcschemes/FilterShowcase.xcscheme @@ -0,0 +1,78 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/AppDelegate.swift b/examples/iOS/FilterShowcase/FilterShowcaseSwift/AppDelegate.swift index d353864b..0b2eeea4 100644 --- a/examples/iOS/FilterShowcase/FilterShowcaseSwift/AppDelegate.swift +++ b/examples/iOS/FilterShowcase/FilterShowcaseSwift/AppDelegate.swift @@ -1,12 +1,11 @@ import UIKit +import GPUImage @UIApplicationMain class AppDelegate: UIResponder, UIApplicationDelegate { - var window: UIWindow? - func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject : AnyObject]?) -> Bool { - return true + func applicationDidFinishLaunching(_ application: UIApplication) { + _needCheckFilterContainerThread = false } } - diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift index b98b61bb..a3e8dff7 100644 --- a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift +++ b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift @@ -5,17 +5,15 @@ import AVFoundation let blendImageName = "WID-small.jpg" class FilterDisplayViewController: UIViewController, UISplitViewControllerDelegate { - @IBOutlet var filterSlider: UISlider? @IBOutlet var filterView: RenderView? - let videoCamera:Camera? - var blendImage:PictureInput? + let videoCamera: Camera? + var blendImage: PictureInput? 
- required init(coder aDecoder: NSCoder) - { + required init(coder aDecoder: NSCoder) { do { - videoCamera = try Camera(sessionPreset:AVCaptureSessionPreset640x480, location:.backFacing) + videoCamera = try Camera(sessionPreset: AVCaptureSession.Preset.vga640x480, location: .backFacing) videoCamera!.runBenchmark = true } catch { videoCamera = nil @@ -45,11 +43,11 @@ class FilterDisplayViewController: UIViewController, UISplitViewControllerDelega currentFilterConfiguration.filter.addTarget(view) case .blend: videoCamera.addTarget(currentFilterConfiguration.filter) - self.blendImage = PictureInput(imageName:blendImageName) + self.blendImage = try? PictureInput(imageName: blendImageName) self.blendImage?.addTarget(currentFilterConfiguration.filter) self.blendImage?.processImage() currentFilterConfiguration.filter.addTarget(view) - case let .custom(filterSetupFunction:setupFunction): + case let .custom(filterSetupFunction: setupFunction): currentFilterConfiguration.configureCustomFilter(setupFunction(videoCamera, currentFilterConfiguration.filter, view)) } @@ -76,8 +74,8 @@ class FilterDisplayViewController: UIViewController, UISplitViewControllerDelega @IBAction func updateSliderValue() { if let currentFilterConfiguration = self.filterOperation { - switch (currentFilterConfiguration.sliderConfiguration) { - case .enabled(_, _, _): currentFilterConfiguration.updateBasedOnSliderValue(Float(self.filterSlider!.value)) + switch currentFilterConfiguration.sliderConfiguration { + case .enabled: currentFilterConfiguration.updateBasedOnSliderValue(Float(self.filterSlider!.value)) case .disabled: break } } @@ -104,4 +102,3 @@ class FilterDisplayViewController: UIViewController, UISplitViewControllerDelega } } - diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterListViewController.swift b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterListViewController.swift index 430af00b..a596376e 100644 --- 
a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterListViewController.swift +++ b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterListViewController.swift @@ -1,13 +1,12 @@ import UIKit class FilterListViewController: UITableViewController { - - var filterDisplayViewController: FilterDisplayViewController? = nil + var filterDisplayViewController: FilterDisplayViewController? var objects = NSMutableArray() // #pragma mark - Segues - override func prepare(for segue: UIStoryboardSegue, sender: Any?){ + override func prepare(for segue: UIStoryboardSegue, sender: Any?) { if segue.identifier == "showDetail" { if let indexPath = self.tableView.indexPathForSelectedRow { let filterInList = filterOperations[(indexPath as NSIndexPath).row] @@ -30,9 +29,8 @@ class FilterListViewController: UITableViewController { override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell { let cell = tableView.dequeueReusableCell(withIdentifier: "Cell", for: indexPath) - let filterInList:FilterOperationInterface = filterOperations[(indexPath as NSIndexPath).row] + let filterInList: FilterOperationInterface = filterOperations[(indexPath as NSIndexPath).row] cell.textLabel?.text = filterInList.listName return cell } } - diff --git a/examples/iOS/SimpleImageFilter/SimpleImageFilter.xcodeproj/project.pbxproj b/examples/iOS/SimpleImageFilter/SimpleImageFilter.xcodeproj/project.pbxproj index 6fb5dd86..2f0afb57 100644 --- a/examples/iOS/SimpleImageFilter/SimpleImageFilter.xcodeproj/project.pbxproj +++ b/examples/iOS/SimpleImageFilter/SimpleImageFilter.xcodeproj/project.pbxproj @@ -94,7 +94,7 @@ isa = PBXGroup; children = ( BC9E36601E525B5B00B8604F /* GPUImage.framework */, - BC9E36621E525B5B00B8604F /* GPUImage.xctest */, + BC9E36621E525B5B00B8604F /* GPUImageTests_macOS.xctest */, BC9E36641E525B5B00B8604F /* GPUImage.framework */, BC9E36661E525B5B00B8604F /* GPUImageTests_iOS.xctest */, ); @@ -219,10 +219,10 @@ remoteRef = 
BC9E365F1E525B5B00B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; - BC9E36621E525B5B00B8604F /* GPUImage.xctest */ = { + BC9E36621E525B5B00B8604F /* GPUImageTests_macOS.xctest */ = { isa = PBXReferenceProxy; fileType = wrapper.cfbundle; - path = GPUImage.xctest; + path = GPUImageTests_macOS.xctest; remoteRef = BC9E36611E525B5B00B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; diff --git a/examples/iOS/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift b/examples/iOS/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift index 7d21e236..44ffc100 100644 --- a/examples/iOS/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift +++ b/examples/iOS/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift @@ -2,11 +2,9 @@ import UIKit @UIApplicationMain class AppDelegate: UIResponder, UIApplicationDelegate { - var window: UIWindow? func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: AnyObject]?) -> Bool { return true } } - diff --git a/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift b/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift index 7980b1dc..2a729e7a 100644 --- a/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift +++ b/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift @@ -2,34 +2,44 @@ import UIKit import GPUImage class ViewController: UIViewController { - @IBOutlet weak var renderView: RenderView! - var picture:PictureInput! - var filter:SaturationAdjustment! + var picture: PictureInput! + var filter: SaturationAdjustment! override func viewDidLayoutSubviews() { super.viewDidLayoutSubviews() // Filtering image for saving - let testImage = UIImage(named:"WID-small.jpg")! + let testImage = UIImage(named: "WID-small.jpg")! 
let toonFilter = SmoothToonFilter() - let filteredImage = testImage.filterWithOperation(toonFilter) + + let filteredImage: UIImage + do { + filteredImage = try testImage.filterWithOperation(toonFilter) + } catch { + print("Couldn't filter image with error: \(error)") + return + } let pngImage = UIImagePNGRepresentation(filteredImage)! do { - let documentsDir = try FileManager.default.url(for:.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true) - let fileURL = URL(string:"test.png", relativeTo:documentsDir)! - try pngImage.write(to:fileURL, options:.atomic) + let documentsDir = try FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true) + let fileURL = URL(string: "test.png", relativeTo: documentsDir)! + try pngImage.write(to: fileURL, options: .atomic) } catch { print("Couldn't write to file with error: \(error)") } // Filtering image for display - picture = PictureInput(image:UIImage(named:"WID-small.jpg")!) + do { + picture = try PictureInput(image: UIImage(named: "WID-small.jpg")!) + } catch { + print("Couldn't create PictureInput with error: \(error)") + return + } filter = SaturationAdjustment() picture --> filter --> renderView picture.processImage() } } - diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding.xcodeproj/project.pbxproj b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding.xcodeproj/project.pbxproj new file mode 100644 index 00000000..6aae7e00 --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding.xcodeproj/project.pbxproj @@ -0,0 +1,467 @@ +// !$*UTF8*$! 
+{ + archiveVersion = 1; + classes = { + }; + objectVersion = 48; + objects = { + +/* Begin PBXBuildFile section */ + 1F2393442071C12C001886DD /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1F2393432071C12C001886DD /* AppDelegate.swift */; }; + 1F2393462071C12C001886DD /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1F2393452071C12C001886DD /* ViewController.swift */; }; + 1F2393492071C12C001886DD /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 1F2393472071C12C001886DD /* Main.storyboard */; }; + 1F23934B2071C12C001886DD /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 1F23934A2071C12C001886DD /* Assets.xcassets */; }; + 1F23934E2071C12C001886DD /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 1F23934C2071C12C001886DD /* LaunchScreen.storyboard */; }; + 1F2393662071C169001886DD /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F2393652071C169001886DD /* AVFoundation.framework */; }; + 1F2393682071C16D001886DD /* CoreAudio.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F2393672071C16D001886DD /* CoreAudio.framework */; }; + 1F23936D2071C2DB001886DD /* sample_iPod.m4v in Resources */ = {isa = PBXBuildFile; fileRef = 1F23936C2071C2DB001886DD /* sample_iPod.m4v */; }; + 1F2393772071F51C001886DD /* GPUImage.framework in CopyFiles */ = {isa = PBXBuildFile; fileRef = 1F2393612071C155001886DD /* GPUImage.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; + 1F2393792071FCB1001886DD /* Assets-iOS.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 1F2393782071FCB1001886DD /* Assets-iOS.xcassets */; }; + 1F23937B2071FCDB001886DD /* lookup_miss_etikate.png in Resources */ = {isa = PBXBuildFile; fileRef = 1F23937A2071FCDA001886DD /* lookup_miss_etikate.png */; }; +/* End PBXBuildFile section */ + +/* Begin PBXContainerItemProxy section */ + 1F23935C2071C155001886DD /* PBXContainerItemProxy 
*/ = { + isa = PBXContainerItemProxy; + containerPortal = 1F2393552071C155001886DD /* GPUImage.xcodeproj */; + proxyType = 2; + remoteGlobalIDString = BC6E7CAB1C39A9D8006DF678; + remoteInfo = GPUImage_macOS; + }; + 1F23935E2071C155001886DD /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 1F2393552071C155001886DD /* GPUImage.xcodeproj */; + proxyType = 2; + remoteGlobalIDString = BC6E7CB51C39A9D8006DF678; + remoteInfo = GPUImageTests_macOS; + }; + 1F2393602071C155001886DD /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 1F2393552071C155001886DD /* GPUImage.xcodeproj */; + proxyType = 2; + remoteGlobalIDString = BC9E34E91E524A2200B8604F; + remoteInfo = GPUImage_iOS; + }; + 1F2393622071C155001886DD /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 1F2393552071C155001886DD /* GPUImage.xcodeproj */; + proxyType = 2; + remoteGlobalIDString = BC9E34F11E524A2200B8604F; + remoteInfo = GPUImageTests_iOS; + }; + 1F23936A2071C29D001886DD /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 1F2393552071C155001886DD /* GPUImage.xcodeproj */; + proxyType = 1; + remoteGlobalIDString = BC9E34E81E524A2200B8604F; + remoteInfo = GPUImage_iOS; + }; +/* End PBXContainerItemProxy section */ + +/* Begin PBXCopyFilesBuildPhase section */ + 1F2393762071F506001886DD /* CopyFiles */ = { + isa = PBXCopyFilesBuildPhase; + buildActionMask = 2147483647; + dstPath = ""; + dstSubfolderSpec = 10; + files = ( + 1F2393772071F51C001886DD /* GPUImage.framework in CopyFiles */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXCopyFilesBuildPhase section */ + +/* Begin PBXFileReference section */ + 1F2393402071C12C001886DD /* SimpleMovieEncoding.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = SimpleMovieEncoding.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 1F2393432071C12C001886DD /* AppDelegate.swift */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; + 1F2393452071C12C001886DD /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; + 1F2393482071C12C001886DD /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; + 1F23934A2071C12C001886DD /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 1F23934D2071C12C001886DD /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; + 1F23934F2071C12C001886DD /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; + 1F2393552071C155001886DD /* GPUImage.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = GPUImage.xcodeproj; path = ../../../../framework/GPUImage.xcodeproj; sourceTree = ""; }; + 1F2393652071C169001886DD /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; }; + 1F2393672071C16D001886DD /* CoreAudio.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreAudio.framework; path = System/Library/Frameworks/CoreAudio.framework; sourceTree = SDKROOT; }; + 1F23936C2071C2DB001886DD /* sample_iPod.m4v */ = {isa = PBXFileReference; lastKnownFileType = file; name = sample_iPod.m4v; path = ../../../SharedAssets/sample_iPod.m4v; sourceTree = ""; }; + 1F2393782071FCB1001886DD /* Assets-iOS.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; name = "Assets-iOS.xcassets"; path = "../../../SharedAssets/Assets-iOS.xcassets"; 
sourceTree = ""; }; + 1F23937A2071FCDA001886DD /* lookup_miss_etikate.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = lookup_miss_etikate.png; sourceTree = ""; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + 1F23933D2071C12C001886DD /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + 1F2393682071C16D001886DD /* CoreAudio.framework in Frameworks */, + 1F2393662071C169001886DD /* AVFoundation.framework in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 1F2393372071C12C001886DD = { + isa = PBXGroup; + children = ( + 1F2393422071C12C001886DD /* SimpleMovieEncoding */, + 1F2393412071C12C001886DD /* Products */, + 1F2393642071C169001886DD /* Frameworks */, + ); + sourceTree = ""; + }; + 1F2393412071C12C001886DD /* Products */ = { + isa = PBXGroup; + children = ( + 1F2393402071C12C001886DD /* SimpleMovieEncoding.app */, + ); + name = Products; + sourceTree = ""; + }; + 1F2393422071C12C001886DD /* SimpleMovieEncoding */ = { + isa = PBXGroup; + children = ( + 1F2393432071C12C001886DD /* AppDelegate.swift */, + 1F2393452071C12C001886DD /* ViewController.swift */, + 1F2393472071C12C001886DD /* Main.storyboard */, + 1F2393552071C155001886DD /* GPUImage.xcodeproj */, + 1F23934A2071C12C001886DD /* Assets.xcassets */, + 1F2393782071FCB1001886DD /* Assets-iOS.xcassets */, + 1F23934C2071C12C001886DD /* LaunchScreen.storyboard */, + 1F23937A2071FCDA001886DD /* lookup_miss_etikate.png */, + 1F23936C2071C2DB001886DD /* sample_iPod.m4v */, + 1F23934F2071C12C001886DD /* Info.plist */, + ); + path = SimpleMovieEncoding; + sourceTree = ""; + }; + 1F2393562071C155001886DD /* Products */ = { + isa = PBXGroup; + children = ( + 1F23935D2071C155001886DD /* GPUImage.framework */, + 1F23935F2071C155001886DD /* GPUImageTests_macOS.xctest */, + 1F2393612071C155001886DD /* GPUImage.framework 
*/, + 1F2393632071C155001886DD /* GPUImageTests_iOS.xctest */, + ); + name = Products; + sourceTree = ""; + }; + 1F2393642071C169001886DD /* Frameworks */ = { + isa = PBXGroup; + children = ( + 1F2393672071C16D001886DD /* CoreAudio.framework */, + 1F2393652071C169001886DD /* AVFoundation.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + 1F23933F2071C12C001886DD /* SimpleMovieEncoding */ = { + isa = PBXNativeTarget; + buildConfigurationList = 1F2393522071C12C001886DD /* Build configuration list for PBXNativeTarget "SimpleMovieEncoding" */; + buildPhases = ( + 1F23933C2071C12C001886DD /* Sources */, + 1F23933D2071C12C001886DD /* Frameworks */, + 1F23933E2071C12C001886DD /* Resources */, + 1F2393762071F506001886DD /* CopyFiles */, + ); + buildRules = ( + ); + dependencies = ( + 1F23936B2071C29D001886DD /* PBXTargetDependency */, + ); + name = SimpleMovieEncoding; + productName = SimpleMovieEncoding; + productReference = 1F2393402071C12C001886DD /* SimpleMovieEncoding.app */; + productType = "com.apple.product-type.application"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + 1F2393382071C12C001886DD /* Project object */ = { + isa = PBXProject; + attributes = { + LastSwiftUpdateCheck = 0920; + LastUpgradeCheck = 0920; + ORGANIZATIONNAME = "Sunset Lake Software LLC"; + TargetAttributes = { + 1F23933F2071C12C001886DD = { + CreatedOnToolsVersion = 9.2; + ProvisioningStyle = Automatic; + }; + }; + }; + buildConfigurationList = 1F23933B2071C12C001886DD /* Build configuration list for PBXProject "SimpleMovieEncoding" */; + compatibilityVersion = "Xcode 8.0"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 1F2393372071C12C001886DD; + productRefGroup = 1F2393412071C12C001886DD /* Products */; + projectDirPath = ""; + projectReferences = ( + { + ProductGroup = 1F2393562071C155001886DD /* Products */; + 
ProjectRef = 1F2393552071C155001886DD /* GPUImage.xcodeproj */; + }, + ); + projectRoot = ""; + targets = ( + 1F23933F2071C12C001886DD /* SimpleMovieEncoding */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXReferenceProxy section */ + 1F23935D2071C155001886DD /* GPUImage.framework */ = { + isa = PBXReferenceProxy; + fileType = wrapper.framework; + path = GPUImage.framework; + remoteRef = 1F23935C2071C155001886DD /* PBXContainerItemProxy */; + sourceTree = BUILT_PRODUCTS_DIR; + }; + 1F23935F2071C155001886DD /* GPUImageTests_macOS.xctest */ = { + isa = PBXReferenceProxy; + fileType = wrapper.cfbundle; + path = GPUImageTests_macOS.xctest; + remoteRef = 1F23935E2071C155001886DD /* PBXContainerItemProxy */; + sourceTree = BUILT_PRODUCTS_DIR; + }; + 1F2393612071C155001886DD /* GPUImage.framework */ = { + isa = PBXReferenceProxy; + fileType = wrapper.framework; + path = GPUImage.framework; + remoteRef = 1F2393602071C155001886DD /* PBXContainerItemProxy */; + sourceTree = BUILT_PRODUCTS_DIR; + }; + 1F2393632071C155001886DD /* GPUImageTests_iOS.xctest */ = { + isa = PBXReferenceProxy; + fileType = wrapper.cfbundle; + path = GPUImageTests_iOS.xctest; + remoteRef = 1F2393622071C155001886DD /* PBXContainerItemProxy */; + sourceTree = BUILT_PRODUCTS_DIR; + }; +/* End PBXReferenceProxy section */ + +/* Begin PBXResourcesBuildPhase section */ + 1F23933E2071C12C001886DD /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 1F23934E2071C12C001886DD /* LaunchScreen.storyboard in Resources */, + 1F23936D2071C2DB001886DD /* sample_iPod.m4v in Resources */, + 1F23934B2071C12C001886DD /* Assets.xcassets in Resources */, + 1F2393492071C12C001886DD /* Main.storyboard in Resources */, + 1F2393792071FCB1001886DD /* Assets-iOS.xcassets in Resources */, + 1F23937B2071FCDB001886DD /* lookup_miss_etikate.png in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin 
PBXSourcesBuildPhase section */ + 1F23933C2071C12C001886DD /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 1F2393462071C12C001886DD /* ViewController.swift in Sources */, + 1F2393442071C12C001886DD /* AppDelegate.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin PBXTargetDependency section */ + 1F23936B2071C29D001886DD /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + name = GPUImage_iOS; + targetProxy = 1F23936A2071C29D001886DD /* PBXContainerItemProxy */; + }; +/* End PBXTargetDependency section */ + +/* Begin PBXVariantGroup section */ + 1F2393472071C12C001886DD /* Main.storyboard */ = { + isa = PBXVariantGroup; + children = ( + 1F2393482071C12C001886DD /* Base */, + ); + name = Main.storyboard; + sourceTree = ""; + }; + 1F23934C2071C12C001886DD /* LaunchScreen.storyboard */ = { + isa = PBXVariantGroup; + children = ( + 1F23934D2071C12C001886DD /* Base */, + ); + name = LaunchScreen.storyboard; + sourceTree = ""; + }; +/* End PBXVariantGroup section */ + +/* Begin XCBuildConfiguration section */ + 1F2393502071C12C001886DD /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + 
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + CODE_SIGN_IDENTITY = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 9.0; + MTL_ENABLE_DEBUG_INFO = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + }; + name = Debug; + }; + 1F2393512071C12C001886DD /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + 
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + CODE_SIGN_IDENTITY = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 9.0; + MTL_ENABLE_DEBUG_INFO = NO; + SDKROOT = iphoneos; + SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + 1F2393532071C12C001886DD /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_TEAM = ""; + INFOPLIST_FILE = SimpleMovieEncoding/Info.plist; + LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleMovieEncoding; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 4.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + 1F2393542071C12C001886DD /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_TEAM = ""; + INFOPLIST_FILE = SimpleMovieEncoding/Info.plist; + LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleMovieEncoding; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 4.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin 
XCConfigurationList section */ + 1F23933B2071C12C001886DD /* Build configuration list for PBXProject "SimpleMovieEncoding" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 1F2393502071C12C001886DD /* Debug */, + 1F2393512071C12C001886DD /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 1F2393522071C12C001886DD /* Build configuration list for PBXNativeTarget "SimpleMovieEncoding" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 1F2393532071C12C001886DD /* Debug */, + 1F2393542071C12C001886DD /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = 1F2393382071C12C001886DD /* Project object */; +} diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/AppDelegate.swift b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/AppDelegate.swift new file mode 100644 index 00000000..9cd476cd --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/AppDelegate.swift @@ -0,0 +1,42 @@ +// +// AppDelegate.swift +// SimpleMovieEncoding +// +// Created by Josh Bernfeld on 4/1/18. +// Copyright © 2018 Sunset Lake Software LLC. All rights reserved. +// + +import UIKit + +@UIApplicationMain +class AppDelegate: UIResponder, UIApplicationDelegate { + var window: UIWindow? + + func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool { + // Override point for customization after application launch. + return true + } + + func applicationWillResignActive(_ application: UIApplication) { + // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 
+ // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. + } + + func applicationDidEnterBackground(_ application: UIApplication) { + // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. + // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. + } + + func applicationWillEnterForeground(_ application: UIApplication) { + // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. + } + + func applicationDidBecomeActive(_ application: UIApplication) { + // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. + } + + func applicationWillTerminate(_ application: UIApplication) { + // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 
+ } + +} diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Assets.xcassets/AppIcon.appiconset/Contents.json b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 00000000..1d060ed2 --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,93 @@ +{ + "images" : [ + { + "idiom" : "iphone", + "size" : "20x20", + "scale" : "2x" + }, + { + "idiom" : "iphone", + "size" : "20x20", + "scale" : "3x" + }, + { + "idiom" : "iphone", + "size" : "29x29", + "scale" : "2x" + }, + { + "idiom" : "iphone", + "size" : "29x29", + "scale" : "3x" + }, + { + "idiom" : "iphone", + "size" : "40x40", + "scale" : "2x" + }, + { + "idiom" : "iphone", + "size" : "40x40", + "scale" : "3x" + }, + { + "idiom" : "iphone", + "size" : "60x60", + "scale" : "2x" + }, + { + "idiom" : "iphone", + "size" : "60x60", + "scale" : "3x" + }, + { + "idiom" : "ipad", + "size" : "20x20", + "scale" : "1x" + }, + { + "idiom" : "ipad", + "size" : "20x20", + "scale" : "2x" + }, + { + "idiom" : "ipad", + "size" : "29x29", + "scale" : "1x" + }, + { + "idiom" : "ipad", + "size" : "29x29", + "scale" : "2x" + }, + { + "idiom" : "ipad", + "size" : "40x40", + "scale" : "1x" + }, + { + "idiom" : "ipad", + "size" : "40x40", + "scale" : "2x" + }, + { + "idiom" : "ipad", + "size" : "76x76", + "scale" : "1x" + }, + { + "idiom" : "ipad", + "size" : "76x76", + "scale" : "2x" + }, + { + "idiom" : "ipad", + "size" : "83.5x83.5", + "scale" : "2x" + } + ], + "info" : { + "version" : 1, + "author" : "xcode" + } +} \ No newline at end of file diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/LaunchScreen.storyboard b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/LaunchScreen.storyboard new file mode 100644 index 00000000..f83f6fd5 --- /dev/null +++ 
b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/LaunchScreen.storyboard @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/Main.storyboard b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/Main.storyboard new file mode 100644 index 00000000..e411a78f --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/Main.storyboard @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Info.plist b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Info.plist new file mode 100644 index 00000000..16be3b68 --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Info.plist @@ -0,0 +1,45 @@ + + + + + CFBundleDevelopmentRegion + $(DEVELOPMENT_LANGUAGE) + CFBundleExecutable + $(EXECUTABLE_NAME) + CFBundleIdentifier + $(PRODUCT_BUNDLE_IDENTIFIER) + CFBundleInfoDictionaryVersion + 6.0 + CFBundleName + $(PRODUCT_NAME) + CFBundlePackageType + APPL + CFBundleShortVersionString + 1.0 + CFBundleVersion + 1 + LSRequiresIPhoneOS + + UILaunchStoryboardName + LaunchScreen + UIMainStoryboardFile + Main + UIRequiredDeviceCapabilities + + armv7 + + UISupportedInterfaceOrientations + + UIInterfaceOrientationPortrait + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UISupportedInterfaceOrientations~ipad + + UIInterfaceOrientationPortrait + UIInterfaceOrientationPortraitUpsideDown + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + + diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift new file mode 100755 index 00000000..0e159b17 --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift @@ -0,0 +1,128 @@ +// 
+// ViewController.swift +// SimpleMovieEncoding +// +// Created by Josh Bernfeld on 4/1/18. +// Copyright © 2018 Sunset Lake Software LLC. All rights reserved. +// + +import UIKit +import GPUImage +import CoreAudio +import AVFoundation + +class ViewController: UIViewController { + @IBOutlet var progressView: UIProgressView! + + var movieInput: MovieInput! + var movieOutput: MovieOutput! + var filter: MissEtikateFilter! + + override func viewDidLoad() { + super.viewDidLoad() + // Do any additional setup after loading the view, typically from a nib. + + let bundleURL = Bundle.main.resourceURL! + // The movie you want to reencode + let movieURL = URL(string: "sample_iPod.m4v", relativeTo: bundleURL)! + + let documentsDir = FileManager().urls(for: .documentDirectory, in: .userDomainMask).first! + // The location you want to save the new video + let exportedURL = URL(string: "test.mp4", relativeTo: documentsDir)! + + let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true)] + let asset = AVURLAsset(url: movieURL, options: inputOptions) + + guard let videoTrack = asset.tracks(withMediaType: AVMediaType.video).first else { return } + let audioTrack = asset.tracks(withMediaType: AVMediaType.audio).first + + let audioDecodingSettings: [String: Any]? + let audioEncodingSettings: [String: Any]? + var audioSourceFormatHint: CMFormatDescription? + + let shouldPassthroughAudio = false + if shouldPassthroughAudio { + audioDecodingSettings = nil + audioEncodingSettings = nil + // A format hint is required when writing to certain file types with passthrough audio + // A conditional downcast would not work here for some reason + if let description = audioTrack?.formatDescriptions.first { audioSourceFormatHint = (description as! 
CMFormatDescription) } + } else { + audioDecodingSettings = [AVFormatIDKey: kAudioFormatLinearPCM] // Noncompressed audio samples + var acl = AudioChannelLayout() + memset(&acl, 0, MemoryLayout.size) + acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo + audioEncodingSettings = [ + AVFormatIDKey: kAudioFormatMPEG4AAC, + AVNumberOfChannelsKey: 2, + AVSampleRateKey: AVAudioSession.sharedInstance().sampleRate, + AVChannelLayoutKey: NSData(bytes: &acl, length: MemoryLayout.size), + AVEncoderBitRateKey: 96000 + ] + audioSourceFormatHint = nil + } + + do { + movieInput = try MovieInput(asset: asset, videoComposition: nil, playAtActualSpeed: false, loop: false, audioSettings: audioDecodingSettings) + } catch { + print("ERROR: Unable to setup MovieInput with error: \(error)") + return + } + + try? FileManager().removeItem(at: exportedURL) + + let videoEncodingSettings: [String: Any] = [ + AVVideoCompressionPropertiesKey: [ + AVVideoExpectedSourceFrameRateKey: videoTrack.nominalFrameRate, + AVVideoAverageBitRateKey: videoTrack.estimatedDataRate, + AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel, + AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC, + AVVideoAllowFrameReorderingKey: videoTrack.requiresFrameReordering], + AVVideoCodecKey: AVVideoCodecH264] + + do { + movieOutput = try MovieOutput(URL: exportedURL, size: Size(width: Float(videoTrack.naturalSize.width), height: Float(videoTrack.naturalSize.height)), fileType: AVFileType.mp4.rawValue, liveVideo: false, videoSettings: videoEncodingSettings, videoNaturalTimeScale: videoTrack.naturalTimeScale, audioSettings: audioEncodingSettings, audioSourceFormatHint: audioSourceFormatHint) + } catch { + print("ERROR: Unable to setup MovieOutput with error: \(error)") + return + } + + filter = MissEtikateFilter() + + if audioTrack != nil { movieInput.audioEncodingTarget = movieOutput } + movieInput.synchronizedMovieOutput = movieOutput + // movieInput.synchronizedEncodingDebug = true + movieInput --> filter 
--> movieOutput + + movieInput.completion = { + self.movieOutput.finishRecording { + self.movieInput.audioEncodingTarget = nil + self.movieInput.synchronizedMovieOutput = nil + + DispatchQueue.main.async { + print("Encoding finished") + } + } + } + movieInput.progress = { progressVal in + DispatchQueue.main.async { + self.progressView.progress = Float(progressVal) + } + } + + movieOutput.startRecording { started, error in + if !started { + print("ERROR: MovieOutput unable to start writing with error: \(String(describing: error))") + return + } + self.movieInput.start() + print("Encoding started") + } + } + + override func didReceiveMemoryWarning() { + super.didReceiveMemoryWarning() + // Dispose of any resources that can be recreated. + } + +} diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/lookup_miss_etikate.png b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/lookup_miss_etikate.png new file mode 100644 index 00000000..e1317d78 Binary files /dev/null and b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/lookup_miss_etikate.png differ diff --git a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj index d570a4a3..c6875e4e 100644 --- a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj +++ b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj @@ -7,6 +7,9 @@ objects = { /* Begin PBXBuildFile section */ + 1F2393812071FDE4001886DD /* Assets-iOS.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 1F23937D2071FDE4001886DD /* Assets-iOS.xcassets */; }; + 1FDF369F2071965100089948 /* CoreAudio.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1FDF369E2071965100089948 /* CoreAudio.framework */; }; + 1FDF36A12071966B00089948 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1FDF36A02071966B00089948 /* AVFoundation.framework */; }; BC9E367C1E525BCF00B8604F /* 
GPUImage.framework in CopyFiles */ = {isa = PBXBuildFile; fileRef = BC9E36771E525BC000B8604F /* GPUImage.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; BCC49F931CD6E1D800B63EEB /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCC49F921CD6E1D800B63EEB /* AppDelegate.swift */; }; BCC49F951CD6E1D800B63EEB /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCC49F941CD6E1D800B63EEB /* ViewController.swift */; }; @@ -67,6 +70,9 @@ /* End PBXCopyFilesBuildPhase section */ /* Begin PBXFileReference section */ + 1F23937D2071FDE4001886DD /* Assets-iOS.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; name = "Assets-iOS.xcassets"; path = "../../../SharedAssets/Assets-iOS.xcassets"; sourceTree = ""; }; + 1FDF369E2071965100089948 /* CoreAudio.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreAudio.framework; path = System/Library/Frameworks/CoreAudio.framework; sourceTree = SDKROOT; }; + 1FDF36A02071966B00089948 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; }; BC9E366B1E525BC000B8604F /* GPUImage.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = GPUImage.xcodeproj; path = ../../../../framework/GPUImage.xcodeproj; sourceTree = ""; }; BCC49F8F1CD6E1D800B63EEB /* SimpleMovieFilter.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = SimpleMovieFilter.app; sourceTree = BUILT_PRODUCTS_DIR; }; BCC49F921CD6E1D800B63EEB /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; @@ -82,17 +88,28 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + 1FDF36A12071966B00089948 /* AVFoundation.framework in 
Frameworks */, + 1FDF369F2071965100089948 /* CoreAudio.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ + 1FDF369D2071965000089948 /* Frameworks */ = { + isa = PBXGroup; + children = ( + 1FDF36A02071966B00089948 /* AVFoundation.framework */, + 1FDF369E2071965100089948 /* CoreAudio.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; BC9E366C1E525BC000B8604F /* Products */ = { isa = PBXGroup; children = ( BC9E36731E525BC000B8604F /* GPUImage.framework */, - BC9E36751E525BC000B8604F /* GPUImage.xctest */, + BC9E36751E525BC000B8604F /* GPUImageTests_macOS.xctest */, BC9E36771E525BC000B8604F /* GPUImage.framework */, BC9E36791E525BC000B8604F /* GPUImageTests_iOS.xctest */, ); @@ -105,6 +122,7 @@ BCC49F911CD6E1D800B63EEB /* Application */, BCC49FA41CD6E1E300B63EEB /* Resources */, BCC49F901CD6E1D800B63EEB /* Products */, + 1FDF369D2071965000089948 /* Frameworks */, ); sourceTree = ""; }; @@ -122,6 +140,7 @@ BCC49F921CD6E1D800B63EEB /* AppDelegate.swift */, BCC49F941CD6E1D800B63EEB /* ViewController.swift */, BCC49F961CD6E1D800B63EEB /* Main.storyboard */, + 1F23937D2071FDE4001886DD /* Assets-iOS.xcassets */, BC9E366B1E525BC000B8604F /* GPUImage.xcodeproj */, ); name = Application; @@ -209,10 +228,10 @@ remoteRef = BC9E36721E525BC000B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; - BC9E36751E525BC000B8604F /* GPUImage.xctest */ = { + BC9E36751E525BC000B8604F /* GPUImageTests_macOS.xctest */ = { isa = PBXReferenceProxy; fileType = wrapper.cfbundle; - path = GPUImage.xctest; + path = GPUImageTests_macOS.xctest; remoteRef = BC9E36741E525BC000B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; @@ -239,6 +258,7 @@ files = ( BCC49FA61CD6E20000B63EEB /* sample_iPod.m4v in Resources */, BCC49F9D1CD6E1D800B63EEB /* LaunchScreen.storyboard in Resources */, + 1F2393812071FDE4001886DD /* Assets-iOS.xcassets in Resources */, 
BCC49F981CD6E1D800B63EEB /* Main.storyboard in Resources */, ); runOnlyForDeploymentPostprocessing = 0; diff --git a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift index e6bc194a..7b25de29 100644 --- a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift +++ b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift @@ -10,10 +10,8 @@ import UIKit @UIApplicationMain class AppDelegate: UIResponder, UIApplicationDelegate { - var window: UIWindow? - func application(application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: AnyObject]?) -> Bool { // Override point for customization after application launch. return true @@ -41,6 +39,4 @@ class AppDelegate: UIResponder, UIApplicationDelegate { // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. } - } - diff --git a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/Base.lproj/Main.storyboard b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/Base.lproj/Main.storyboard index 67c04ee8..769960fe 100644 --- a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/Base.lproj/Main.storyboard +++ b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/Base.lproj/Main.storyboard @@ -1,8 +1,13 @@ - - + + + + + - + + + @@ -14,18 +19,61 @@ - + - - - + + + + + + + + + - + + + + + + + + @@ -34,6 +82,7 @@ + diff --git a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift index 173b6b8a..ee04dcb9 100644 --- a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift +++ b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift @@ -1,25 +1,34 @@ import UIKit import GPUImage +import CoreAudio +import AVFoundation class ViewController: UIViewController { - @IBOutlet weak var renderView: RenderView! - var movie:MovieInput! 
- var filter:Pixellate! + var movie: MovieInput! + var filter: Pixellate! + var speaker: SpeakerOutput! - override func viewDidLayoutSubviews() { - super.viewDidLayoutSubviews() + override func viewDidLoad() { + super.viewDidLoad() let bundleURL = Bundle.main.resourceURL! - let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! + let movieURL = URL(string: "sample_iPod.m4v", relativeTo: bundleURL)! do { - movie = try MovieInput(url:movieURL, playAtActualSpeed:true) + let audioDecodeSettings = [AVFormatIDKey: kAudioFormatLinearPCM] + + movie = try MovieInput(url: movieURL, playAtActualSpeed: true, loop: true, audioSettings: audioDecodeSettings) + speaker = SpeakerOutput() + movie.audioEncodingTarget = speaker + filter = Pixellate() movie --> filter --> renderView movie.runBenchmark = true + movie.start() + speaker.start() } catch { print("Couldn't process movie with error: \(error)") } @@ -28,5 +37,19 @@ class ViewController: UIViewController { // let fileURL = NSURL(string:"test.png", relativeToURL:documentsDir)! 
// try pngImage.writeToURL(fileURL, options:.DataWritingAtomic) } + + @IBAction func pause() { + movie.pause() + speaker.cancel() + } + + @IBAction func cancel() { + movie.cancel() + speaker.cancel() + } + + @IBAction func play() { + movie.start() + speaker.start() + } } - diff --git a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter.xcodeproj/project.pbxproj b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter.xcodeproj/project.pbxproj index 757f18c9..1f49e1f5 100755 --- a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter.xcodeproj/project.pbxproj +++ b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter.xcodeproj/project.pbxproj @@ -92,7 +92,7 @@ isa = PBXGroup; children = ( BC9E36861E525C2A00B8604F /* GPUImage.framework */, - BC9E36881E525C2A00B8604F /* GPUImage.xctest */, + BC9E36881E525C2A00B8604F /* GPUImageTests_macOS.xctest */, BC9E368A1E525C2A00B8604F /* GPUImage.framework */, BC9E368C1E525C2A00B8604F /* GPUImageTests_iOS.xctest */, ); @@ -208,10 +208,10 @@ remoteRef = BC9E36851E525C2A00B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; - BC9E36881E525C2A00B8604F /* GPUImage.xctest */ = { + BC9E36881E525C2A00B8604F /* GPUImageTests_macOS.xctest */ = { isa = PBXReferenceProxy; fileType = wrapper.cfbundle; - path = GPUImage.xctest; + path = GPUImageTests_macOS.xctest; remoteRef = BC9E36871E525C2A00B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; diff --git a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift index e2bb4729..c62fcaf2 100755 --- a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift +++ b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift @@ -2,13 +2,9 @@ import UIKit @UIApplicationMain class AppDelegate: UIResponder, UIApplicationDelegate { - var window: UIWindow? - func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: AnyObject]?) 
-> Bool { - - return true } @@ -31,6 +27,4 @@ class AppDelegate: UIResponder, UIApplicationDelegate { // Pause camera if not already } - } - diff --git a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift index ae28dce9..cb6de04e 100755 --- a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift +++ b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift @@ -17,13 +17,13 @@ class ViewController: UIViewController { }() let saturationFilter = SaturationAdjustment() let blendFilter = AlphaBlend() - var camera:Camera! + var camera: Camera! override func viewDidLoad() { super.viewDidLoad() do { - camera = try Camera(sessionPreset:AVCaptureSessionPreset640x480) + camera = try Camera(sessionPreset: AVCaptureSessionPreset640x480) camera.runBenchmark = true camera.delegate = self camera --> saturationFilter --> blendFilter --> renderView @@ -46,8 +46,8 @@ class ViewController: UIViewController { @IBAction func capture(_ sender: AnyObject) { print("Capture") do { - let documentsDir = try FileManager.default.url(for:.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true) - saturationFilter.saveNextFrameToURL(URL(string:"TestImage.png", relativeTo:documentsDir)!, format:.png) + let documentsDir = try FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true) + saturationFilter.saveNextFrameToURL(URL(string: "TestImage.png", relativeTo: documentsDir)!, format: .png) } catch { print("Couldn't save image: \(error)") } @@ -78,7 +78,7 @@ extension ViewController: CameraDelegate { let flip = CGAffineTransform(scaleX: 1, y: -1) let rotate = flip.rotated(by: CGFloat(-M_PI_2)) let translate = rotate.translatedBy(x: -1, y: -1) - let xform = translate.scaledBy(x: CGFloat(2/fbSize.width), y: CGFloat(2/fbSize.height)) + let xform = translate.scaledBy(x: CGFloat(2 / fbSize.width), y: CGFloat(2 / 
fbSize.height)) let glRect = bounds.applying(xform) let x = Float(glRect.origin.x) @@ -91,9 +91,9 @@ extension ViewController: CameraDelegate { let bl = Position(x, y + height) let br = Position(x + width, y + height) - return [.segment(p1:tl, p2:tr), // top - .segment(p1:tr, p2:br), // right - .segment(p1:br, p2:bl), // bottom - .segment(p1:bl, p2:tl)] // left + return [.segment(p1: tl, p2: tr), // top + .segment(p1: tr, p2: br), // right + .segment(p1: br, p2: bl), // bottom + .segment(p1: bl, p2: tl)] // left } } diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj index 937e99bf..90e3466c 100644 --- a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj +++ b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj @@ -92,7 +92,7 @@ isa = PBXGroup; children = ( BC9E36991E525C9900B8604F /* GPUImage.framework */, - BC9E369B1E525C9900B8604F /* GPUImage.xctest */, + BC9E369B1E525C9900B8604F /* GPUImageTests_macOS.xctest */, BC9E369D1E525C9900B8604F /* GPUImage.framework */, BC9E369F1E525C9900B8604F /* GPUImageTests_iOS.xctest */, ); @@ -209,10 +209,10 @@ remoteRef = BC9E36981E525C9900B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; - BC9E369B1E525C9900B8604F /* GPUImage.xctest */ = { + BC9E369B1E525C9900B8604F /* GPUImageTests_macOS.xctest */ = { isa = PBXReferenceProxy; fileType = wrapper.cfbundle; - path = GPUImage.xctest; + path = GPUImageTests_macOS.xctest; remoteRef = BC9E369A1E525C9900B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; @@ -322,7 +322,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 8.0; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; @@ -362,7 +362,7 @@ 
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 8.0; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; TARGETED_DEVICE_FAMILY = "1,2"; @@ -379,7 +379,7 @@ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleVideoRecorder; PRODUCT_NAME = "$(TARGET_NAME)"; - SWIFT_VERSION = 3.0; + SWIFT_VERSION = 5.0; }; name = Debug; }; @@ -392,7 +392,7 @@ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleVideoRecorder; PRODUCT_NAME = "$(TARGET_NAME)"; - SWIFT_VERSION = 3.0; + SWIFT_VERSION = 5.0; }; name = Release; }; diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift index 9da71688..1fabf495 100644 --- a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift +++ b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift @@ -2,12 +2,9 @@ import UIKit @UIApplicationMain class AppDelegate: UIResponder, UIApplicationDelegate { - var window: UIWindow? - func application(application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: AnyObject]?) -> Bool { return true } } - diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift index 82ed237e..14f28f50 100644 --- a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift +++ b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift @@ -4,16 +4,16 @@ import AVFoundation class ViewController: UIViewController { @IBOutlet weak var renderView: RenderView! - var camera:Camera! - var filter:SaturationAdjustment! + var camera: Camera! + var filter: SaturationAdjustment! 
var isRecording = false - var movieOutput:MovieOutput? = nil + var movieOutput: MovieOutput? override func viewDidLoad() { super.viewDidLoad() do { - camera = try Camera(sessionPreset:AVCaptureSessionPreset640x480) + camera = try Camera(sessionPreset: AVCaptureSession.Preset.vga640x480) camera.runBenchmark = true filter = SaturationAdjustment() camera --> filter --> renderView @@ -28,20 +28,40 @@ class ViewController: UIViewController { } @IBAction func capture(_ sender: AnyObject) { - if (!isRecording) { + if !isRecording { do { self.isRecording = true - let documentsDir = try FileManager.default.url(for:.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true) - let fileURL = URL(string:"test.mp4", relativeTo:documentsDir)! + let documentsDir = try FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true) + let fileURL = URL(string: "test.mp4", relativeTo: documentsDir)! do { - try FileManager.default.removeItem(at:fileURL) + try FileManager.default.removeItem(at: fileURL) } catch { } - movieOutput = try MovieOutput(URL:fileURL, size:Size(width:480, height:640), liveVideo:true) + // Do this now so we can access the audioOutput recommendedAudioSettings before initializing the MovieOutput + do { + try self.camera.addAudioInputsAndOutputs() + } catch { + fatalError("ERROR: Could not connect audio target with error: \(error)") + } + + let audioSettings = self.camera!.audioOutput?.recommendedAudioSettingsForAssetWriter(writingTo: AVFileType.mp4) as? [String: Any] + var videoSettings: [String: Any]? + if #available(iOS 11.0, *) { + videoSettings = self.camera!.videoOutput.recommendedVideoSettings(forVideoCodecType: .h264, assetWriterOutputFileType: AVFileType.mp4) as? 
[String: Any] + videoSettings![AVVideoWidthKey] = nil + videoSettings![AVVideoHeightKey] = nil + } + + movieOutput = try MovieOutput(URL: fileURL, size: Size(width: 480, height: 640), fileType: AVFileType.mp4, liveVideo: true, videoSettings: videoSettings, audioSettings: audioSettings) camera.audioEncodingTarget = movieOutput filter --> movieOutput! - movieOutput!.startRecording() + movieOutput!.startRecording { started, error in + if !started { + self.isRecording = false + fatalError("ERROR: Could not start writing with error: \(String(describing: error))") + } + } DispatchQueue.main.async { // Label not updating on the main thread, for some reason, so dispatching slightly after this (sender as! UIButton).titleLabel!.text = "Stop" @@ -50,7 +70,7 @@ class ViewController: UIViewController { fatalError("Couldn't initialize movie, error: \(error)") } } else { - movieOutput?.finishRecording{ + movieOutput?.finishRecording { self.isRecording = false DispatchQueue.main.async { (sender as! UIButton).titleLabel!.text = "Record" diff --git a/framework/GPUImage.xcodeproj/project.pbxproj b/framework/GPUImage.xcodeproj/project.pbxproj index a18a613c..db6f6501 100755 --- a/framework/GPUImage.xcodeproj/project.pbxproj +++ b/framework/GPUImage.xcodeproj/project.pbxproj @@ -7,6 +7,20 @@ objects = { /* Begin PBXBuildFile section */ + 1F499A731FDA0F9F0000E37E /* NSObject+Exception.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F499A711FDA0F9E0000E37E /* NSObject+Exception.m */; }; + 1F499A741FDA0F9F0000E37E /* NSObject+Exception.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F499A711FDA0F9E0000E37E /* NSObject+Exception.m */; }; + 1F499A751FDA0F9F0000E37E /* NSObject+Exception.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */; }; + 1F499A761FDA0F9F0000E37E /* NSObject+Exception.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */; }; + 1F6D1CB32048F81D00317B5F /* 
AudioToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F6D1CB22048F81D00317B5F /* AudioToolbox.framework */; }; + 1F6D1CB52048F8DD00317B5F /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F6D1CB42048F8DD00317B5F /* AVFoundation.framework */; }; + 1F6D1CB82048FB0300317B5F /* TPCircularBuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F6D1CB62048FB0300317B5F /* TPCircularBuffer.h */; }; + 1F6D1CB92048FB0300317B5F /* TPCircularBuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F6D1CB62048FB0300317B5F /* TPCircularBuffer.h */; }; + 1F6D1CBA2048FB0300317B5F /* TPCircularBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */; }; + 1F6D1CBB2048FB0300317B5F /* TPCircularBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */; }; + 1F6D1CC02048FFD900317B5F /* SpeakerOutput.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CBF2048FFD900317B5F /* SpeakerOutput.swift */; }; + 262E656D240F5F27002C27AB /* CILookupFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 262E656B240F5EE0002C27AB /* CILookupFilter.swift */; }; + 264B6AD9237303370090979C /* MoviePlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 264B6AD6237303040090979C /* MoviePlayer.swift */; }; + 264B6ADA2373033B0090979C /* FramebufferGenerator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 264B6AD5237303040090979C /* FramebufferGenerator.swift */; }; BC09239E1C92658200A2ADFA /* ShaderProgram_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */; }; BC0923A11C92661D00A2ADFA /* Pipeline_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC09239F1C9265A600A2ADFA /* Pipeline_Tests.swift */; }; BC0923A21C92664900A2ADFA /* Framebuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCB279EB1C8D11630013E213 /* Framebuffer.swift */; }; @@ -350,6 +364,8 @@ 
BCFF46FC1CBAF85000A0C521 /* TransformOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCFF46FB1CBAF85000A0C521 /* TransformOperation.swift */; }; BCFF46FE1CBB0C1F00A0C521 /* AverageColorExtractor.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCFF46FD1CBB0C1F00A0C521 /* AverageColorExtractor.swift */; }; BCFF47081CBB443B00A0C521 /* CameraConversion.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCFF47071CBB443B00A0C521 /* CameraConversion.swift */; }; + D1D81C8E226EE95D00013E68 /* ResizeCrop.swift in Sources */ = {isa = PBXBuildFile; fileRef = D1D81C8D226EE95C00013E68 /* ResizeCrop.swift */; }; + D1D81C8F226EE99000013E68 /* ResizeCrop.swift in Sources */ = {isa = PBXBuildFile; fileRef = D1D81C8D226EE95C00013E68 /* ResizeCrop.swift */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -370,6 +386,17 @@ /* End PBXContainerItemProxy section */ /* Begin PBXFileReference section */ + 1F499A711FDA0F9E0000E37E /* NSObject+Exception.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "NSObject+Exception.m"; path = "Source/NSObject+Exception.m"; sourceTree = ""; }; + 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "NSObject+Exception.h"; path = "Source/NSObject+Exception.h"; sourceTree = ""; }; + 1F499A771FDA0FE20000E37E /* GPUImage-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "GPUImage-Bridging-Header.h"; path = "Source/GPUImage-Bridging-Header.h"; sourceTree = ""; }; + 1F6D1CB22048F81D00317B5F /* AudioToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioToolbox.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS11.2.sdk/System/Library/Frameworks/AudioToolbox.framework; sourceTree = DEVELOPER_DIR; }; + 1F6D1CB42048F8DD00317B5F /* AVFoundation.framework */ = {isa = 
PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS11.2.sdk/System/Library/Frameworks/AVFoundation.framework; sourceTree = DEVELOPER_DIR; }; + 1F6D1CB62048FB0300317B5F /* TPCircularBuffer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = TPCircularBuffer.h; path = Source/TPCircularBuffer.h; sourceTree = ""; }; + 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = TPCircularBuffer.m; path = Source/TPCircularBuffer.m; sourceTree = ""; }; + 1F6D1CBF2048FFD900317B5F /* SpeakerOutput.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = SpeakerOutput.swift; path = Source/iOS/SpeakerOutput.swift; sourceTree = ""; }; + 262E656B240F5EE0002C27AB /* CILookupFilter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = CILookupFilter.swift; path = Source/iOS/CILookupFilter.swift; sourceTree = ""; }; + 264B6AD5237303040090979C /* FramebufferGenerator.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = FramebufferGenerator.swift; path = Source/iOS/FramebufferGenerator.swift; sourceTree = ""; }; + 264B6AD6237303040090979C /* MoviePlayer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = MoviePlayer.swift; path = Source/iOS/MoviePlayer.swift; sourceTree = ""; }; BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ShaderProgram_Tests.swift; path = Tests/ShaderProgram_Tests.swift; sourceTree = SOURCE_ROOT; }; BC09239F1C9265A600A2ADFA /* Pipeline_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = Pipeline_Tests.swift; path = 
Tests/Pipeline_Tests.swift; sourceTree = SOURCE_ROOT; }; BC1E12F41C9F2FD7008F844F /* ThreeInput.vsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = ThreeInput.vsh; path = Source/Operations/Shaders/ThreeInput.vsh; sourceTree = ""; }; @@ -686,6 +713,7 @@ BCFF46FF1CBB0D8900A0C521 /* AverageColor_GL.fsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = AverageColor_GL.fsh; path = Source/Operations/Shaders/AverageColor_GL.fsh; sourceTree = ""; }; BCFF47001CBB0D8900A0C521 /* AverageColor.vsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = AverageColor.vsh; path = Source/Operations/Shaders/AverageColor.vsh; sourceTree = ""; }; BCFF47071CBB443B00A0C521 /* CameraConversion.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = CameraConversion.swift; path = Source/CameraConversion.swift; sourceTree = ""; }; + D1D81C8D226EE95C00013E68 /* ResizeCrop.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ResizeCrop.swift; path = Source/Operations/ResizeCrop.swift; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ @@ -710,6 +738,8 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + 1F6D1CB52048F8DD00317B5F /* AVFoundation.framework in Frameworks */, + 1F6D1CB32048F81D00317B5F /* AudioToolbox.framework in Frameworks */, BC9E35021E524BE200B8604F /* OpenGLES.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; @@ -901,6 +931,7 @@ BCFF46AD1CB7554700A0C521 /* AdaptiveThreshold.swift */, BCFF46AF1CB7561D00A0C521 /* AdaptiveThreshold_GL.fsh */, BC2C48031CB80E860085E4BC /* Crop.swift */, + D1D81C8D226EE95C00013E68 /* ResizeCrop.swift */, BCFF46C51CB968DE00A0C521 /* ImageBuffer.swift */, BCFF46C71CB96AB100A0C521 /* LowPassFilter.swift */, BCFF46C91CB96BD700A0C521 /* 
HighPassFilter.swift */, @@ -961,8 +992,13 @@ BC6E7CAD1C39A9D8006DF678 /* Other */ = { isa = PBXGroup; children = ( + 1F6D1CB62048FB0300317B5F /* TPCircularBuffer.h */, + 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */, + 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */, + 1F499A711FDA0F9E0000E37E /* NSObject+Exception.m */, BC4C85ED1C9F042900FD95D8 /* ConvertedShaders_GL.swift */, BC9E35531E52521F00B8604F /* ConvertedShaders_GLES.swift */, + 1F499A771FDA0FE20000E37E /* GPUImage-Bridging-Header.h */, ); name = Other; sourceTree = ""; @@ -1002,6 +1038,8 @@ BC6E7CCB1C39ADDD006DF678 /* Frameworks */ = { isa = PBXGroup; children = ( + 1F6D1CB42048F8DD00317B5F /* AVFoundation.framework */, + 1F6D1CB22048F81D00317B5F /* AudioToolbox.framework */, BC9E35011E524BE200B8604F /* OpenGLES.framework */, BC6E7CC91C39ADCC006DF678 /* OpenGL.framework */, ); @@ -1100,6 +1138,8 @@ BC9E350A1E524C8000B8604F /* iOS */ = { isa = PBXGroup; children = ( + 264B6AD5237303040090979C /* FramebufferGenerator.swift */, + 264B6AD6237303040090979C /* MoviePlayer.swift */, BC9E350B1E524CB900B8604F /* Camera.swift */, BC9E35111E524CE400B8604F /* YUVConversionFullRange_GLES.fsh */, BC9E35131E524CE400B8604F /* YUVConversionFullRangeUVPlanar_GLES.fsh */, @@ -1123,6 +1163,8 @@ BC9E35201E524D2A00B8604F /* iOS */ = { isa = PBXGroup; children = ( + 262E656B240F5EE0002C27AB /* CILookupFilter.swift */, + 1F6D1CBF2048FFD900317B5F /* SpeakerOutput.swift */, BC9E35231E524D4D00B8604F /* RenderView.swift */, BC9E35221E524D4D00B8604F /* PictureOutput.swift */, BC9E35211E524D4D00B8604F /* MovieOutput.swift */, @@ -1198,6 +1240,8 @@ isa = PBXHeadersBuildPhase; buildActionMask = 2147483647; files = ( + 1F499A751FDA0F9F0000E37E /* NSObject+Exception.h in Headers */, + 1F6D1CB82048FB0300317B5F /* TPCircularBuffer.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -1205,6 +1249,8 @@ isa = PBXHeadersBuildPhase; buildActionMask = 2147483647; files = ( + 1F499A761FDA0F9F0000E37E /* NSObject+Exception.h 
in Headers */, + 1F6D1CB92048FB0300317B5F /* TPCircularBuffer.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -1295,13 +1341,14 @@ TargetAttributes = { BC6E7CAA1C39A9D8006DF678 = { CreatedOnToolsVersion = 7.2; - LastSwiftMigration = 0800; + LastSwiftMigration = 0910; }; BC6E7CB41C39A9D8006DF678 = { CreatedOnToolsVersion = 7.2; }; BC9E34E81E524A2200B8604F = { CreatedOnToolsVersion = 8.2.1; + LastSwiftMigration = 0910; ProvisioningStyle = Automatic; }; BC9E34F01E524A2200B8604F = { @@ -1316,6 +1363,7 @@ developmentRegion = English; hasScannedForEncodings = 0; knownRegions = ( + English, en, ); mainGroup = BC6E7CA11C39A9D8006DF678; @@ -1404,6 +1452,7 @@ BCFF46E21CBADB3E00A0C521 /* SingleComponentGaussianBlur.swift in Sources */, BC7FD0F71CB0620E00037949 /* ChromaKeyBlend.swift in Sources */, BC7FD0861CA62E1100037949 /* BrightnessAdjustment.swift in Sources */, + D1D81C8E226EE95D00013E68 /* ResizeCrop.swift in Sources */, BC7FD1631CB17C8D00037949 /* ImageOrientation.swift in Sources */, BC9673411C8B897100FB64C2 /* FramebufferCache.swift in Sources */, BC7FD0BB1CA7799B00037949 /* Halftone.swift in Sources */, @@ -1447,6 +1496,7 @@ BCFF46C01CB9556B00A0C521 /* WhiteBalance.swift in Sources */, BC7FD14E1CB0BD3900037949 /* ZoomBlur.swift in Sources */, BCFB07921CBF37A1009B2333 /* TextureInput.swift in Sources */, + 1F6D1CBA2048FB0300317B5F /* TPCircularBuffer.m in Sources */, BC6E7CC71C39AD9E006DF678 /* ShaderProgram.swift in Sources */, BCFF46CA1CB96BD700A0C521 /* HighPassFilter.swift in Sources */, BC7FD1321CB0A57F00037949 /* HighlightsAndShadows.swift in Sources */, @@ -1454,6 +1504,7 @@ BC7FD11C1CB0795A00037949 /* NormalBlend.swift in Sources */, BC4EE15E1CB3481F00AD8A65 /* ThresholdSobelEdgeDetection.swift in Sources */, BC7FD1911CB1D2A300037949 /* ImageGenerator.swift in Sources */, + 1F499A731FDA0F9F0000E37E /* NSObject+Exception.m in Sources */, BC7FD1201CB079B200037949 /* SaturationBlend.swift in Sources */, BCA4E2491CC3EF26007B51BA /* 
ColourFASTFeatureDetection.swift in Sources */, BC7FD0FD1CB06E0000037949 /* Position.swift in Sources */, @@ -1575,6 +1626,7 @@ BC9E35931E52574100B8604F /* UnsharpMask.swift in Sources */, BC9E35AF1E52579900B8604F /* BulgeDistortion.swift in Sources */, BC9E35741E5256DE00B8604F /* LookupFilter.swift in Sources */, + 264B6AD9237303370090979C /* MoviePlayer.swift in Sources */, BC9E35901E52573700B8604F /* Dilation.swift in Sources */, BC9E35B71E5257B300B8604F /* ThresholdSketch.swift in Sources */, BC9E35711E5256D500B8604F /* LevelsAdjustment.swift in Sources */, @@ -1630,6 +1682,7 @@ BC9E35511E52518F00B8604F /* Timestamp.swift in Sources */, BC9E35781E5256EB00B8604F /* ColorMatrixFilter.swift in Sources */, BC9E35D11E52580400B8604F /* ScreenBlend.swift in Sources */, + 1F6D1CBB2048FB0300317B5F /* TPCircularBuffer.m in Sources */, BC9E356A1E5256C200B8604F /* Haze.swift in Sources */, BC9E35D31E52580A00B8604F /* SourceOverBlend.swift in Sources */, BC9E357E1E5256FE00B8604F /* Vibrance.swift in Sources */, @@ -1637,10 +1690,13 @@ BC9E356E1E5256CE00B8604F /* FalseColor.swift in Sources */, BC9E35881E52572000B8604F /* ThresholdSobelEdgeDetection.swift in Sources */, BC9E356F1E5256D000B8604F /* HighlightsAndShadows.swift in Sources */, + 1F6D1CC02048FFD900317B5F /* SpeakerOutput.swift in Sources */, + 1F499A741FDA0F9F0000E37E /* NSObject+Exception.m in Sources */, BC9E35AA1E52578900B8604F /* Halftone.swift in Sources */, BC9E35961E52574A00B8604F /* ImageBuffer.swift in Sources */, BC9E35831E52571100B8604F /* LocalBinaryPattern.swift in Sources */, BC9E35C71E5257E700B8604F /* ExclusionBlend.swift in Sources */, + 262E656D240F5F27002C27AB /* CILookupFilter.swift in Sources */, BC9E35A31E52577300B8604F /* KuwaharaFilter.swift in Sources */, BC9E35481E524DA700B8604F /* CrosshairGenerator.swift in Sources */, BC9E35B81E5257B500B8604F /* SmoothToonFilter.swift in Sources */, @@ -1668,11 +1724,13 @@ BC9E35CA1E5257F100B8604F /* LightenBlend.swift in Sources */, 
BC9E35291E524D5B00B8604F /* MovieOutput.swift in Sources */, BC9E356B1E5256C500B8604F /* ColorInversion.swift in Sources */, + D1D81C8F226EE99000013E68 /* ResizeCrop.swift in Sources */, BC9E35621E5256A500B8604F /* OperationGroup.swift in Sources */, BC9E35381E524D7E00B8604F /* OpenGLRendering.swift in Sources */, BC9E35B41E5257A900B8604F /* ToonFilter.swift in Sources */, BC9E354F1E52508A00B8604F /* RawDataInput.swift in Sources */, BC9E35681E5256BD00B8604F /* GammaAdjustment.swift in Sources */, + 264B6ADA2373033B0090979C /* FramebufferGenerator.swift in Sources */, BC9E35A81E52578400B8604F /* Vignette.swift in Sources */, BC9E355A1E5252C400B8604F /* TextureOutput.swift in Sources */, BC9E35841E52571300B8604F /* ColorLocalBinaryPattern.swift in Sources */, @@ -1783,11 +1841,12 @@ GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; INSTALL_PATH = /Library/Frameworks; - IPHONEOS_DEPLOYMENT_TARGET = 8.0; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; MACOSX_DEPLOYMENT_TARGET = 10.9; ONLY_ACTIVE_ARCH = YES; SDKROOT = macosx; SKIP_INSTALL = YES; + SWIFT_OBJC_BRIDGING_HEADER = "Source/GPUImage-Bridging-Header.h"; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; SWIFT_VERSION = 3.0; VERSIONING_SYSTEM = "apple-generic"; @@ -1827,10 +1886,11 @@ GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; INSTALL_PATH = /Library/Frameworks; - IPHONEOS_DEPLOYMENT_TARGET = 8.0; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; MACOSX_DEPLOYMENT_TARGET = 10.9; SDKROOT = macosx; SKIP_INSTALL = YES; + SWIFT_OBJC_BRIDGING_HEADER = "Source/GPUImage-Bridging-Header.h"; SWIFT_VERSION = 3.0; VERSIONING_SYSTEM = "apple-generic"; VERSION_INFO_PREFIX = ""; @@ -1910,6 +1970,7 @@ isa = XCBuildConfiguration; buildSettings = { CLANG_ANALYZER_NONNULL = YES; + CLANG_ENABLE_MODULES = YES; CLANG_WARN_DOCUMENTATION_COMMENTS = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; @@ -1922,7 +1983,7 @@ DYLIB_INSTALL_NAME_BASE = "@rpath"; INFOPLIST_FILE = 
"$(SRCROOT)/GPUImage.xcodeproj/GPUImage_Info.plist"; INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks"; - IPHONEOS_DEPLOYMENT_TARGET = 8.0; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; OTHER_SWIFT_FLAGS = "-DDEBUG -DGLES"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.GPUImage; @@ -1930,7 +1991,8 @@ SDKROOT = iphoneos; SKIP_INSTALL = YES; SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; - SWIFT_VERSION = 3.0; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + SWIFT_VERSION = 5.0; TARGETED_DEVICE_FAMILY = "1,2"; }; name = Debug; @@ -1939,6 +2001,7 @@ isa = XCBuildConfiguration; buildSettings = { CLANG_ANALYZER_NONNULL = YES; + CLANG_ENABLE_MODULES = YES; CLANG_WARN_DOCUMENTATION_COMMENTS = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; @@ -1951,7 +2014,7 @@ DYLIB_INSTALL_NAME_BASE = "@rpath"; INFOPLIST_FILE = "$(SRCROOT)/GPUImage.xcodeproj/GPUImage_Info.plist"; INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks"; - IPHONEOS_DEPLOYMENT_TARGET = 8.0; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; OTHER_SWIFT_FLAGS = "-DGLES"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.GPUImage; @@ -1959,7 +2022,7 @@ SDKROOT = iphoneos; SKIP_INSTALL = YES; SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; - SWIFT_VERSION = 3.0; + SWIFT_VERSION = 5.0; TARGETED_DEVICE_FAMILY = "1,2"; VALIDATE_PRODUCT = YES; }; diff --git a/framework/Package.swift b/framework/Package.swift index 66277ad0..6d6a5b81 100644 --- a/framework/Package.swift +++ b/framework/Package.swift @@ -16,22 +16,21 @@ let excludes = ["Linux", "Mac"] #elseif os(Linux) -let excludes = ["iOS", "Mac"] +let excludes = ["iOS", "Mac"] #endif - #if os(Linux) || os(macOS) || os(Linux) let package = Package( name: "GPUImage", providers: [ - .Apt("libv4l-dev"), - ], + .Apt("libv4l-dev") + ], targets: [ Target(name: "GPUImage") - ], - dependencies:[], 
+ ], + dependencies: [], exclude: excludes ) @@ -39,11 +38,10 @@ let package = Package( package.dependencies.append([ .Package(url: "./Packages/CVideo4Linux", majorVersion: 1), .Package(url: "./Packages/COpenGL", majorVersion: 1), - .Package(url: "./Packages/CFreeGLUT", majorVersion: 1), - ]) + .Package(url: "./Packages/CFreeGLUT", majorVersion: 1) + ]) #endif - #else fatalError("Unsupported OS") diff --git a/framework/Packages/CFreeGLUT/Package.swift b/framework/Packages/CFreeGLUT/Package.swift index e69de29b..8b137891 100755 --- a/framework/Packages/CFreeGLUT/Package.swift +++ b/framework/Packages/CFreeGLUT/Package.swift @@ -0,0 +1 @@ + diff --git a/framework/Packages/COpenGL/Package.swift b/framework/Packages/COpenGL/Package.swift index e69de29b..8b137891 100755 --- a/framework/Packages/COpenGL/Package.swift +++ b/framework/Packages/COpenGL/Package.swift @@ -0,0 +1 @@ + diff --git a/framework/Packages/COpenGLES/Package.swift b/framework/Packages/COpenGLES/Package.swift index e69de29b..8b137891 100755 --- a/framework/Packages/COpenGLES/Package.swift +++ b/framework/Packages/COpenGLES/Package.swift @@ -0,0 +1 @@ + diff --git a/framework/Packages/CVideo4Linux/Package.swift b/framework/Packages/CVideo4Linux/Package.swift index e69de29b..8b137891 100755 --- a/framework/Packages/CVideo4Linux/Package.swift +++ b/framework/Packages/CVideo4Linux/Package.swift @@ -0,0 +1 @@ + diff --git a/framework/Packages/CVideoCore/Package.swift b/framework/Packages/CVideoCore/Package.swift index e69de29b..8b137891 100755 --- a/framework/Packages/CVideoCore/Package.swift +++ b/framework/Packages/CVideoCore/Package.swift @@ -0,0 +1 @@ + diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index 60ad430d..79f372e8 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -1,26 +1,26 @@ import Foundation -public func defaultVertexShaderForInputs(_ inputCount:UInt) -> String { +public func 
defaultVertexShaderForInputs(_ inputCount: UInt) -> String { switch inputCount { - case 1: return OneInputVertexShader - case 2: return TwoInputVertexShader - case 3: return ThreeInputVertexShader - case 4: return FourInputVertexShader - case 5: return FiveInputVertexShader - default: return OneInputVertexShader + case 1: return OneInputVertexShader + case 2: return TwoInputVertexShader + case 3: return ThreeInputVertexShader + case 4: return FourInputVertexShader + case 5: return FiveInputVertexShader + default: return OneInputVertexShader } } open class BasicOperation: ImageProcessingOperation { - public let maximumInputs:UInt - public var overriddenOutputSize:Size? - public var overriddenOutputRotation:Rotation? + public let maximumInputs: UInt + public var overriddenOutputSize: Size? + public var overriddenOutputRotation: Rotation? public var backgroundColor = Color.black - public var drawUnmodifiedImageOutsideOfMask:Bool = true - public var mask:ImageSource? { + public var drawUnmodifiedImageOutsideOfMask = true + public var mask: ImageSource? { didSet { if let mask = mask { - maskImageRelay.newImageCallback = {[weak self] framebuffer in + maskImageRelay.newImageCallback = { [weak self] framebuffer in self?.maskFramebuffer?.unlock() framebuffer.lock() self?.maskFramebuffer = framebuffer @@ -33,44 +33,48 @@ open class BasicOperation: ImageProcessingOperation { } } } - public var activatePassthroughOnNextFrame:Bool = false + public var activatePassthroughOnNextFrame = false public var uniformSettings = ShaderUniformSettings() - + // MARK: - // MARK: Internal - + public let targets = TargetContainer() public let sources = SourceContainer() - var shader:ShaderProgram - var inputFramebuffers = [UInt:Framebuffer]() - var renderFramebuffer:Framebuffer! 
- var outputFramebuffer:Framebuffer { get { return renderFramebuffer } } - let usesAspectRatio:Bool + var shader: ShaderProgram + public var inputFramebuffers = [UInt: Framebuffer]() + var renderFramebuffer: Framebuffer! + var outputFramebuffer: Framebuffer { get { return renderFramebuffer } } + let usesAspectRatio: Bool let maskImageRelay = ImageRelay() - var maskFramebuffer:Framebuffer? + var maskFramebuffer: Framebuffer? + + #if DEBUG + public var debugRenderInfo: String = "" + #endif // MARK: - // MARK: Initialization and teardown - - public init(shader:ShaderProgram, numberOfInputs:UInt = 1) { + + public init(shader: ShaderProgram, numberOfInputs: UInt = 1) { self.maximumInputs = numberOfInputs self.shader = shader usesAspectRatio = shader.uniformIndex("aspectRatio") != nil } - public init(vertexShader:String? = nil, fragmentShader:String, numberOfInputs:UInt = 1, operationName:String = #file) { - let compiledShader = crashOnShaderCompileFailure(operationName){try sharedImageProcessingContext.programForVertexShader(vertexShader ?? defaultVertexShaderForInputs(numberOfInputs), fragmentShader:fragmentShader)} + public init(vertexShader: String? = nil, fragmentShader: String, numberOfInputs: UInt = 1, operationName: String = #file) { + let compiledShader = crashOnShaderCompileFailure(operationName) { try sharedImageProcessingContext.programForVertexShader(vertexShader ?? defaultVertexShaderForInputs(numberOfInputs), fragmentShader: fragmentShader) } self.maximumInputs = numberOfInputs self.shader = compiledShader usesAspectRatio = shader.uniformIndex("aspectRatio") != nil } - - public init(vertexShaderFile:URL? = nil, fragmentShaderFile:URL, numberOfInputs:UInt = 1, operationName:String = #file) throws { - let compiledShader:ShaderProgram + + public init(vertexShaderFile: URL? 
= nil, fragmentShaderFile: URL, numberOfInputs: UInt = 1, operationName: String = #file) throws { + let compiledShader: ShaderProgram if let vertexShaderFile = vertexShaderFile { - compiledShader = crashOnShaderCompileFailure(operationName){try sharedImageProcessingContext.programForVertexShader(vertexShaderFile, fragmentShader:fragmentShaderFile)} + compiledShader = crashOnShaderCompileFailure(operationName) { try sharedImageProcessingContext.programForVertexShader(vertexShaderFile, fragmentShader: fragmentShaderFile) } } else { - compiledShader = crashOnShaderCompileFailure(operationName){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(numberOfInputs), fragmentShader:fragmentShaderFile)} + compiledShader = crashOnShaderCompileFailure(operationName) { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(numberOfInputs), fragmentShader: fragmentShaderFile) } } self.maximumInputs = numberOfInputs self.shader = compiledShader @@ -84,27 +88,46 @@ open class BasicOperation: ImageProcessingOperation { // MARK: - // MARK: Rendering - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + open func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { if let previousFramebuffer = inputFramebuffers[fromSourceIndex] { previousFramebuffer.unlock() } inputFramebuffers[fromSourceIndex] = framebuffer - - guard (!activatePassthroughOnNextFrame) else { // Use this to allow a bootstrap of cyclical processing, like with a low pass filter + + guard !activatePassthroughOnNextFrame else { // Use this to allow a bootstrap of cyclical processing, like with a low pass filter activatePassthroughOnNextFrame = false updateTargetsWithFramebuffer(framebuffer) return } - if (UInt(inputFramebuffers.count) >= maximumInputs) { + if UInt(inputFramebuffers.count) >= maximumInputs { renderFrame() updateTargetsWithFramebuffer(outputFramebuffer) } } - func renderFrame() { - 
renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:sizeOfInitialStageBasedOnFramebuffer(inputFramebuffers[0]!), stencil:mask != nil) + open func renderFrame() { + #if DEBUG + let startTime = CACurrentMediaTime() + defer { + var inputsDebugInfo = "" + for framebuffer in inputFramebuffers { + inputsDebugInfo.append("\(framebuffer.value.debugRenderInfo), ") + } + debugRenderInfo = """ +{ + \(Self.self): { + inputs: [ + \(inputsDebugInfo) + ], + output: { size: \(renderFramebuffer.debugRenderInfo), time: \((CACurrentMediaTime() - startTime) * 1000.0)ms } + } +}, +""" + } + #endif + renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: sizeOfInitialStageBasedOnFramebuffer(inputFramebuffers[0]!), stencil: mask != nil) let textureProperties = initialTextureProperties() configureFramebufferSpecificUniforms(inputFramebuffers[0]!) @@ -113,45 +136,49 @@ open class BasicOperation: ImageProcessingOperation { clearFramebufferWithColor(backgroundColor) if let maskFramebuffer = maskFramebuffer { if drawUnmodifiedImageOutsideOfMask { - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:nil, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:textureProperties) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings: nil, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: textureProperties) } renderStencilMaskFromFramebuffer(maskFramebuffer) - internalRenderFunction(inputFramebuffers[0]!, textureProperties:textureProperties) + internalRenderFunction(inputFramebuffers[0]!, textureProperties: textureProperties) disableStencil() } else { - internalRenderFunction(inputFramebuffers[0]!, textureProperties:textureProperties) + internalRenderFunction(inputFramebuffers[0]!, textureProperties: textureProperties) } } - func 
internalRenderFunction(_ inputFramebuffer:Framebuffer, textureProperties:[InputTextureProperties]) { - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:textureProperties) + func internalRenderFunction(_ inputFramebuffer: Framebuffer, textureProperties: [InputTextureProperties]) { + renderQuadWithShader(shader, uniformSettings: uniformSettings, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: textureProperties) releaseIncomingFramebuffers() } func releaseIncomingFramebuffers() { - var remainingFramebuffers = [UInt:Framebuffer]() + var remainingFramebuffers = [UInt: Framebuffer]() // If all inputs are still images, have this output behave as one renderFramebuffer.timingStyle = .stillImage - var latestTimestamp:Timestamp? + var latestTimestamp: Timestamp? for (key, framebuffer) in inputFramebuffers { - // When there are multiple transient input sources, use the latest timestamp as the value to pass along if let timestamp = framebuffer.timingStyle.timestamp { if !(timestamp < (latestTimestamp ?? 
timestamp)) { latestTimestamp = timestamp - renderFramebuffer.timingStyle = .videoFrame(timestamp:timestamp) + renderFramebuffer.timingStyle = .videoFrame(timestamp: timestamp) } + framebuffer.unlock() + } else if framebuffer.shouldReturnToCache && framebuffer.cache != nil { framebuffer.unlock() } else { remainingFramebuffers[key] = framebuffer } } + + renderFramebuffer.userInfo = inputFramebuffers[0]!.userInfo + inputFramebuffers = remainingFramebuffers } - func sizeOfInitialStageBasedOnFramebuffer(_ inputFramebuffer:Framebuffer) -> GLSize { + func sizeOfInitialStageBasedOnFramebuffer(_ inputFramebuffer: Framebuffer) -> GLSize { if let outputSize = overriddenOutputSize { return GLSize(outputSize) } else { @@ -175,19 +202,17 @@ open class BasicOperation: ImageProcessingOperation { return inputTextureProperties } - func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { + open func configureFramebufferSpecificUniforms(_ inputFramebuffer: Framebuffer) { if usesAspectRatio { let outputRotation = overriddenOutputRotation ?? 
inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) uniformSettings["aspectRatio"] = inputFramebuffer.aspectRatioForRotation(outputRotation) } } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { - sharedImageProcessingContext.runOperationAsynchronously{ - guard let renderFramebuffer = self.renderFramebuffer, (!renderFramebuffer.timingStyle.isTransient()) else { return } - - renderFramebuffer.lock() - target.newFramebufferAvailable(renderFramebuffer, fromSourceIndex:atIndex) - } + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { + // guard let renderFramebuffer = self.renderFramebuffer, (!renderFramebuffer.timingStyle.isTransient()) else { return } + + // renderFramebuffer.lock() + // target.newFramebufferAvailable(renderFramebuffer, fromSourceIndex:atIndex) } } diff --git a/framework/Source/CameraConversion.swift b/framework/Source/CameraConversion.swift index 3d120540..d3f5390e 100644 --- a/framework/Source/CameraConversion.swift +++ b/framework/Source/CameraConversion.swift @@ -1,38 +1,55 @@ // Note: the original name of YUVToRGBConversion.swift for this file chokes the compiler on Linux for some reason // BT.601, which is the standard for SDTV. -public let colorConversionMatrix601Default = Matrix3x3(rowMajorValues:[ - 1.164, 1.164, 1.164, +public let colorConversionMatrix601Default = Matrix3x3(rowMajorValues: [ + 1.164, 1.164, 1.164, 0.0, -0.392, 2.017, - 1.596, -0.813, 0.0 + 1.596, -0.813, 0.0 ]) // BT.601 full range (ref: http://www.equasys.de/colorconversion.html) -public let colorConversionMatrix601FullRangeDefault = Matrix3x3(rowMajorValues:[ - 1.0, 1.0, 1.0, - 0.0, -0.343, 1.765, - 1.4, -0.711, 0.0, +public let colorConversionMatrix601FullRangeDefault = Matrix3x3(rowMajorValues: [ + 1.0, 1.0, 1.0, + 0.0, -0.343, 1.765, + 1.4, -0.711, 0.0 ]) // BT.709, which is the standard for HDTV. 
-public let colorConversionMatrix709Default = Matrix3x3(rowMajorValues:[ - 1.164, 1.164, 1.164, +public let colorConversionMatrix709Default = Matrix3x3(rowMajorValues: [ + 1.164, 1.164, 1.164, 0.0, -0.213, 2.112, - 1.793, -0.533, 0.0, + 1.793, -0.533, 0.0 ]) -public func convertYUVToRGB(shader:ShaderProgram, luminanceFramebuffer:Framebuffer, chrominanceFramebuffer:Framebuffer, secondChrominanceFramebuffer:Framebuffer? = nil, resultFramebuffer:Framebuffer, colorConversionMatrix:Matrix3x3) { - let textureProperties:[InputTextureProperties] - if let secondChrominanceFramebuffer = secondChrominanceFramebuffer { - textureProperties = [luminanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation), chrominanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation), secondChrominanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation)] +public func convertYUVToRGB(shader: ShaderProgram, luminanceFramebuffer: Framebuffer, chrominanceFramebuffer: Framebuffer, secondChrominanceFramebuffer: Framebuffer? = nil, resizeOutput: ResizeOutputInfo? = nil, resultFramebuffer: Framebuffer, colorConversionMatrix: Matrix3x3) { + let textureProperties: [InputTextureProperties] + let luminanceTextureProperties: InputTextureProperties + let chrominanceTextureProperties: InputTextureProperties + var secondChrominanceTextureProperties: InputTextureProperties? 
+ if let resizeOutput = resizeOutput { + luminanceTextureProperties = InputTextureProperties(textureCoordinates: luminanceFramebuffer.orientation.rotationNeededForOrientation(resultFramebuffer.orientation).croppedTextureCoordinates(offsetFromOrigin: resizeOutput.normalizedOffsetFromOrigin, cropSize: resizeOutput.normalizedCropSize), texture: luminanceFramebuffer.texture) + chrominanceTextureProperties = InputTextureProperties(textureCoordinates: chrominanceFramebuffer.orientation.rotationNeededForOrientation(resultFramebuffer.orientation).croppedTextureCoordinates(offsetFromOrigin: resizeOutput.normalizedOffsetFromOrigin, cropSize: resizeOutput.normalizedCropSize), texture: chrominanceFramebuffer.texture) + if let secondChrominanceFramebuffer = secondChrominanceFramebuffer { + secondChrominanceTextureProperties = InputTextureProperties(textureCoordinates: secondChrominanceFramebuffer.orientation.rotationNeededForOrientation(resultFramebuffer.orientation).croppedTextureCoordinates(offsetFromOrigin: resizeOutput.normalizedOffsetFromOrigin, cropSize: resizeOutput.normalizedCropSize), texture: secondChrominanceFramebuffer.texture) + } } else { - textureProperties = [luminanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation), chrominanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation)] + luminanceTextureProperties = luminanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation) + chrominanceTextureProperties = chrominanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation) + if let secondChrominanceFramebuffer = secondChrominanceFramebuffer { + secondChrominanceTextureProperties = secondChrominanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation) + } + } + + if let secondChrominanceTextureProperties = secondChrominanceTextureProperties { + textureProperties = [luminanceTextureProperties, chrominanceTextureProperties, 
secondChrominanceTextureProperties] + } else { + textureProperties = [luminanceTextureProperties, chrominanceTextureProperties] } resultFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.black) var uniformSettings = ShaderUniformSettings() uniformSettings["colorConversionMatrix"] = colorConversionMatrix - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:textureProperties) + renderQuadWithShader(shader, uniformSettings: uniformSettings, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: textureProperties) luminanceFramebuffer.unlock() chrominanceFramebuffer.unlock() secondChrominanceFramebuffer?.unlock() diff --git a/framework/Source/Color.swift b/framework/Source/Color.swift index 370e4f00..efb099c1 100644 --- a/framework/Source/Color.swift +++ b/framework/Source/Color.swift @@ -1,20 +1,20 @@ public struct Color { - public let redComponent:Float - public let greenComponent:Float - public let blueComponent:Float - public let alphaComponent:Float + public let redComponent: Float + public let greenComponent: Float + public let blueComponent: Float + public let alphaComponent: Float - public init(red:Float, green:Float, blue:Float, alpha:Float = 1.0) { + public init(red: Float, green: Float, blue: Float, alpha: Float = 1.0) { self.redComponent = red self.greenComponent = green self.blueComponent = blue self.alphaComponent = alpha } - public static let black = Color(red:0.0, green:0.0, blue:0.0, alpha:1.0) - public static let white = Color(red:1.0, green:1.0, blue:1.0, alpha:1.0) - public static let red = Color(red:1.0, green:0.0, blue:0.0, alpha:1.0) - public static let green = Color(red:0.0, green:1.0, blue:0.0, alpha:1.0) - public static let blue = Color(red:0.0, green:0.0, blue:1.0, alpha:1.0) - public static let transparent = Color(red:0.0, green:0.0, blue:0.0, alpha:0.0) + public static let black = Color(red: 0.0, 
green: 0.0, blue: 0.0, alpha: 1.0) + public static let white = Color(red: 1.0, green: 1.0, blue: 1.0, alpha: 1.0) + public static let red = Color(red: 1.0, green: 0.0, blue: 0.0, alpha: 1.0) + public static let green = Color(red: 0.0, green: 1.0, blue: 0.0, alpha: 1.0) + public static let blue = Color(red: 0.0, green: 0.0, blue: 1.0, alpha: 1.0) + public static let transparent = Color(red: 0.0, green: 0.0, blue: 0.0, alpha: 0.0) } diff --git a/framework/Source/FillMode.swift b/framework/Source/FillMode.swift index 650e9c96..ddd38d2e 100644 --- a/framework/Source/FillMode.swift +++ b/framework/Source/FillMode.swift @@ -13,24 +13,23 @@ import Glibc #endif #endif - public enum FillMode { case stretch case preserveAspectRatio case preserveAspectRatioAndFill - func transformVertices(_ vertices:[GLfloat], fromInputSize:GLSize, toFitSize:GLSize) -> [GLfloat] { - guard (vertices.count == 8) else { fatalError("Attempted to transform a non-quad to account for fill mode.") } + func transformVertices(_ vertices: [GLfloat], fromInputSize: GLSize, toFitSize: GLSize) -> [GLfloat] { + guard vertices.count == 8 else { fatalError("Attempted to transform a non-quad to account for fill mode.") } let aspectRatio = GLfloat(fromInputSize.height) / GLfloat(fromInputSize.width) let targetAspectRatio = GLfloat(toFitSize.height) / GLfloat(toFitSize.width) - let yRatio:GLfloat - let xRatio:GLfloat + let yRatio: GLfloat + let xRatio: GLfloat switch self { case .stretch: return vertices case .preserveAspectRatio: - if (aspectRatio > targetAspectRatio) { + if aspectRatio > targetAspectRatio { yRatio = 1.0 // xRatio = (GLfloat(toFitSize.height) / GLfloat(fromInputSize.height)) * (GLfloat(fromInputSize.width) / GLfloat(toFitSize.width)) xRatio = (GLfloat(fromInputSize.width) / GLfloat(toFitSize.width)) * (GLfloat(toFitSize.height) / GLfloat(fromInputSize.height)) @@ -39,7 +38,7 @@ public enum FillMode { yRatio = (GLfloat(fromInputSize.height) / GLfloat(toFitSize.height)) * 
(GLfloat(toFitSize.width) / GLfloat(fromInputSize.width)) } case .preserveAspectRatioAndFill: - if (aspectRatio > targetAspectRatio) { + if aspectRatio > targetAspectRatio { xRatio = 1.0 yRatio = (GLfloat(fromInputSize.height) / GLfloat(toFitSize.height)) * (GLfloat(toFitSize.width) / GLfloat(fromInputSize.width)) } else { @@ -51,21 +50,21 @@ public enum FillMode { // return [vertices[0] * xRatio, vertices[1] * yRatio, vertices[2] * xRatio, vertices[3] * yRatio, vertices[4] * xRatio, vertices[5] * yRatio, vertices[6] * xRatio, vertices[7] * yRatio] // TODO: Determine if this is misaligning things - let xConversionRatio:GLfloat = xRatio * GLfloat(toFitSize.width) / 2.0 - let xConversionDivisor:GLfloat = GLfloat(toFitSize.width) / 2.0 - let yConversionRatio:GLfloat = yRatio * GLfloat(toFitSize.height) / 2.0 - let yConversionDivisor:GLfloat = GLfloat(toFitSize.height) / 2.0 + let xConversionRatio: GLfloat = xRatio * GLfloat(toFitSize.width) / 2.0 + let xConversionDivisor = GLfloat(toFitSize.width) / 2.0 + let yConversionRatio: GLfloat = yRatio * GLfloat(toFitSize.height) / 2.0 + let yConversionDivisor = GLfloat(toFitSize.height) / 2.0 // The Double casting here is required by Linux - let value1:GLfloat = GLfloat(round(Double(vertices[0] * xConversionRatio))) / xConversionDivisor - let value2:GLfloat = GLfloat(round(Double(vertices[1] * yConversionRatio))) / yConversionDivisor - let value3:GLfloat = GLfloat(round(Double(vertices[2] * xConversionRatio))) / xConversionDivisor - let value4:GLfloat = GLfloat(round(Double(vertices[3] * yConversionRatio))) / yConversionDivisor - let value5:GLfloat = GLfloat(round(Double(vertices[4] * xConversionRatio))) / xConversionDivisor - let value6:GLfloat = GLfloat(round(Double(vertices[5] * yConversionRatio))) / yConversionDivisor - let value7:GLfloat = GLfloat(round(Double(vertices[6] * xConversionRatio))) / xConversionDivisor - let value8:GLfloat = GLfloat(round(Double(vertices[7] * yConversionRatio))) / yConversionDivisor + let 
value1 = GLfloat(round(Double(vertices[0] * xConversionRatio))) / xConversionDivisor + let value2 = GLfloat(round(Double(vertices[1] * yConversionRatio))) / yConversionDivisor + let value3 = GLfloat(round(Double(vertices[2] * xConversionRatio))) / xConversionDivisor + let value4 = GLfloat(round(Double(vertices[3] * yConversionRatio))) / yConversionDivisor + let value5 = GLfloat(round(Double(vertices[4] * xConversionRatio))) / xConversionDivisor + let value6 = GLfloat(round(Double(vertices[5] * yConversionRatio))) / yConversionDivisor + let value7 = GLfloat(round(Double(vertices[6] * xConversionRatio))) / xConversionDivisor + let value8 = GLfloat(round(Double(vertices[7] * yConversionRatio))) / yConversionDivisor return [value1, value2, value3, value4, value5, value6, value7, value8] diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index 4af692a1..879d5abf 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -15,15 +15,16 @@ import Glibc #endif import Foundation +import AVFoundation // TODO: Add a good lookup table to this to allow for detailed error messages -struct FramebufferCreationError:Error { - let errorCode:GLenum +struct FramebufferCreationError: Error { + let errorCode: GLenum } public enum FramebufferTimingStyle { case stillImage - case videoFrame(timestamp:Timestamp) + case videoFrame(timestamp: Timestamp) func isTransient() -> Bool { switch self { @@ -32,7 +33,7 @@ public enum FramebufferTimingStyle { } } - var timestamp:Timestamp? { + public var timestamp: Timestamp? { get { switch self { case .stillImage: return nil @@ -42,24 +43,26 @@ public enum FramebufferTimingStyle { } } -public class Framebuffer { - public var timingStyle:FramebufferTimingStyle = .stillImage - public var orientation:ImageOrientation - - public let texture:GLuint - let framebuffer:GLuint? - let stencilBuffer:GLuint? 
- public let size:GLSize - let internalFormat:Int32 - let format:Int32 - let type:Int32 +public class Framebuffer: Hashable { + public var timingStyle: FramebufferTimingStyle = .stillImage + public var orientation: ImageOrientation + public var userInfo: [AnyHashable: Any]? + + public let texture: GLuint + let framebuffer: GLuint? + let stencilBuffer: GLuint? + public let size: GLSize + let internalFormat: Int32 + let format: Int32 + let type: Int32 - let hash:Int64 - let textureOverride:Bool + let hash: Int64 + let textureOverride: Bool + let id = UUID().uuidString - weak var context:OpenGLContext? + unowned var context: OpenGLContext - public init(context:OpenGLContext, orientation:ImageOrientation, size:GLSize, textureOnly:Bool = false, minFilter:Int32 = GL_LINEAR, magFilter:Int32 = GL_LINEAR, wrapS:Int32 = GL_CLAMP_TO_EDGE, wrapT:Int32 = GL_CLAMP_TO_EDGE, internalFormat:Int32 = GL_RGBA, format:Int32 = GL_BGRA, type:Int32 = GL_UNSIGNED_BYTE, stencil:Bool = false, overriddenTexture:GLuint? = nil) throws { + public init(context: OpenGLContext, orientation: ImageOrientation, size: GLSize, textureOnly: Bool = false, minFilter: Int32 = GL_LINEAR, magFilter: Int32 = GL_LINEAR, wrapS: Int32 = GL_CLAMP_TO_EDGE, wrapT: Int32 = GL_CLAMP_TO_EDGE, internalFormat: Int32 = GL_RGBA, format: Int32 = GL_BGRA, type: Int32 = GL_UNSIGNED_BYTE, stencil: Bool = false, overriddenTexture: GLuint? 
= nil) throws { self.context = context self.size = size self.orientation = orientation @@ -67,19 +70,19 @@ public class Framebuffer { self.format = format self.type = type - self.hash = hashForFramebufferWithProperties(orientation:orientation, size:size, textureOnly:textureOnly, minFilter:minFilter, magFilter:magFilter, wrapS:wrapS, wrapT:wrapT, internalFormat:internalFormat, format:format, type:type, stencil:stencil) + self.hash = hashForFramebufferWithProperties(orientation: orientation, size: size, textureOnly: textureOnly, minFilter: minFilter, magFilter: magFilter, wrapS: wrapS, wrapT: wrapT, internalFormat: internalFormat, format: format, type: type, stencil: stencil) if let newTexture = overriddenTexture { textureOverride = true texture = newTexture } else { textureOverride = false - texture = generateTexture(minFilter:minFilter, magFilter:magFilter, wrapS:wrapS, wrapT:wrapT) + texture = generateTexture(minFilter: minFilter, magFilter: magFilter, wrapS: wrapS, wrapT: wrapT) } - if (!textureOnly) { + if !textureOnly { do { - let (createdFrameBuffer, createdStencil) = try generateFramebufferForTexture(texture, width:size.width, height:size.height, internalFormat:internalFormat, format:format, type:type, stencil:stencil) + let (createdFrameBuffer, createdStencil) = try generateFramebufferForTexture(texture, width: size.width, height: size.height, internalFormat: internalFormat, format: format, type: type, stencil: stencil) framebuffer = createdFrameBuffer stencilBuffer = createdStencil } catch { @@ -94,32 +97,38 @@ public class Framebuffer { } deinit { - if (!textureOverride) { + if !textureOverride { var mutableTexture = texture - glDeleteTextures(1, &mutableTexture) - debugPrint("Delete texture at size: \(size)") + context.runOperationAsynchronously { + glDeleteTextures(1, &mutableTexture) + } + // debugPrint("Delete texture at size: \(size)") } if let framebuffer = framebuffer { var mutableFramebuffer = framebuffer - glDeleteFramebuffers(1, 
&mutableFramebuffer) + context.runOperationAsynchronously { + glDeleteFramebuffers(1, &mutableFramebuffer) + } } if let stencilBuffer = stencilBuffer { var mutableStencil = stencilBuffer - glDeleteRenderbuffers(1, &mutableStencil) + context.runOperationAsynchronously { + glDeleteRenderbuffers(1, &mutableStencil) + } } } - func sizeForTargetOrientation(_ targetOrientation:ImageOrientation) -> GLSize { + public func sizeForTargetOrientation(_ targetOrientation: ImageOrientation) -> GLSize { if self.orientation.rotationNeededForOrientation(targetOrientation).flipsDimensions() { - return GLSize(width:size.height, height:size.width) + return GLSize(width: size.height, height: size.width) } else { return size } } - func aspectRatioForRotation(_ rotation:Rotation) -> Float { + public func aspectRatioForRotation(_ rotation: Rotation) -> Float { if rotation.flipsDimensions() { return Float(size.width) / Float(size.height) } else { @@ -127,27 +136,27 @@ public class Framebuffer { } } - public func texelSize(for rotation:Rotation) -> Size { + public func texelSize(for rotation: Rotation) -> Size { if rotation.flipsDimensions() { - return Size(width:1.0 / Float(size.height), height:1.0 / Float(size.width)) + return Size(width: 1.0 / Float(size.height), height: 1.0 / Float(size.width)) } else { - return Size(width:1.0 / Float(size.width), height:1.0 / Float(size.height)) + return Size(width: 1.0 / Float(size.width), height: 1.0 / Float(size.height)) } } - func initialStageTexelSize(for rotation:Rotation) -> Size { + func initialStageTexelSize(for rotation: Rotation) -> Size { if rotation.flipsDimensions() { - return Size(width:1.0 / Float(size.height), height:0.0) + return Size(width: 1.0 / Float(size.height), height: 0.0) } else { - return Size(width:0.0, height:1.0 / Float(size.height)) + return Size(width: 0.0, height: 1.0 / Float(size.height)) } } - public func texturePropertiesForOutputRotation(_ rotation:Rotation) -> InputTextureProperties { - return 
InputTextureProperties(textureVBO:context!.textureVBO(for:rotation), texture:texture) + public func texturePropertiesForOutputRotation(_ rotation: Rotation) -> InputTextureProperties { + return InputTextureProperties(textureVBO: context.textureVBO(for: rotation), texture: texture) } - public func texturePropertiesForTargetOrientation(_ targetOrientation:ImageOrientation) -> InputTextureProperties { + public func texturePropertiesForTargetOrientation(_ targetOrientation: ImageOrientation) -> InputTextureProperties { return texturePropertiesForOutputRotation(self.orientation.rotationNeededForOrientation(targetOrientation)) } @@ -160,9 +169,10 @@ public class Framebuffer { // MARK: - // MARK: Framebuffer cache - weak var cache:FramebufferCache? + public weak var cache: FramebufferCache? + public var shouldReturnToCache = true var framebufferRetainCount = 0 - func lock() { + public func lock() { framebufferRetainCount += 1 } @@ -172,21 +182,35 @@ public class Framebuffer { public func unlock() { framebufferRetainCount -= 1 - if (framebufferRetainCount < 1) { - if ((framebufferRetainCount < 0) && (cache != nil)) { + if framebufferRetainCount < 1 { + if (framebufferRetainCount < 0) && (cache != nil) { print("WARNING: Tried to overrelease a framebuffer") } framebufferRetainCount = 0 cache?.returnToCache(self) } } + + public static func == (lhs: Framebuffer, rhs: Framebuffer) -> Bool { + return lhs.id == rhs.id + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(id) + } + + #if DEBUG + public var debugRenderInfo: String { + "{ size: \(size.width)x\(size.height) }" + } + #endif } -func hashForFramebufferWithProperties(orientation:ImageOrientation, size:GLSize, textureOnly:Bool = false, minFilter:Int32 = GL_LINEAR, magFilter:Int32 = GL_LINEAR, wrapS:Int32 = GL_CLAMP_TO_EDGE, wrapT:Int32 = GL_CLAMP_TO_EDGE, internalFormat:Int32 = GL_RGBA, format:Int32 = GL_BGRA, type:Int32 = GL_UNSIGNED_BYTE, stencil:Bool = false) -> Int64 { - var result:Int64 = 1 - let 
prime:Int64 = 31 - let yesPrime:Int64 = 1231 - let noPrime:Int64 = 1237 +func hashForFramebufferWithProperties(orientation: ImageOrientation, size: GLSize, textureOnly: Bool = false, minFilter: Int32 = GL_LINEAR, magFilter: Int32 = GL_LINEAR, wrapS: Int32 = GL_CLAMP_TO_EDGE, wrapT: Int32 = GL_CLAMP_TO_EDGE, internalFormat: Int32 = GL_RGBA, format: Int32 = GL_BGRA, type: Int32 = GL_UNSIGNED_BYTE, stencil: Bool = false) -> Int64 { + var result: Int64 = 1 + let prime: Int64 = 31 + let yesPrime: Int64 = 1231 + let noPrime: Int64 = 1237 // TODO: Complete the rest of this result = prime * result + Int64(size.width) @@ -216,7 +240,7 @@ extension Rotation { } } - func croppedTextureCoordinates(offsetFromOrigin:Position, cropSize:Size) -> [GLfloat] { + func croppedTextureCoordinates(offsetFromOrigin: Position, cropSize: Size) -> [GLfloat] { let minX = GLfloat(offsetFromOrigin.x) let minY = GLfloat(offsetFromOrigin.y) let maxX = GLfloat(offsetFromOrigin.x) + GLfloat(cropSize.width) @@ -244,3 +268,29 @@ public extension Size { return GLint(round(Double(self.height))) } } + +#if DEBUG +import UIKit +public extension Framebuffer { + func debugUIImage() -> UIImage? 
{ + let bufferSize = Int(size.width * size.height * 4) + guard let buffer = NSMutableData(capacity: bufferSize) else { return nil } + glReadPixels(0, 0, size.width, size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), buffer.mutableBytes) + let dataProvider = CGDataProvider(dataInfo: nil, data: buffer.mutableBytes, size: bufferSize) { _, _, _ in } + guard let provider = dataProvider else { return nil } + let cgImage = CGImage(width: Int(size.width), + height: Int(size.height), + bitsPerComponent: 8, + bitsPerPixel: 32, + bytesPerRow: 4 * Int(size.width), + space: CGColorSpaceCreateDeviceRGB(), + bitmapInfo: .byteOrder32Big, + provider: provider, + decode: nil, + shouldInterpolate: false, + intent: .defaultIntent) + guard let cgImg = cgImage else { return nil } + return UIImage(cgImage: cgImg) + } +} +#endif diff --git a/framework/Source/FramebufferCache.swift b/framework/Source/FramebufferCache.swift index f62575c7..77c79aed 100755 --- a/framework/Source/FramebufferCache.swift +++ b/framework/Source/FramebufferCache.swift @@ -15,25 +15,31 @@ // TODO: Add mechanism to purge framebuffers on low memory public class FramebufferCache { - var framebufferCache = [Int64:[Framebuffer]]() - let context:OpenGLContext + var framebufferCache = [Int64: Set<Framebuffer>]() + let context: OpenGLContext - init(context:OpenGLContext) { + init(context: OpenGLContext) { self.context = context } - public func requestFramebufferWithProperties(orientation:ImageOrientation, size:GLSize, textureOnly:Bool = false, minFilter:Int32 = GL_LINEAR, magFilter:Int32 = GL_LINEAR, wrapS:Int32 = GL_CLAMP_TO_EDGE, wrapT:Int32 = GL_CLAMP_TO_EDGE, internalFormat:Int32 = GL_RGBA, format:Int32 = GL_BGRA, type:Int32 = GL_UNSIGNED_BYTE, stencil:Bool = false) -> Framebuffer { - let hash = hashForFramebufferWithProperties(orientation:orientation, size:size, textureOnly:textureOnly, minFilter:minFilter, magFilter:magFilter, wrapS:wrapS, wrapT:wrapT, internalFormat:internalFormat, format:format, type:type,
stencil:stencil) - let framebuffer:Framebuffer - if ((framebufferCache[hash]?.count ?? -1) > 0) { -// print("Restoring previous framebuffer") - framebuffer = framebufferCache[hash]!.removeLast() + public func requestFramebufferWithProperties(orientation: ImageOrientation, size: GLSize, textureOnly: Bool = false, minFilter: Int32 = GL_LINEAR, magFilter: Int32 = GL_LINEAR, wrapS: Int32 = GL_CLAMP_TO_EDGE, wrapT: Int32 = GL_CLAMP_TO_EDGE, internalFormat: Int32 = GL_RGBA, format: Int32 = GL_BGRA, type: Int32 = GL_UNSIGNED_BYTE, stencil: Bool = false) -> Framebuffer { + __dispatch_assert_queue(context.serialDispatchQueue) + let hash = hashForFramebufferWithProperties(orientation: orientation, size: size, textureOnly: textureOnly, minFilter: minFilter, magFilter: magFilter, wrapS: wrapS, wrapT: wrapT, internalFormat: internalFormat, format: format, type: type, stencil: stencil) + let framebuffer: Framebuffer + + if framebufferCache.count > 10 { + print("Warning: Runaway framebuffer cache with size: \(framebufferCache.count)") + } + + if (framebufferCache[hash]?.count ?? 
-1) > 0 { + // print("Restoring previous framebuffer") + framebuffer = framebufferCache[hash]!.removeFirst() framebuffer.orientation = orientation } else { do { - debugPrint("Generating new framebuffer at size: \(size)") + // debugPrint("Generating new framebuffer at size: \(size)") - framebuffer = try Framebuffer(context:context, orientation:orientation, size:size, textureOnly:textureOnly, minFilter:minFilter, magFilter:magFilter, wrapS:wrapS, wrapT:wrapT, internalFormat:internalFormat, format:format, type:type, stencil:stencil) + framebuffer = try Framebuffer(context: context, orientation: orientation, size: size, textureOnly: textureOnly, minFilter: minFilter, magFilter: magFilter, wrapS: wrapS, wrapT: wrapT, internalFormat: internalFormat, format: format, type: type, stencil: stencil) framebuffer.cache = self } catch { fatalError("Could not create a framebuffer of the size (\(size.width), \(size.height)), error: \(error)") @@ -42,19 +48,30 @@ public class FramebufferCache { return framebuffer } - public func purgeAllUnassignedFramebuffers() { - framebufferCache.removeAll() + public func purgeAllUnassignedFramebuffers(sync: Bool = false) { + if sync { + context.runOperationSynchronously { + self.framebufferCache.removeAll() + } + } else { + context.runOperationAsynchronously { + self.framebufferCache.removeAll() + } + } } - func returnToCache(_ framebuffer:Framebuffer) { -// print("Returning to cache: \(framebuffer)") - context.runOperationSynchronously{ - if (self.framebufferCache[framebuffer.hash] != nil) { - self.framebufferCache[framebuffer.hash]!.append(framebuffer) + func returnToCache(_ framebuffer: Framebuffer) { + // print("Returning to cache: \(framebuffer)") + context.runOperationSynchronously { + if self.framebufferCache[framebuffer.hash] != nil { + if self.framebufferCache[framebuffer.hash]!.contains(framebuffer) { + print("WARNING: add duplicated buffer to cache.") + } else { + self.framebufferCache[framebuffer.hash]!.insert(framebuffer) + } }
else { - self.framebufferCache[framebuffer.hash] = [framebuffer] + self.framebufferCache[framebuffer.hash] = Set([framebuffer]) } } } } - diff --git a/framework/Source/GPUImage-Bridging-Header.h b/framework/Source/GPUImage-Bridging-Header.h new file mode 100644 index 00000000..379e1ac3 --- /dev/null +++ b/framework/Source/GPUImage-Bridging-Header.h @@ -0,0 +1,15 @@ +// +// GPUImage-Bridging-Header.h +// GPUImage +// +// Created by Josh Bernfeld on 12/7/17. +// Copyright © 2017 Sunset Lake Software LLC. All rights reserved. +// + +#ifndef GPUImage_Bridging_Header_h +#define GPUImage_Bridging_Header_h + +#import "NSObject+Exception.h" +#import "TPCircularBuffer.h" + +#endif /* GPUImage_Bridging_Header_h */ diff --git a/framework/Source/ImageGenerator.swift b/framework/Source/ImageGenerator.swift index ea79f99b..604f7032 100644 --- a/framework/Source/ImageGenerator.swift +++ b/framework/Source/ImageGenerator.swift @@ -1,21 +1,25 @@ public class ImageGenerator: ImageSource { - public var size:Size + public var size: Size + + #if DEBUG + public var debugRenderInfo: String { "{ ImageGenerator: { output: \(imageFramebuffer.debugRenderInfo) } }," } + #endif public let targets = TargetContainer() - var imageFramebuffer:Framebuffer! + var imageFramebuffer: Framebuffer! 
- public init(size:Size) { + public init(size: Size) { self.size = size do { - imageFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:.portrait, size:GLSize(size)) + imageFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(size)) } catch { fatalError("Could not construct framebuffer of size: \(size), error:\(error)") } } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { imageFramebuffer.lock() - target.newFramebufferAvailable(imageFramebuffer, fromSourceIndex:atIndex) + target.newFramebufferAvailable(imageFramebuffer, fromSourceIndex: atIndex) } func notifyTargets() { diff --git a/framework/Source/ImageOrientation.swift b/framework/Source/ImageOrientation.swift index 59013707..0943d490 100644 --- a/framework/Source/ImageOrientation.swift +++ b/framework/Source/ImageOrientation.swift @@ -4,7 +4,7 @@ public enum ImageOrientation { case landscapeLeft case landscapeRight - func rotationNeededForOrientation(_ targetOrientation:ImageOrientation) -> Rotation { + public func rotationNeededForOrientation(_ targetOrientation: ImageOrientation) -> Rotation { switch (self, targetOrientation) { case (.portrait, .portrait), (.portraitUpsideDown, .portraitUpsideDown), (.landscapeLeft, .landscapeLeft), (.landscapeRight, .landscapeRight): return .noRotation case (.portrait, .portraitUpsideDown): return .rotate180 @@ -21,6 +21,15 @@ public enum ImageOrientation { case (.landscapeRight, .portraitUpsideDown): return .rotateClockwise } } + + var cgImageOrientation: CGImagePropertyOrientation { + switch self { + case .portrait: return .up + case .portraitUpsideDown: return .down + case .landscapeLeft: return .left + case .landscapeRight: return .right + } + } } public enum Rotation { @@ -33,10 +42,41 @@ public enum Rotation { case rotateClockwiseAndFlipVertically case 
rotateClockwiseAndFlipHorizontally - func flipsDimensions() -> Bool { + public func flipsDimensions() -> Bool { switch self { case .noRotation, .rotate180, .flipHorizontally, .flipVertically: return false case .rotateCounterclockwise, .rotateClockwise, .rotateClockwiseAndFlipVertically, .rotateClockwiseAndFlipHorizontally: return true } } } + +public extension UIImage.Orientation { + var gpuOrientation: ImageOrientation { + switch self { + case .up, .upMirrored: + return .portrait + case .down, .downMirrored: + return .portraitUpsideDown + case .left, .leftMirrored: + return .landscapeLeft + case .right, .rightMirrored: + return .landscapeRight + @unknown default: + return .portrait + } + } + + var cgImageOrientation: CGImagePropertyOrientation { + switch self { + case .up: return .up + case .down: return .down + case .left: return .left + case .right: return .right + case .upMirrored: return .upMirrored + case .downMirrored: return .downMirrored + case .leftMirrored: return .leftMirrored + case .rightMirrored: return .rightMirrored + @unknown default: return .up + } + } +} diff --git a/framework/Source/Linux/GLUTRenderWindow.swift b/framework/Source/Linux/GLUTRenderWindow.swift index fefea5a4..7062760c 100755 --- a/framework/Source/Linux/GLUTRenderWindow.swift +++ b/framework/Source/Linux/GLUTRenderWindow.swift @@ -5,20 +5,19 @@ import Foundation public class GLUTRenderWindow: ImageConsumer { public let sources = SourceContainer() - public let maximumInputs:UInt = 1 - private lazy var displayShader:ShaderProgram = { + public let maximumInputs: UInt = 1 + private lazy var displayShader: ShaderProgram = { sharedImageProcessingContext.makeCurrentContext() // self.openGLContext = sharedImageProcessingContext.context - return crashOnShaderCompileFailure("GLUTRenderWindow"){try sharedImageProcessingContext.programForVertexShader(OneInputVertexShader, fragmentShader:PassthroughFragmentShader)} + return crashOnShaderCompileFailure("GLUTRenderWindow") { try 
sharedImageProcessingContext.programForVertexShader(OneInputVertexShader, fragmentShader: PassthroughFragmentShader) } }() - - public init(width:UInt32, height:UInt32, title:String) { + public init(width: UInt32, height: UInt32, title: String) { var localArgc = Process.argc glutInit(&localArgc, Process.unsafeArgv) glutInitDisplayMode(UInt32(GLUT_DOUBLE)) glutInitWindowSize(Int32(width), Int32(height)) - glutInitWindowPosition(100,100) + glutInitWindowPosition(100, 100) glutCreateWindow(title) glViewport(0, 0, GLsizei(width), GLsizei(height)) @@ -28,7 +27,7 @@ public class GLUTRenderWindow: ImageConsumer { // glutReshapeFunc(void (*func)(int width, int height) // Maybe use this to get window reshape events } - public func newFramebufferAvailable(framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(framebuffer: Framebuffer, fromSourceIndex: UInt) { glBindFramebuffer(GLenum(GL_FRAMEBUFFER), 0) glBindRenderbuffer(GLenum(GL_RENDERBUFFER), 0) @@ -39,21 +38,20 @@ public class GLUTRenderWindow: ImageConsumer { glClearColor(0.0, 0.0, 0.0, 0.0) glClear(GLenum(GL_COLOR_BUFFER_BIT)) - renderQuadWithShader(self.displayShader, vertices:verticallyInvertedImageVertices, inputTextures:[framebuffer.texturePropertiesForTargetOrientation(.Portrait)]) + renderQuadWithShader(self.displayShader, vertices: verticallyInvertedImageVertices, inputTextures: [framebuffer.texturePropertiesForTargetOrientation(.Portrait)]) framebuffer.unlock() glutSwapBuffers() } - public func loopWithFunction(idleFunction:() -> ()) { + public func loopWithFunction(idleFunction:() -> Void) { loopFunction = idleFunction glutIdleFunc(glutCallbackFunction) glutMainLoop() } } -var loopFunction:(() -> ())! = nil +var loopFunction:(() -> Void)! 
= nil func glutCallbackFunction() { loopFunction() } - diff --git a/framework/Source/Linux/OpenGLContext-RPi.swift b/framework/Source/Linux/OpenGLContext-RPi.swift index 5d0f06d3..89572389 100755 --- a/framework/Source/Linux/OpenGLContext-RPi.swift +++ b/framework/Source/Linux/OpenGLContext-RPi.swift @@ -2,13 +2,13 @@ import COpenGLES.gles2 import CVideoCore public class OpenGLContext: SerialDispatch { - lazy var framebufferCache:FramebufferCache = { - return FramebufferCache(context:self) + lazy var framebufferCache: FramebufferCache = { + return FramebufferCache(context: self) }() - var shaderCache:[String:ShaderProgram] = [:] + var shaderCache: [String: ShaderProgram] = [:] - lazy var passthroughShader:ShaderProgram = { - return crashOnShaderCompileFailure("OpenGLContext"){return try self.programForVertexShader(OneInputVertexShader, fragmentShader:PassthroughFragmentShader)} + lazy var passthroughShader: ShaderProgram = { + return crashOnShaderCompileFailure("OpenGLContext") { return try self.programForVertexShader(OneInputVertexShader, fragmentShader: PassthroughFragmentShader) } }() // MARK: - @@ -33,25 +33,25 @@ public class OpenGLContext: SerialDispatch { // MARK: - // MARK: Device capabilities - public var maximumTextureSizeForThisDevice:GLint {get { return _maximumTextureSizeForThisDevice } } - private lazy var _maximumTextureSizeForThisDevice:GLint = { + public var maximumTextureSizeForThisDevice: GLint { get { return _maximumTextureSizeForThisDevice } } + private lazy var _maximumTextureSizeForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_TEXTURE_SIZE) }() - public var maximumTextureUnitsForThisDevice:GLint {get { return _maximumTextureUnitsForThisDevice } } - private lazy var _maximumTextureUnitsForThisDevice:GLint = { + public var maximumTextureUnitsForThisDevice: GLint { get { return _maximumTextureUnitsForThisDevice } } + private lazy var _maximumTextureUnitsForThisDevice: GLint = { return 
self.openGLDeviceSettingForOption(GL_MAX_TEXTURE_IMAGE_UNITS) }() - public var maximumVaryingVectorsForThisDevice:GLint {get { return _maximumVaryingVectorsForThisDevice } } - private lazy var _maximumVaryingVectorsForThisDevice:GLint = { + public var maximumVaryingVectorsForThisDevice: GLint { get { return _maximumVaryingVectorsForThisDevice } } + private lazy var _maximumVaryingVectorsForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_VARYING_VECTORS) }() - lazy var extensionString:String = { - return self.runOperationSynchronously{ + lazy var extensionString: String = { + return self.runOperationSynchronously { self.makeCurrentContext() - return String(cString:unsafeBitCast(glGetString(GLenum(GL_EXTENSIONS)), to:UnsafePointer<CChar>.self)) + return String(cString: unsafeBitCast(glGetString(GLenum(GL_EXTENSIONS)), to: UnsafePointer<CChar>.self)) } }() -} \ No newline at end of file +} diff --git a/framework/Source/Linux/OpenGLContext.swift b/framework/Source/Linux/OpenGLContext.swift index e56337d9..546841ab 100755 --- a/framework/Source/Linux/OpenGLContext.swift +++ b/framework/Source/Linux/OpenGLContext.swift @@ -1,20 +1,19 @@ import COpenGL public class OpenGLContext: SerialDispatch { - lazy var framebufferCache:FramebufferCache = { - return FramebufferCache(context:self) + lazy var framebufferCache: FramebufferCache = { + return FramebufferCache(context: self) }() - var shaderCache:[String:ShaderProgram] = [:] + var shaderCache: [String: ShaderProgram] = [:] - lazy var passthroughShader:ShaderProgram = { - return crashOnShaderCompileFailure("OpenGLContext"){return try self.programForVertexShader(OneInputVertexShader, fragmentShader:PassthroughFragmentShader)} + lazy var passthroughShader: ShaderProgram = { + return crashOnShaderCompileFailure("OpenGLContext") { return try self.programForVertexShader(OneInputVertexShader, fragmentShader: PassthroughFragmentShader) } }() // MARK: - // MARK: Initialization and teardown init() { -
glDisable(GLenum(GL_DEPTH_TEST)) glEnable(GLenum(GL_TEXTURE_2D)) } @@ -31,25 +30,25 @@ public class OpenGLContext: SerialDispatch { // MARK: - // MARK: Device capabilities - public var maximumTextureSizeForThisDevice:GLint {get { return _maximumTextureSizeForThisDevice } } - private lazy var _maximumTextureSizeForThisDevice:GLint = { + public var maximumTextureSizeForThisDevice: GLint { get { return _maximumTextureSizeForThisDevice } } + private lazy var _maximumTextureSizeForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_TEXTURE_SIZE) }() - public var maximumTextureUnitsForThisDevice:GLint {get { return _maximumTextureUnitsForThisDevice } } - private lazy var _maximumTextureUnitsForThisDevice:GLint = { + public var maximumTextureUnitsForThisDevice: GLint { get { return _maximumTextureUnitsForThisDevice } } + private lazy var _maximumTextureUnitsForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_TEXTURE_IMAGE_UNITS) }() - public var maximumVaryingVectorsForThisDevice:GLint {get { return _maximumVaryingVectorsForThisDevice } } - private lazy var _maximumVaryingVectorsForThisDevice:GLint = { + public var maximumVaryingVectorsForThisDevice: GLint { get { return _maximumVaryingVectorsForThisDevice } } + private lazy var _maximumVaryingVectorsForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_VARYING_VECTORS) }() - lazy var extensionString:String = { - return self.runOperationSynchronously{ + lazy var extensionString: String = { + return self.runOperationSynchronously { self.makeCurrentContext() return String.fromCString(UnsafePointer(glGetString(GLenum(GL_EXTENSIONS))))! 
} }() -} \ No newline at end of file +} diff --git a/framework/Source/Linux/RPiRenderWindow.swift b/framework/Source/Linux/RPiRenderWindow.swift index 603c97d0..c9079785 100755 --- a/framework/Source/Linux/RPiRenderWindow.swift +++ b/framework/Source/Linux/RPiRenderWindow.swift @@ -3,31 +3,31 @@ import CVideoCore import Foundation -var nativewindow = EGL_DISPMANX_WINDOW_T(element:0, width:0, height:0) // This needs to be retained at the top level or its deallocation will destroy the window system +var nativewindow = EGL_DISPMANX_WINDOW_T(element: 0, width: 0, height: 0) // This needs to be retained at the top level or its deallocation will destroy the window system public class RPiRenderWindow: ImageConsumer { public let sources = SourceContainer() - public let maximumInputs:UInt = 1 - private lazy var displayShader:ShaderProgram = { + public let maximumInputs: UInt = 1 + private lazy var displayShader: ShaderProgram = { sharedImageProcessingContext.makeCurrentContext() - return crashOnShaderCompileFailure("RPiRenderWindow"){try sharedImageProcessingContext.programForVertexShader(OneInputVertexShader, fragmentShader:PassthroughFragmentShader)} + return crashOnShaderCompileFailure("RPiRenderWindow") { try sharedImageProcessingContext.programForVertexShader(OneInputVertexShader, fragmentShader: PassthroughFragmentShader) } }() - let display:EGLDisplay - let surface:EGLSurface - let context:EGLContext + let display: EGLDisplay + let surface: EGLSurface + let context: EGLContext - let windowWidth:UInt32 - let windowHeight:UInt32 + let windowWidth: UInt32 + let windowHeight: UInt32 - public init(width:UInt32? = nil, height:UInt32? = nil) { + public init(width: UInt32? = nil, height: UInt32? 
= nil) { sharedImageProcessingContext.makeCurrentContext() display = eglGetDisplay(nil /* EGL_DEFAULT_DISPLAY */) // guard (display != EGL_NO_DISPLAY) else {throw renderingError(errorString:"Could not obtain display")} // guard (eglInitialize(display, nil, nil) != EGL_FALSE) else {throw renderingError(errorString:"Could not initialize display")} eglInitialize(display, nil, nil) - let attributes:[EGLint] = [ + let attributes: [EGLint] = [ EGL_RED_SIZE, 8, EGL_GREEN_SIZE, 8, EGL_BLUE_SIZE, 8, @@ -36,38 +36,38 @@ public class RPiRenderWindow: ImageConsumer { EGL_NONE ] - var config:EGLConfig? = nil - var num_config:EGLint = 0 + var config: EGLConfig? + var num_config: EGLint = 0 // guard (eglChooseConfig(display, attributes, &config, 1, &num_config) != EGL_FALSE) else {throw renderingError(errorString:"Could not get a framebuffer configuration")} eglChooseConfig(display, attributes, &config, 1, &num_config) eglBindAPI(EGLenum(EGL_OPENGL_ES_API)) - //context = eglCreateContext(display, config, EGL_NO_CONTEXT, context_attributes) - let context_attributes:[EGLint] = [ + // context = eglCreateContext(display, config, EGL_NO_CONTEXT, context_attributes) + let context_attributes: [EGLint] = [ EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE ] context = eglCreateContext(display, config, nil /* EGL_NO_CONTEXT*/, context_attributes) - //guard (context != EGL_NO_CONTEXT) else {throw renderingError(errorString:"Could not create a rendering context")} + // guard (context != EGL_NO_CONTEXT) else {throw renderingError(errorString:"Could not create a rendering context")} - var screen_width:UInt32 = 0 - var screen_height:UInt32 = 0 + var screen_width: UInt32 = 0 + var screen_height: UInt32 = 0 graphics_get_display_size(0 /* LCD */, &screen_width, &screen_height) self.windowWidth = width ?? screen_width self.windowHeight = height ?? 
screen_height let dispman_display = vc_dispmanx_display_open( 0 /* LCD */) let dispman_update = vc_dispmanx_update_start( 0 ) - var dst_rect = VC_RECT_T(x:0, y:0, width:Int32(windowWidth), height:Int32(windowHeight)) - var src_rect = VC_RECT_T(x:0, y:0, width:Int32(windowWidth) << 16, height:Int32(windowHeight) << 16) + var dst_rect = VC_RECT_T(x: 0, y: 0, width: Int32(windowWidth), height: Int32(windowHeight)) + var src_rect = VC_RECT_T(x: 0, y: 0, width: Int32(windowWidth) << 16, height: Int32(windowHeight) << 16) let dispman_element = vc_dispmanx_element_add(dispman_update, dispman_display, 0/*layer*/, &dst_rect, 0/*src*/, &src_rect, DISPMANX_PROTECTION_T(DISPMANX_PROTECTION_NONE), nil /*alpha*/, nil/*clamp*/, DISPMANX_TRANSFORM_T(0)/*transform*/) vc_dispmanx_update_submit_sync(dispman_update) - nativewindow = EGL_DISPMANX_WINDOW_T(element:dispman_element, width:Int32(windowWidth), height:Int32(windowHeight)) + nativewindow = EGL_DISPMANX_WINDOW_T(element: dispman_element, width: Int32(windowWidth), height: Int32(windowHeight)) surface = eglCreateWindowSurface(display, config, &nativewindow, nil) - //guard (surface != EGL_NO_SURFACE) else {throw renderingError(errorString:"Could not create a rendering surface")} + // guard (surface != EGL_NO_SURFACE) else {throw renderingError(errorString:"Could not create a rendering surface")} eglMakeCurrent(display, surface, surface, context) @@ -76,7 +76,7 @@ public class RPiRenderWindow: ImageConsumer { glClear(GLenum(GL_COLOR_BUFFER_BIT)) } - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { glBindFramebuffer(GLenum(GL_FRAMEBUFFER), 0) glBindRenderbuffer(GLenum(GL_RENDERBUFFER), 0) @@ -85,8 +85,8 @@ public class RPiRenderWindow: ImageConsumer { glClearColor(0.0, 0.0, 0.0, 0.0) glClear(GLenum(GL_COLOR_BUFFER_BIT)) - renderQuadWithShader(self.displayShader, vertices:verticallyInvertedImageVertices, 
inputTextures:[framebuffer.texturePropertiesForTargetOrientation(.portrait)]) + renderQuadWithShader(self.displayShader, vertices: verticallyInvertedImageVertices, inputTextures: [framebuffer.texturePropertiesForTargetOrientation(.portrait)]) framebuffer.unlock() eglSwapBuffers(display, surface) } -} \ No newline at end of file +} diff --git a/framework/Source/Linux/V4LCamera.swift b/framework/Source/Linux/V4LCamera.swift index 0f26146f..9d345e9b 100755 --- a/framework/Source/Linux/V4LCamera.swift +++ b/framework/Source/Linux/V4LCamera.swift @@ -122,4 +122,4 @@ public class V4LCamera:ImageSource { // Not needed for camera inputs } } -*/ \ No newline at end of file +*/ diff --git a/framework/Source/Mac/Camera.swift b/framework/Source/Mac/Camera.swift index b8f02ae6..65d59e0a 100755 --- a/framework/Source/Mac/Camera.swift +++ b/framework/Source/Mac/Camera.swift @@ -4,10 +4,10 @@ import AVFoundation let initialBenchmarkFramesToIgnore = 5 public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate { - public var orientation:ImageOrientation - public var runBenchmark:Bool = false - public var logFPS:Bool = false - public var audioEncodingTarget:AudioEncodingTarget? { + public var orientation: ImageOrientation + public var runBenchmark = false + public var logFPS = false + public var audioEncodingTarget: AudioEncodingTarget? { didSet { guard let audioEncodingTarget = audioEncodingTarget else { self.removeAudioInputsAndOutputs() @@ -23,28 +23,28 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } public let targets = TargetContainer() - let captureSession:AVCaptureSession - let inputCamera:AVCaptureDevice - let videoInput:AVCaptureDeviceInput! - let videoOutput:AVCaptureVideoDataOutput! - var microphone:AVCaptureDevice? - var audioInput:AVCaptureDeviceInput? - var audioOutput:AVCaptureAudioDataOutput? 
- - var supportsFullYUVRange:Bool = false - let captureAsYUV:Bool - let yuvConversionShader:ShaderProgram? - let frameRenderingSemaphore = DispatchSemaphore(value:1) - let cameraProcessingQueue = DispatchQueue.global(priority:standardProcessingQueuePriority) - let audioProcessingQueue = DispatchQueue.global(priority:lowProcessingQueuePriority) + let captureSession: AVCaptureSession + let inputCamera: AVCaptureDevice + let videoInput: AVCaptureDeviceInput! + let videoOutput: AVCaptureVideoDataOutput! + var microphone: AVCaptureDevice? + var audioInput: AVCaptureDeviceInput? + var audioOutput: AVCaptureAudioDataOutput? + + var supportsFullYUVRange = false + let captureAsYUV: Bool + let yuvConversionShader: ShaderProgram? + let frameRenderingSemaphore = DispatchSemaphore(value: 1) + let cameraProcessingQueue = standardProcessingQueue + let audioProcessingQueue = lowProcessingQueue var numberOfFramesCaptured = 0 - var totalFrameTimeDuringCapture:Double = 0.0 + var totalFrameTimeDuringCapture: Double = 0.0 var framesSinceLastCheck = 0 var lastCheckTime = CFAbsoluteTimeGetCurrent() - public init(sessionPreset:String, cameraDevice:AVCaptureDevice? = nil, orientation:ImageOrientation = .portrait, captureAsYUV:Bool = true) throws { - self.inputCamera = cameraDevice ?? AVCaptureDevice.defaultDevice(withMediaType:AVMediaTypeVideo) + public init(sessionPreset: String, cameraDevice: AVCaptureDevice? = nil, orientation: ImageOrientation = .portrait, captureAsYUV: Bool = true) throws { + self.inputCamera = cameraDevice ?? 
AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo) self.orientation = orientation self.captureAsYUV = captureAsYUV @@ -52,7 +52,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer self.captureSession.beginConfiguration() do { - self.videoInput = try AVCaptureDeviceInput(device:inputCamera) + self.videoInput = try AVCaptureDeviceInput(device: inputCamera) } catch { self.videoInput = nil self.videoOutput = nil @@ -60,7 +60,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer super.init() throw error } - if (captureSession.canAddInput(videoInput)) { + if captureSession.canAddInput(videoInput) { captureSession.addInput(videoInput) } @@ -72,24 +72,24 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer supportsFullYUVRange = false let supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes for currentPixelFormat in supportedPixelFormats! { - if ((currentPixelFormat as! NSNumber).int32Value == Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)) { + if (currentPixelFormat as! 
NSNumber).int32Value == Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { supportsFullYUVRange = true } } - if (supportsFullYUVRange) { - yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] + if supportsFullYUVRange { + yuvConversionShader = crashOnShaderCompileFailure("Camera") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionFullRangeFragmentShader) } + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable: NSNumber(value: Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] } else { - yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionVideoRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange))] + yuvConversionShader = crashOnShaderCompileFailure("Camera") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionVideoRangeFragmentShader) } + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable: NSNumber(value: Int32(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange))] } } else { yuvConversionShader = nil - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_32BGRA))] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable: NSNumber(value: Int32(kCVPixelFormatType_32BGRA))] } - if (captureSession.canAddOutput(videoOutput)) { + if 
captureSession.canAddOutput(videoOutput) { captureSession.addOutput(videoOutput) } captureSession.sessionPreset = sessionPreset @@ -97,24 +97,24 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer super.init() - videoOutput.setSampleBufferDelegate(self, queue:cameraProcessingQueue) + videoOutput.setSampleBufferDelegate(self, queue: cameraProcessingQueue) } deinit { - sharedImageProcessingContext.runOperationSynchronously{ + sharedImageProcessingContext.runOperationSynchronously { self.stopCapture() - self.videoOutput.setSampleBufferDelegate(nil, queue:nil) - self.audioOutput?.setSampleBufferDelegate(nil, queue:nil) + self.videoOutput.setSampleBufferDelegate(nil, queue: nil) + self.audioOutput?.setSampleBufferDelegate(nil, queue: nil) } } - - public func captureOutput(_ captureOutput:AVCaptureOutput!, didOutputSampleBuffer sampleBuffer:CMSampleBuffer!, from connection:AVCaptureConnection!) { - guard (captureOutput != audioOutput) else { + + public func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { + guard captureOutput != audioOutput else { self.processAudioSampleBuffer(sampleBuffer) return } - guard (frameRenderingSemaphore.wait(timeout:DispatchTime.now()) == DispatchTimeoutResult.success) else { return } + guard frameRenderingSemaphore.wait(timeout: DispatchTime.now()) == DispatchTimeoutResult.success else { return } let startTime = CFAbsoluteTimeGetCurrent() let cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer)! 
@@ -123,45 +123,45 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer let currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) CVPixelBufferLockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) - sharedImageProcessingContext.runOperationAsynchronously{ - let cameraFramebuffer:Framebuffer + sharedImageProcessingContext.runOperationAsynchronously { + let cameraFramebuffer: Framebuffer - if (self.captureAsYUV) { - let luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:self.orientation, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) + if self.captureAsYUV { + let luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: self.orientation, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true) luminanceFramebuffer.lock() glActiveTexture(GLenum(GL_TEXTURE0)) glBindTexture(GLenum(GL_TEXTURE_2D), luminanceFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), 0, GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(cameraFrame, 0)) - let chrominanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:self.orientation, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) + let chrominanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: self.orientation, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true) chrominanceFramebuffer.lock() glActiveTexture(GLenum(GL_TEXTURE1)) glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), 
0, GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(cameraFrame, 1)) - cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) + cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: false) - let conversionMatrix:Matrix3x3 - if (self.supportsFullYUVRange) { + let conversionMatrix: Matrix3x3 + if self.supportsFullYUVRange { conversionMatrix = colorConversionMatrix601FullRangeDefault } else { conversionMatrix = colorConversionMatrix601Default } - convertYUVToRGB(shader:self.yuvConversionShader!, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer, resultFramebuffer:cameraFramebuffer, colorConversionMatrix:conversionMatrix) + convertYUVToRGB(shader: self.yuvConversionShader!, luminanceFramebuffer: luminanceFramebuffer, chrominanceFramebuffer: chrominanceFramebuffer, resultFramebuffer: cameraFramebuffer, colorConversionMatrix: conversionMatrix) } else { - cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:self.orientation, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) + cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: self.orientation, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true) glActiveTexture(GLenum(GL_TEXTURE0)) glBindTexture(GLenum(GL_TEXTURE_2D), cameraFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_RGBA, GLsizei(bufferWidth), GLsizei(bufferHeight), 0, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(cameraFrame)) } 
CVPixelBufferUnlockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) - cameraFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(currentTime)) + cameraFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(currentTime)) self.updateTargetsWithFramebuffer(cameraFramebuffer) if self.runBenchmark { self.numberOfFramesCaptured += 1 - if (self.numberOfFramesCaptured > initialBenchmarkFramesToIgnore) { + if self.numberOfFramesCaptured > initialBenchmarkFramesToIgnore { let currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime) self.totalFrameTimeDuringCapture += currentFrameTime print("Average frame time : \(1000.0 * self.totalFrameTimeDuringCapture / Double(self.numberOfFramesCaptured - initialBenchmarkFramesToIgnore)) ms") @@ -170,7 +170,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } if self.logFPS { - if ((CFAbsoluteTimeGetCurrent() - self.lastCheckTime) > 1.0) { + if (CFAbsoluteTimeGetCurrent() - self.lastCheckTime) > 1.0 { self.lastCheckTime = CFAbsoluteTimeGetCurrent() print("FPS: \(self.framesSinceLastCheck)") self.framesSinceLastCheck = 0 @@ -184,23 +184,23 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } public func startCapture() { - sharedImageProcessingContext.runOperationAsynchronously{ + sharedImageProcessingContext.runOperationAsynchronously { self.numberOfFramesCaptured = 0 self.totalFrameTimeDuringCapture = 0 } - if (!captureSession.isRunning) { + if !captureSession.isRunning { captureSession.startRunning() } } public func stopCapture() { - if (captureSession.isRunning) { + if captureSession.isRunning { captureSession.stopRunning() } } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { // Not needed for camera inputs } @@ -208,14 +208,14 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer // MARK: Audio 
processing func addAudioInputsAndOutputs() throws { - guard (audioOutput == nil) else { return } + guard audioOutput == nil else { return } captureSession.beginConfiguration() defer { captureSession.commitConfiguration() } microphone = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) - audioInput = try AVCaptureDeviceInput(device:microphone) + audioInput = try AVCaptureDeviceInput(device: microphone) if captureSession.canAddInput(audioInput) { captureSession.addInput(audioInput) } @@ -223,11 +223,11 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if captureSession.canAddOutput(audioOutput) { captureSession.addOutput(audioOutput) } - audioOutput?.setSampleBufferDelegate(self, queue:audioProcessingQueue) + audioOutput?.setSampleBufferDelegate(self, queue: audioProcessingQueue) } func removeAudioInputsAndOutputs() { - guard (audioOutput != nil) else { return } + guard audioOutput != nil else { return } captureSession.beginConfiguration() captureSession.removeInput(audioInput!) 
@@ -238,7 +238,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer captureSession.commitConfiguration() } - func processAudioSampleBuffer(_ sampleBuffer:CMSampleBuffer) { + func processAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer) { self.audioEncodingTarget?.processAudioBuffer(sampleBuffer) } } diff --git a/framework/Source/Mac/MovieInput.swift b/framework/Source/Mac/MovieInput.swift index ec6cec15..964a0a98 100644 --- a/framework/Source/Mac/MovieInput.swift +++ b/framework/Source/Mac/MovieInput.swift @@ -4,39 +4,39 @@ public class MovieInput: ImageSource { public let targets = TargetContainer() public var runBenchmark = false - let yuvConversionShader:ShaderProgram - let asset:AVAsset - let assetReader:AVAssetReader - let playAtActualSpeed:Bool - let loop:Bool + let yuvConversionShader: ShaderProgram + let asset: AVAsset + let assetReader: AVAssetReader + let playAtActualSpeed: Bool + let loop: Bool var videoEncodingIsFinished = false var previousFrameTime = kCMTimeZero var previousActualFrameTime = CFAbsoluteTimeGetCurrent() var numberOfFramesCaptured = 0 - var totalFrameTimeDuringCapture:Double = 0.0 + var totalFrameTimeDuringCapture: Double = 0.0 // TODO: Add movie reader synchronization // TODO: Someone will have to add back in the AVPlayerItem logic, because I don't know how that works - public init(asset:AVAsset, playAtActualSpeed:Bool = false, loop:Bool = false) throws { + public init(asset: AVAsset, playAtActualSpeed: Bool = false, loop: Bool = false) throws { self.asset = asset self.playAtActualSpeed = playAtActualSpeed self.loop = loop - self.yuvConversionShader = crashOnShaderCompileFailure("MovieInput"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} + self.yuvConversionShader = crashOnShaderCompileFailure("MovieInput") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), 
fragmentShader: YUVConversionFullRangeFragmentShader) } - assetReader = try AVAssetReader(asset:self.asset) + assetReader = try AVAssetReader(asset: self.asset) - let outputSettings:[String:AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String):NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] - let readerVideoTrackOutput = AVAssetReaderTrackOutput(track:self.asset.tracks(withMediaType: AVMediaTypeVideo)[0], outputSettings:outputSettings) + let outputSettings: [String: AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String): NSNumber(value: Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] + let readerVideoTrackOutput = AVAssetReaderTrackOutput(track: self.asset.tracks(withMediaType: AVMediaTypeVideo)[0], outputSettings: outputSettings) readerVideoTrackOutput.alwaysCopiesSampleData = false assetReader.add(readerVideoTrackOutput) // TODO: Audio here } - public convenience init(url:URL, playAtActualSpeed:Bool = false, loop:Bool = false) throws { - let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)] - let inputAsset = AVURLAsset(url:url, options:inputOptions) - try self.init(asset:inputAsset, playAtActualSpeed:playAtActualSpeed, loop:loop) + public convenience init(url: URL, playAtActualSpeed: Bool = false, loop: Bool = false) throws { + let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true)] + let inputAsset = AVURLAsset(url: url, options: inputOptions) + try self.init(asset: inputAsset, playAtActualSpeed: playAtActualSpeed, loop: loop) } // MARK: - @@ -44,30 +44,30 @@ public class MovieInput: ImageSource { public func start() { asset.loadValuesAsynchronously(forKeys: ["tracks"], completionHandler: { - DispatchQueue.global(priority:standardProcessingQueuePriority).async { - guard (self.asset.statusOfValue(forKey:"tracks", error:nil) == .loaded) else { return } + DispatchQueue.global().async { + guard self.asset.statusOfValue(forKey: "tracks", error: nil) == .loaded 
else { return } guard self.assetReader.startReading() else { debugPrint("Couldn't start reading") return } - var readerVideoTrackOutput:AVAssetReaderOutput? = nil; + var readerVideoTrackOutput: AVAssetReaderOutput? for output in self.assetReader.outputs { - if(output.mediaType == AVMediaTypeVideo) { - readerVideoTrackOutput = output; + if output.mediaType == AVMediaTypeVideo { + readerVideoTrackOutput = output } } - while (self.assetReader.status == .reading) { - self.readNextVideoFrame(from:readerVideoTrackOutput!) + while self.assetReader.status == .reading { + self.readNextVideoFrame(from: readerVideoTrackOutput!) } - if (self.assetReader.status == .completed) { + if self.assetReader.status == .completed { self.assetReader.cancelReading() - if (self.loop) { + if self.loop { // TODO: Restart movie processing } else { self.endProcessing() @@ -83,16 +83,15 @@ public class MovieInput: ImageSource { } func endProcessing() { - } // MARK: - // MARK: Internal processing functions - func readNextVideoFrame(from videoTrackOutput:AVAssetReaderOutput) { - if ((assetReader.status == .reading) && !videoEncodingIsFinished) { + func readNextVideoFrame(from videoTrackOutput: AVAssetReaderOutput) { + if (assetReader.status == .reading) && !videoEncodingIsFinished { if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() { - if (playAtActualSpeed) { + if playAtActualSpeed { // Do this outside of the video processing queue to not slow that down while waiting let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) let differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime) @@ -101,7 +100,7 @@ public class MovieInput: ImageSource { let frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame) let actualTimeDifference = currentActualTime - previousActualFrameTime - if (frameTimeDifference > actualTimeDifference) { + if frameTimeDifference > actualTimeDifference { usleep(UInt32(round(1000000.0 * (frameTimeDifference - 
actualTimeDifference)))) } @@ -109,14 +108,14 @@ public class MovieInput: ImageSource { previousActualFrameTime = CFAbsoluteTimeGetCurrent() } - sharedImageProcessingContext.runOperationSynchronously{ - self.process(movieFrame:sampleBuffer) + sharedImageProcessingContext.runOperationSynchronously { + self.process(movieFrame: sampleBuffer) CMSampleBufferInvalidate(sampleBuffer) } } else { - if (!loop) { + if !loop { videoEncodingIsFinished = true - if (videoEncodingIsFinished) { + if videoEncodingIsFinished { self.endProcessing() } } @@ -130,15 +129,15 @@ public class MovieInput: ImageSource { } - func process(movieFrame frame:CMSampleBuffer) { + func process(movieFrame frame: CMSampleBuffer) { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(frame) let movieFrame = CMSampleBufferGetImageBuffer(frame)! // processingFrameTime = currentSampleTime - self.process(movieFrame:movieFrame, withSampleTime:currentSampleTime) + self.process(movieFrame: movieFrame, withSampleTime: currentSampleTime) } - func process(movieFrame:CVPixelBuffer, withSampleTime:CMTime) { + func process(movieFrame: CVPixelBuffer, withSampleTime: CMTime) { let bufferHeight = CVPixelBufferGetHeight(movieFrame) let bufferWidth = CVPixelBufferGetWidth(movieFrame) CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) @@ -157,24 +156,24 @@ public class MovieInput: ImageSource { let startTime = CFAbsoluteTimeGetCurrent() - let luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) + let luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true) luminanceFramebuffer.lock() glActiveTexture(GLenum(GL_TEXTURE0)) glBindTexture(GLenum(GL_TEXTURE_2D), 
luminanceFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), 0, GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(movieFrame, 0)) - let chrominanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) + let chrominanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true) chrominanceFramebuffer.lock() glActiveTexture(GLenum(GL_TEXTURE1)) glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), 0, GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(movieFrame, 1)) - let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) + let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: false) - convertYUVToRGB(shader:self.yuvConversionShader, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer, resultFramebuffer:movieFramebuffer, colorConversionMatrix:conversionMatrix) + convertYUVToRGB(shader: self.yuvConversionShader, luminanceFramebuffer: luminanceFramebuffer, chrominanceFramebuffer: chrominanceFramebuffer, resultFramebuffer: movieFramebuffer, colorConversionMatrix: conversionMatrix) CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) - 
movieFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(withSampleTime)) + movieFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(withSampleTime)) self.updateTargetsWithFramebuffer(movieFramebuffer) if self.runBenchmark { @@ -186,7 +185,7 @@ public class MovieInput: ImageSource { } } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { // Not needed for movie inputs } } diff --git a/framework/Source/Mac/MovieOutput.swift b/framework/Source/Mac/MovieOutput.swift index a9b9705d..e6a16c5c 100644 --- a/framework/Source/Mac/MovieOutput.swift +++ b/framework/Source/Mac/MovieOutput.swift @@ -2,78 +2,78 @@ import AVFoundation public protocol AudioEncodingTarget { func activateAudioTrack() - func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) + func processAudioBuffer(_ sampleBuffer: CMSampleBuffer) } public class MovieOutput: ImageConsumer, AudioEncodingTarget { public let sources = SourceContainer() - public let maximumInputs:UInt = 1 + public let maximumInputs: UInt = 1 - let assetWriter:AVAssetWriter - let assetWriterVideoInput:AVAssetWriterInput - var assetWriterAudioInput:AVAssetWriterInput? - let assetWriterPixelBufferInput:AVAssetWriterInputPixelBufferAdaptor - let size:Size + let assetWriter: AVAssetWriter + let assetWriterVideoInput: AVAssetWriterInput + var assetWriterAudioInput: AVAssetWriterInput? + let assetWriterPixelBufferInput: AVAssetWriterInputPixelBufferAdaptor + let size: Size private var isRecording = false private var videoEncodingIsFinished = false private var audioEncodingIsFinished = false - private var startTime:CMTime? + private var startTime: CMTime? 
private var previousFrameTime = kCMTimeNegativeInfinity private var previousAudioTime = kCMTimeNegativeInfinity - private var encodingLiveVideo:Bool + private var encodingLiveVideo: Bool - public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, settings:[String:AnyObject]? = nil) throws { + public init(URL: Foundation.URL, size: Size, fileType: String = AVFileTypeQuickTimeMovie, liveVideo: Bool = false, settings: [String: AnyObject]? = nil) throws { self.size = size - assetWriter = try AVAssetWriter(url:URL, fileType:fileType) + assetWriter = try AVAssetWriter(url: URL, fileType: fileType) // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case. assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000) - var localSettings:[String:AnyObject] + var localSettings: [String: AnyObject] if let settings = settings { localSettings = settings } else { - localSettings = [String:AnyObject]() + localSettings = [String: AnyObject]() } - localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? NSNumber(value:size.width) - localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? NSNumber(value:size.height) - localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecH264 as NSString + localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? NSNumber(value: size.width) + localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? NSNumber(value: size.height) + localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? 
AVVideoCodecH264 as NSString - assetWriterVideoInput = AVAssetWriterInput(mediaType:AVMediaTypeVideo, outputSettings:localSettings) + assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: localSettings) assetWriterVideoInput.expectsMediaDataInRealTime = liveVideo encodingLiveVideo = liveVideo // You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA. - let sourcePixelBufferAttributesDictionary:[String:AnyObject] = [kCVPixelBufferPixelFormatTypeKey as String:NSNumber(value:Int32(kCVPixelFormatType_32BGRA)), - kCVPixelBufferWidthKey as String:NSNumber(value:size.width), - kCVPixelBufferHeightKey as String:NSNumber(value:size.height)] + let sourcePixelBufferAttributesDictionary: [String: AnyObject] = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: Int32(kCVPixelFormatType_32BGRA)), + kCVPixelBufferWidthKey as String: NSNumber(value: size.width), + kCVPixelBufferHeightKey as String: NSNumber(value: size.height)] - assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput:assetWriterVideoInput, sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary) + assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary) assetWriter.add(assetWriterVideoInput) } public func startRecording() { startTime = nil - sharedImageProcessingContext.runOperationSynchronously{ + sharedImageProcessingContext.runOperationSynchronously { self.isRecording = self.assetWriter.startWriting() } } public func finishRecording(_ completionCallback:(() -> Void)? 
= nil) { - sharedImageProcessingContext.runOperationSynchronously{ + sharedImageProcessingContext.runOperationSynchronously { self.isRecording = false - if (self.assetWriter.status == .completed || self.assetWriter.status == .cancelled || self.assetWriter.status == .unknown) { - sharedImageProcessingContext.runOperationAsynchronously{ + if self.assetWriter.status == .completed || self.assetWriter.status == .cancelled || self.assetWriter.status == .unknown { + sharedImageProcessingContext.runOperationAsynchronously { completionCallback?() } return } - if ((self.assetWriter.status == .writing) && (!self.videoEncodingIsFinished)) { + if (self.assetWriter.status == .writing) && (!self.videoEncodingIsFinished) { self.videoEncodingIsFinished = true self.assetWriterVideoInput.markAsFinished() } - if ((self.assetWriter.status == .writing) && (!self.audioEncodingIsFinished)) { + if (self.assetWriter.status == .writing) && (!self.audioEncodingIsFinished) { self.audioEncodingIsFinished = true self.assetWriterAudioInput?.markAsFinished() } @@ -82,13 +82,13 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { if let callback = completionCallback { self.assetWriter.finishWriting(completionHandler: callback) } else { - self.assetWriter.finishWriting{} + self.assetWriter.finishWriting {} } } } - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { defer { framebuffer.unlock() } @@ -97,10 +97,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // Ignore still images and other non-video updates (do I still need this?) 
guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case - guard (frameTime != previousFrameTime) else { return } + guard frameTime != previousFrameTime else { return } - if (startTime == nil) { - if (assetWriter.status != .writing) { + if startTime == nil { + if assetWriter.status != .writing { assetWriter.startWriting() } @@ -109,35 +109,33 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } // TODO: Run the following on an internal movie recording dispatch queue, context - guard (assetWriterVideoInput.isReadyForMoreMediaData || (!encodingLiveVideo)) else { + guard assetWriterVideoInput.isReadyForMoreMediaData || (!encodingLiveVideo) else { debugPrint("Had to drop a frame at time \(frameTime)") return } - var pixelBufferFromPool:CVPixelBuffer? = nil + var pixelBufferFromPool: CVPixelBuffer? let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, assetWriterPixelBufferInput.pixelBufferPool!, &pixelBufferFromPool) guard let pixelBuffer = pixelBufferFromPool, (pixelBufferStatus == kCVReturnSuccess) else { return } - - - renderIntoPixelBuffer(pixelBuffer, framebuffer:framebuffer) + renderIntoPixelBuffer(pixelBuffer, framebuffer: framebuffer) - if (!assetWriterPixelBufferInput.append(pixelBuffer, withPresentationTime:frameTime)) { + if !assetWriterPixelBufferInput.append(pixelBuffer, withPresentationTime: frameTime) { print("Problem appending pixel buffer at time: \(frameTime)") } CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) } - func renderIntoPixelBuffer(_ pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) { - let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:GLSize(self.size)) + func renderIntoPixelBuffer(_ pixelBuffer: CVPixelBuffer, framebuffer: Framebuffer) 
{ + let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: framebuffer.orientation, size: GLSize(self.size)) renderFramebuffer.lock() renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.black) - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings: ShaderUniformSettings(), vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [framebuffer.texturePropertiesForOutputRotation(.noRotation)]) CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) glReadPixels(0, 0, renderFramebuffer.size.width, renderFramebuffer.size.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(pixelBuffer)) @@ -149,18 +147,18 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func activateAudioTrack() { // TODO: Add ability to set custom output settings - assetWriterAudioInput = AVAssetWriterInput(mediaType:AVMediaTypeAudio, outputSettings:nil) + assetWriterAudioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: nil) assetWriter.add(assetWriterAudioInput!) 
assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo } - public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) { + public func processAudioBuffer(_ sampleBuffer: CMSampleBuffer) { guard let assetWriterAudioInput = assetWriterAudioInput else { return } - sharedImageProcessingContext.runOperationSynchronously{ + sharedImageProcessingContext.runOperationSynchronously { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) - if (self.startTime == nil) { - if (self.assetWriter.status != .writing) { + if self.startTime == nil { + if self.assetWriter.status != .writing { self.assetWriter.startWriting() } @@ -168,27 +166,26 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.startTime = currentSampleTime } - guard (assetWriterAudioInput.isReadyForMoreMediaData || (!self.encodingLiveVideo)) else { + guard assetWriterAudioInput.isReadyForMoreMediaData || (!self.encodingLiveVideo) else { return } - if (!assetWriterAudioInput.append(sampleBuffer)) { + if !assetWriterAudioInput.append(sampleBuffer) { print("Trouble appending audio sample buffer") } } } } - public extension Timestamp { - public init(_ time:CMTime) { + public init(_ time: CMTime) { self.value = time.value self.timescale = time.timescale - self.flags = TimestampFlags(rawValue:time.flags.rawValue) + self.flags = TimestampFlags(rawValue: time.flags.rawValue) self.epoch = time.epoch } - public var asCMTime:CMTime { + public var asCMTime: CMTime { get { return CMTimeMakeWithEpoch(value, timescale, epoch) } diff --git a/framework/Source/Mac/OpenGLContext.swift b/framework/Source/Mac/OpenGLContext.swift index fab790dc..9a45bb8e 100755 --- a/framework/Source/Mac/OpenGLContext.swift +++ b/framework/Source/Mac/OpenGLContext.swift @@ -4,47 +4,47 @@ import Cocoa // TODO: Figure out way to allow for multiple contexts for different GPUs public class OpenGLContext: SerialDispatch { - public lazy var framebufferCache:FramebufferCache = { - return 
FramebufferCache(context:self) + public lazy var framebufferCache: FramebufferCache = { + return FramebufferCache(context: self) }() - var shaderCache:[String:ShaderProgram] = [:] - public let standardImageVBO:GLuint - var textureVBOs:[Rotation:GLuint] = [:] + var shaderCache: [String: ShaderProgram] = [:] + public let standardImageVBO: GLuint + var textureVBOs: [Rotation: GLuint] = [:] - let context:NSOpenGLContext + let context: NSOpenGLContext - lazy var passthroughShader:ShaderProgram = { - return crashOnShaderCompileFailure("OpenGLContext"){return try self.programForVertexShader(OneInputVertexShader, fragmentShader:PassthroughFragmentShader)} + lazy var passthroughShader: ShaderProgram = { + return crashOnShaderCompileFailure("OpenGLContext") { return try self.programForVertexShader(OneInputVertexShader, fragmentShader: PassthroughFragmentShader) } }() - public let serialDispatchQueue:DispatchQueue = DispatchQueue(label: "com.sunsetlakesoftware.GPUImage.processingQueue", attributes: []) + public let serialDispatchQueue = DispatchQueue(label: "com.sunsetlakesoftware.GPUImage.processingQueue", attributes: []) public let dispatchQueueKey = DispatchSpecificKey() // MARK: - // MARK: Initialization and teardown init() { - serialDispatchQueue.setSpecific(key:dispatchQueueKey, value:81) + serialDispatchQueue.setSpecific(key: dispatchQueueKey, value: 81) - let pixelFormatAttributes:[NSOpenGLPixelFormatAttribute] = [ + let pixelFormatAttributes: [NSOpenGLPixelFormatAttribute] = [ NSOpenGLPixelFormatAttribute(NSOpenGLPFADoubleBuffer), NSOpenGLPixelFormatAttribute(NSOpenGLPFAAccelerated), 0, 0 ] - guard let pixelFormat = NSOpenGLPixelFormat(attributes:pixelFormatAttributes) else { + guard let pixelFormat = NSOpenGLPixelFormat(attributes: pixelFormatAttributes) else { fatalError("No appropriate pixel format found when creating OpenGL context.") } // TODO: Take into account the sharegroup - guard let generatedContext = NSOpenGLContext(format:pixelFormat, share:nil) else { + 
guard let generatedContext = NSOpenGLContext(format: pixelFormat, share: nil) else { fatalError("Unable to create an OpenGL context. The GPUImage framework requires OpenGL support to work.") } self.context = generatedContext generatedContext.makeCurrentContext() - standardImageVBO = generateVBO(for:standardImageVertices) + standardImageVBO = generateVBO(for: standardImageVertices) generateTextureVBOs() glDisable(GLenum(GL_DEPTH_TEST)) @@ -65,25 +65,25 @@ public class OpenGLContext: SerialDispatch { // MARK: - // MARK: Device capabilities - public var maximumTextureSizeForThisDevice:GLint {get { return _maximumTextureSizeForThisDevice } } - private lazy var _maximumTextureSizeForThisDevice:GLint = { + public var maximumTextureSizeForThisDevice: GLint { get { return _maximumTextureSizeForThisDevice } } + private lazy var _maximumTextureSizeForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_TEXTURE_SIZE) }() - public var maximumTextureUnitsForThisDevice:GLint {get { return _maximumTextureUnitsForThisDevice } } - private lazy var _maximumTextureUnitsForThisDevice:GLint = { + public var maximumTextureUnitsForThisDevice: GLint { get { return _maximumTextureUnitsForThisDevice } } + private lazy var _maximumTextureUnitsForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_TEXTURE_IMAGE_UNITS) }() - public var maximumVaryingVectorsForThisDevice:GLint {get { return _maximumVaryingVectorsForThisDevice } } - private lazy var _maximumVaryingVectorsForThisDevice:GLint = { + public var maximumVaryingVectorsForThisDevice: GLint { get { return _maximumVaryingVectorsForThisDevice } } + private lazy var _maximumVaryingVectorsForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_VARYING_VECTORS) }() - lazy var extensionString:String = { - return self.runOperationSynchronously{ + lazy var extensionString: String = { + return self.runOperationSynchronously { self.makeCurrentContext() - return 
String(cString:unsafeBitCast(glGetString(GLenum(GL_EXTENSIONS)), to:UnsafePointer.self)) + return String(cString: unsafeBitCast(glGetString(GLenum(GL_EXTENSIONS)), to: UnsafePointer.self)) } }() } diff --git a/framework/Source/Mac/PictureInput.swift b/framework/Source/Mac/PictureInput.swift index fae3511d..bd5d045c 100755 --- a/framework/Source/Mac/PictureInput.swift +++ b/framework/Source/Mac/PictureInput.swift @@ -3,30 +3,30 @@ import Cocoa public class PictureInput: ImageSource { public let targets = TargetContainer() - var imageFramebuffer:Framebuffer! - var hasProcessedImage:Bool = false + var imageFramebuffer: Framebuffer! + var hasProcessedImage = false - public init(image:CGImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { + public init(image: CGImage, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation = .portrait) { // TODO: Dispatch this whole thing asynchronously to move image loading off main thread let widthOfImage = GLint(image.width) let heightOfImage = GLint(image.height) // If passed an empty image reference, CGContextDrawImage will fail in future versions of the SDK. 
- guard((widthOfImage > 0) && (heightOfImage > 0)) else { fatalError("Tried to pass in a zero-sized image") } + guard (widthOfImage > 0) && (heightOfImage > 0) else { fatalError("Tried to pass in a zero-sized image") } var widthToUseForTexture = widthOfImage var heightToUseForTexture = heightOfImage var shouldRedrawUsingCoreGraphics = false // For now, deal with images larger than the maximum texture size by resizing to be within that limit - let scaledImageSizeToFitOnGPU = GLSize(sharedImageProcessingContext.sizeThatFitsWithinATextureForSize(Size(width:Float(widthOfImage), height:Float(heightOfImage)))) - if ((scaledImageSizeToFitOnGPU.width != widthOfImage) && (scaledImageSizeToFitOnGPU.height != heightOfImage)) { + let scaledImageSizeToFitOnGPU = GLSize(sharedImageProcessingContext.sizeThatFitsWithinATextureForSize(Size(width: Float(widthOfImage), height: Float(heightOfImage)))) + if (scaledImageSizeToFitOnGPU.width != widthOfImage) && (scaledImageSizeToFitOnGPU.height != heightOfImage) { widthToUseForTexture = scaledImageSizeToFitOnGPU.width heightToUseForTexture = scaledImageSizeToFitOnGPU.height shouldRedrawUsingCoreGraphics = true } - if (smoothlyScaleOutput) { + if smoothlyScaleOutput { // In order to use mipmaps, you need to provide power-of-two textures, so convert to the next largest power of two and stretch to fill let powerClosestToWidth = ceil(log2(Float(widthToUseForTexture))) let powerClosestToHeight = ceil(log2(Float(heightToUseForTexture))) @@ -36,33 +36,32 @@ public class PictureInput: ImageSource { shouldRedrawUsingCoreGraphics = true } - var imageData:UnsafeMutablePointer! - var dataFromImageDataProvider:CFData! + var imageData: UnsafeMutablePointer! + var dataFromImageDataProvider: CFData! var format = GL_BGRA - if (!shouldRedrawUsingCoreGraphics) { + if !shouldRedrawUsingCoreGraphics { /* Check that the memory layout is compatible with GL, as we cannot use glPixelStore to * tell GL about the memory layout with GLES. 
*/ - if ((image.bytesPerRow != image.width * 4) || (image.bitsPerPixel != 32) || (image.bitsPerComponent != 8)) - { + if (image.bytesPerRow != image.width * 4) || (image.bitsPerPixel != 32) || (image.bitsPerComponent != 8) { shouldRedrawUsingCoreGraphics = true } else { /* Check that the bitmap pixel format is compatible with GL */ let bitmapInfo = image.bitmapInfo - if (bitmapInfo.contains(.floatComponents)) { + if bitmapInfo.contains(.floatComponents) { /* We don't support float components for use directly in GL */ shouldRedrawUsingCoreGraphics = true } else { - let alphaInfo = CGImageAlphaInfo(rawValue:bitmapInfo.rawValue & CGBitmapInfo.alphaInfoMask.rawValue) - if (bitmapInfo.contains(.byteOrder32Little)) { + let alphaInfo = CGImageAlphaInfo(rawValue: bitmapInfo.rawValue & CGBitmapInfo.alphaInfoMask.rawValue) + if bitmapInfo.contains(.byteOrder32Little) { /* Little endian, for alpha-first we can use this bitmap directly in GL */ - if ((alphaInfo != CGImageAlphaInfo.premultipliedFirst) && (alphaInfo != CGImageAlphaInfo.first) && (alphaInfo != CGImageAlphaInfo.noneSkipFirst)) { + if (alphaInfo != CGImageAlphaInfo.premultipliedFirst) && (alphaInfo != CGImageAlphaInfo.first) && (alphaInfo != CGImageAlphaInfo.noneSkipFirst) { shouldRedrawUsingCoreGraphics = true } - } else if ((bitmapInfo.contains(CGBitmapInfo())) || (bitmapInfo.contains(.byteOrder32Big))) { + } else if (bitmapInfo.contains(CGBitmapInfo())) || (bitmapInfo.contains(.byteOrder32Big)) { /* Big endian, for alpha-last we can use this bitmap directly in GL */ - if ((alphaInfo != CGImageAlphaInfo.premultipliedLast) && (alphaInfo != CGImageAlphaInfo.last) && (alphaInfo != CGImageAlphaInfo.noneSkipLast)) { + if (alphaInfo != CGImageAlphaInfo.premultipliedLast) && (alphaInfo != CGImageAlphaInfo.last) && (alphaInfo != CGImageAlphaInfo.noneSkipLast) { shouldRedrawUsingCoreGraphics = true } else { /* Can access directly using GL_RGBA pixel format */ @@ -75,24 +74,24 @@ public class PictureInput: ImageSource { // 
CFAbsoluteTime elapsedTime, startTime = CFAbsoluteTimeGetCurrent(); - if (shouldRedrawUsingCoreGraphics) { + if shouldRedrawUsingCoreGraphics { // For resized or incompatible image: redraw - imageData = UnsafeMutablePointer.allocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) + imageData = UnsafeMutablePointer.allocate(capacity: Int(widthToUseForTexture * heightToUseForTexture) * 4) let genericRGBColorspace = CGColorSpaceCreateDeviceRGB() - let imageContext = CGContext(data:imageData, width:Int(widthToUseForTexture), height:Int(heightToUseForTexture), bitsPerComponent:8, bytesPerRow:Int(widthToUseForTexture) * 4, space:genericRGBColorspace, bitmapInfo:CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) + let imageContext = CGContext(data: imageData, width: Int(widthToUseForTexture), height: Int(heightToUseForTexture), bitsPerComponent: 8, bytesPerRow: Int(widthToUseForTexture) * 4, space: genericRGBColorspace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) // CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html - imageContext?.draw(image, in:CGRect(x:0.0, y:0.0, width:CGFloat(widthToUseForTexture), height:CGFloat(heightToUseForTexture))) + imageContext?.draw(image, in: CGRect(x: 0.0, y: 0.0, width: CGFloat(widthToUseForTexture), height: CGFloat(heightToUseForTexture))) } else { // Access the raw image bytes directly dataFromImageDataProvider = image.dataProvider?.data - imageData = UnsafeMutablePointer(mutating:CFDataGetBytePtr(dataFromImageDataProvider)!) + imageData = UnsafeMutablePointer(mutating: CFDataGetBytePtr(dataFromImageDataProvider)!) 
} sharedImageProcessingContext.makeCurrentContext() do { - imageFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:orientation, size:GLSize(width:widthToUseForTexture, height:heightToUseForTexture), textureOnly:true) + imageFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: orientation, size: GLSize(width: widthToUseForTexture, height: heightToUseForTexture), textureOnly: true) imageFramebuffer.timingStyle = .stillImage } catch { fatalError("ERROR: Unable to initialize framebuffer of size (\(widthToUseForTexture), \(heightToUseForTexture)) with error: \(error)") @@ -100,40 +99,40 @@ public class PictureInput: ImageSource { glActiveTexture(GLenum(GL_TEXTURE1)) glBindTexture(GLenum(GL_TEXTURE_2D), imageFramebuffer.texture) - if (smoothlyScaleOutput) { + if smoothlyScaleOutput { glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR_MIPMAP_LINEAR) } glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_RGBA, widthToUseForTexture, heightToUseForTexture, 0, GLenum(format), GLenum(GL_UNSIGNED_BYTE), imageData) - if (smoothlyScaleOutput) { + if smoothlyScaleOutput { glGenerateMipmap(GLenum(GL_TEXTURE_2D)) } glBindTexture(GLenum(GL_TEXTURE_2D), 0) - if (shouldRedrawUsingCoreGraphics) { - imageData.deallocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) + if shouldRedrawUsingCoreGraphics { + imageData.deallocate(capacity: Int(widthToUseForTexture * heightToUseForTexture) * 4) } } - public convenience init(image:NSImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { - self.init(image:image.cgImage(forProposedRect:nil, context:nil, hints:nil)!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) + public convenience init(image: NSImage, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation = .portrait) { + self.init(image: image.cgImage(forProposedRect: nil, context: nil, hints: nil)!, smoothlyScaleOutput: smoothlyScaleOutput, 
orientation: orientation) } - public convenience init(imageName:String, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { - guard let image = NSImage(named:imageName) else { fatalError("No such image named: \(imageName) in your application bundle") } - self.init(image:image.cgImage(forProposedRect:nil, context:nil, hints:nil)!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) + public convenience init(imageName: String, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation = .portrait) { + guard let image = NSImage(named: imageName) else { fatalError("No such image named: \(imageName) in your application bundle") } + self.init(image: image.cgImage(forProposedRect: nil, context: nil, hints: nil)!, smoothlyScaleOutput: smoothlyScaleOutput, orientation: orientation) } - public func processImage(synchronously:Bool = false) { + public func processImage(synchronously: Bool = false) { if synchronously { - sharedImageProcessingContext.runOperationSynchronously{ + sharedImageProcessingContext.runOperationSynchronously { sharedImageProcessingContext.makeCurrentContext() self.updateTargetsWithFramebuffer(self.imageFramebuffer) self.hasProcessedImage = true } } else { - sharedImageProcessingContext.runOperationAsynchronously{ + sharedImageProcessingContext.runOperationAsynchronously { sharedImageProcessingContext.makeCurrentContext() self.updateTargetsWithFramebuffer(self.imageFramebuffer) self.hasProcessedImage = true @@ -141,10 +140,10 @@ public class PictureInput: ImageSource { } } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { if hasProcessedImage { imageFramebuffer.lock() - target.newFramebufferAvailable(imageFramebuffer, fromSourceIndex:atIndex) + target.newFramebufferAvailable(imageFramebuffer, fromSourceIndex: atIndex) } } } diff --git a/framework/Source/Mac/PictureOutput.swift 
b/framework/Source/Mac/PictureOutput.swift index 59232ce4..138e6122 100644 --- a/framework/Source/Mac/PictureOutput.swift +++ b/framework/Source/Mac/PictureOutput.swift @@ -7,14 +7,14 @@ public enum PictureFileFormat { } public class PictureOutput: ImageConsumer { - public var encodedImageAvailableCallback:((Data) -> ())? - public var encodedImageFormat:PictureFileFormat = .png - public var imageAvailableCallback:((NSImage) -> ())? - public var onlyCaptureNextFrame:Bool = true + public var encodedImageAvailableCallback: ((Data) -> Void)? + public var encodedImageFormat: PictureFileFormat = .png + public var imageAvailableCallback: ((NSImage) -> Void)? + public var onlyCaptureNextFrame = true public let sources = SourceContainer() - public let maximumInputs:UInt = 1 - var url:URL! + public let maximumInputs: UInt = 1 + var url: URL! public init() { } @@ -22,14 +22,14 @@ public class PictureOutput: ImageConsumer { deinit { } - public func saveNextFrameToURL(_ url:URL, format:PictureFileFormat) { + public func saveNextFrameToURL(_ url: URL, format: PictureFileFormat) { onlyCaptureNextFrame = true encodedImageFormat = format self.url = url // Create an intentional short-term retain cycle to prevent deallocation before next frame is captured encodedImageAvailableCallback = {imageData in do { // FIXME: Xcode 8 beta 2 - try imageData.write(to: self.url, options:.atomic) + try imageData.write(to: self.url, options: .atomic) // try imageData.write(to: self.url, options:NSData.WritingOptions.dataWritingAtomic) } catch { // TODO: Handle this better @@ -39,8 +39,8 @@ public class PictureOutput: ImageConsumer { } // TODO: Replace with texture caches and a safer capture routine - func cgImageFromFramebuffer(_ framebuffer:Framebuffer) -> CGImage { - let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:framebuffer.size) + func cgImageFromFramebuffer(_ framebuffer: Framebuffer) -> CGImage { 
+ let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: framebuffer.orientation, size: framebuffer.size) renderFramebuffer.lock() renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.transparent) @@ -48,26 +48,26 @@ public class PictureOutput: ImageConsumer { // Need the blending here to enable non-1.0 alpha on output image enableAdditiveBlending() - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings: ShaderUniformSettings(), vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [framebuffer.texturePropertiesForOutputRotation(.noRotation)]) disableBlending() framebuffer.unlock() let imageByteSize = Int(framebuffer.size.width * framebuffer.size.height * 4) - let data = UnsafeMutablePointer.allocate(capacity:imageByteSize) + let data = UnsafeMutablePointer.allocate(capacity: imageByteSize) glReadPixels(0, 0, framebuffer.size.width, framebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), data) renderFramebuffer.unlock() - guard let dataProvider = CGDataProvider(dataInfo: nil, data: data, size: imageByteSize, releaseData: dataProviderReleaseCallback) else {fatalError("Could not create CGDataProvider")} + guard let dataProvider = CGDataProvider(dataInfo: nil, data: data, size: imageByteSize, releaseData: dataProviderReleaseCallback) else { fatalError("Could not create CGDataProvider") } let defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB() - return CGImage(width: Int(framebuffer.size.width), height: Int(framebuffer.size.height), bitsPerComponent:8, bitsPerPixel:32, bytesPerRow:4 * Int(framebuffer.size.width), space:defaultRGBColorSpace, 
bitmapInfo:CGBitmapInfo() /*| CGImageAlphaInfo.Last*/, provider:dataProvider, decode:nil, shouldInterpolate:false, intent:.defaultIntent)! + return CGImage(width: Int(framebuffer.size.width), height: Int(framebuffer.size.height), bitsPerComponent: 8, bitsPerPixel: 32, bytesPerRow: 4 * Int(framebuffer.size.width), space: defaultRGBColorSpace, bitmapInfo: CGBitmapInfo() /*| CGImageAlphaInfo.Last*/, provider: dataProvider, decode: nil, shouldInterpolate: false, intent: .defaultIntent)! } - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { if let imageCallback = imageAvailableCallback { let cgImageFromBytes = cgImageFromFramebuffer(framebuffer) - let image = NSImage(cgImage:cgImageFromBytes, size:NSZeroSize) + let image = NSImage(cgImage: cgImageFromBytes, size: NSSize.zero) imageCallback(image) @@ -78,11 +78,11 @@ public class PictureOutput: ImageConsumer { if let imageCallback = encodedImageAvailableCallback { let cgImageFromBytes = cgImageFromFramebuffer(framebuffer) - let bitmapRepresentation = NSBitmapImageRep(cgImage:cgImageFromBytes) - let imageData:Data + let bitmapRepresentation = NSBitmapImageRep(cgImage: cgImageFromBytes) + let imageData: Data switch encodedImageFormat { - case .png: imageData = bitmapRepresentation.representation(using: .PNG, properties: ["":""])! - case .jpeg: imageData = bitmapRepresentation.representation(using: .JPEG, properties: ["":""])! + case .png: imageData = bitmapRepresentation.representation(using: .PNG, properties: ["": ""])! + case .jpeg: imageData = bitmapRepresentation.representation(using: .JPEG, properties: ["": ""])! 
} imageCallback(imageData) @@ -95,37 +95,37 @@ public class PictureOutput: ImageConsumer { } public extension ImageSource { - public func saveNextFrameToURL(_ url:URL, format:PictureFileFormat) { + public func saveNextFrameToURL(_ url: URL, format: PictureFileFormat) { let pictureOutput = PictureOutput() - pictureOutput.saveNextFrameToURL(url, format:format) + pictureOutput.saveNextFrameToURL(url, format: format) self --> pictureOutput } } public extension NSImage { - public func filterWithOperation(_ operation:T) -> NSImage { - return filterWithPipeline{input, output in + public func filterWithOperation(_ operation: T) -> NSImage { + return filterWithPipeline {input, output in input --> operation --> output } } - public func filterWithPipeline(_ pipeline:(PictureInput, PictureOutput) -> ()) -> NSImage { - let picture = PictureInput(image:self) - var outputImage:NSImage? + public func filterWithPipeline(_ pipeline: (PictureInput, PictureOutput) -> Void) -> NSImage { + let picture = PictureInput(image: self) + var outputImage: NSImage? let pictureOutput = PictureOutput() pictureOutput.onlyCaptureNextFrame = true pictureOutput.imageAvailableCallback = {image in outputImage = image } pipeline(picture, pictureOutput) - picture.processImage(synchronously:true) + picture.processImage(synchronously: true) return outputImage! } } // Why are these flipped in the callback definition? 
-func dataProviderReleaseCallback(_ context:UnsafeMutableRawPointer?, data:UnsafeRawPointer, size:Int) { +func dataProviderReleaseCallback(_ context: UnsafeMutableRawPointer?, data: UnsafeRawPointer, size: Int) { // UnsafeMutablePointer(data).deallocate(capacity:size) // FIXME: Verify this is correct - data.deallocate(bytes:size, alignedTo:1) + data.deallocate(bytes: size, alignedTo: 1) } diff --git a/framework/Source/Mac/RenderView.swift b/framework/Source/Mac/RenderView.swift index ddcb502f..d48d1d6e 100755 --- a/framework/Source/Mac/RenderView.swift +++ b/framework/Source/Mac/RenderView.swift @@ -1,13 +1,13 @@ import Cocoa -public class RenderView:NSOpenGLView, ImageConsumer { +public class RenderView: NSOpenGLView, ImageConsumer { public var backgroundColor = Color.black public var fillMode = FillMode.preserveAspectRatio - public var sizeInPixels:Size { get { return Size(width:Float(self.frame.size.width), height:Float(self.frame.size.width)) } } + public var sizeInPixels: Size { get { return Size(width: Float(self.frame.size.width), height: Float(self.frame.size.width)) } } public let sources = SourceContainer() - public let maximumInputs:UInt = 1 - private lazy var displayShader:ShaderProgram = { + public let maximumInputs: UInt = 1 + private lazy var displayShader: ShaderProgram = { sharedImageProcessingContext.makeCurrentContext() self.openGLContext = sharedImageProcessingContext.context return sharedImageProcessingContext.passthroughShader @@ -15,18 +15,18 @@ public class RenderView:NSOpenGLView, ImageConsumer { // TODO: Need to set viewport to appropriate size, resize viewport on view reshape - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { glBindFramebuffer(GLenum(GL_FRAMEBUFFER), 0) glBindRenderbuffer(GLenum(GL_RENDERBUFFER), 0) - let viewSize = GLSize(width:GLint(round(self.bounds.size.width)), 
height:GLint(round(self.bounds.size.height))) + let viewSize = GLSize(width: GLint(round(self.bounds.size.width)), height: GLint(round(self.bounds.size.height))) glViewport(0, 0, viewSize.width, viewSize.height) clearFramebufferWithColor(backgroundColor) // TODO: Cache these scaled vertices - let scaledVertices = fillMode.transformVertices(verticallyInvertedImageVertices, fromInputSize:framebuffer.sizeForTargetOrientation(.portrait), toFitSize:viewSize) - renderQuadWithShader(self.displayShader, vertices:scaledVertices, inputTextures:[framebuffer.texturePropertiesForTargetOrientation(.portrait)]) + let scaledVertices = fillMode.transformVertices(verticallyInvertedImageVertices, fromInputSize: framebuffer.sizeForTargetOrientation(.portrait), toFitSize: viewSize) + renderQuadWithShader(self.displayShader, vertices: scaledVertices, inputTextures: [framebuffer.texturePropertiesForTargetOrientation(.portrait)]) sharedImageProcessingContext.presentBufferForDisplay() framebuffer.unlock() diff --git a/framework/Source/Matrix.swift b/framework/Source/Matrix.swift index 475fff5b..199d2acc 100644 --- a/framework/Source/Matrix.swift +++ b/framework/Source/Matrix.swift @@ -3,12 +3,12 @@ import QuartzCore #endif public struct Matrix4x4 { - public let m11:Float, m12:Float, m13:Float, m14:Float - public let m21:Float, m22:Float, m23:Float, m24:Float - public let m31:Float, m32:Float, m33:Float, m34:Float - public let m41:Float, m42:Float, m43:Float, m44:Float + public let m11: Float, m12: Float, m13: Float, m14: Float + public let m21: Float, m22: Float, m23: Float, m24: Float + public let m31: Float, m32: Float, m33: Float, m34: Float + public let m41: Float, m42: Float, m43: Float, m44: Float - public init(rowMajorValues:[Float]) { + public init(rowMajorValues: [Float]) { guard rowMajorValues.count > 15 else { fatalError("Tried to initialize a 4x4 matrix with fewer than 16 values") } self.m11 = rowMajorValues[0] @@ -32,18 +32,18 @@ public struct Matrix4x4 { self.m44 = 
rowMajorValues[15] } - public static let identity = Matrix4x4(rowMajorValues:[1.0, 0.0, 0.0, 0.0, + public static let identity = Matrix4x4(rowMajorValues: [1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0]) } public struct Matrix3x3 { - public let m11:Float, m12:Float, m13:Float - public let m21:Float, m22:Float, m23:Float - public let m31:Float, m32:Float, m33:Float + public let m11: Float, m12: Float, m13: Float + public let m21: Float, m22: Float, m23: Float + public let m31: Float, m32: Float, m33: Float - public init(rowMajorValues:[Float]) { + public init(rowMajorValues: [Float]) { guard rowMajorValues.count > 8 else { fatalError("Tried to initialize a 3x3 matrix with fewer than 9 values") } self.m11 = rowMajorValues[0] @@ -59,16 +59,16 @@ public struct Matrix3x3 { self.m33 = rowMajorValues[8] } - public static let identity = Matrix3x3(rowMajorValues:[1.0, 0.0, 0.0, + public static let identity = Matrix3x3(rowMajorValues: [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0]) - public static let centerOnly = Matrix3x3(rowMajorValues:[0.0, 0.0, 0.0, + public static let centerOnly = Matrix3x3(rowMajorValues: [0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0]) } -func orthographicMatrix(_ left:Float, right:Float, bottom:Float, top:Float, near:Float, far:Float, anchorTopLeft:Bool = false) -> Matrix4x4 { +func orthographicMatrix(_ left: Float, right: Float, bottom: Float, top: Float, near: Float, far: Float, anchorTopLeft: Bool = false) -> Matrix4x4 { let r_l = right - left let t_b = top - bottom let f_n = far - near @@ -76,8 +76,8 @@ func orthographicMatrix(_ left:Float, right:Float, bottom:Float, top:Float, near var ty = -(top + bottom) / (top - bottom) let tz = -(far + near) / (far - near) - let scale:Float - if (anchorTopLeft) { + let scale: Float + if anchorTopLeft { scale = 4.0 tx = -1.0 ty = -1.0 @@ -85,17 +85,16 @@ func orthographicMatrix(_ left:Float, right:Float, bottom:Float, top:Float, near scale = 2.0 } - return 
Matrix4x4(rowMajorValues:[ + return Matrix4x4(rowMajorValues: [ scale / r_l, 0.0, 0.0, tx, 0.0, scale / t_b, 0.0, ty, 0.0, 0.0, scale / f_n, tz, 0.0, 0.0, 0.0, 1.0]) } - #if !os(Linux) public extension Matrix4x4 { - public init (_ transform3D:CATransform3D) { + init (_ transform3D: CATransform3D) { self.m11 = Float(transform3D.m11) self.m12 = Float(transform3D.m12) self.m13 = Float(transform3D.m13) @@ -117,7 +116,7 @@ public extension Matrix4x4 { self.m44 = Float(transform3D.m44) } - public init (_ transform:CGAffineTransform) { + init (_ transform: CGAffineTransform) { self.init(CATransform3DMakeAffineTransform(transform)) } } diff --git a/framework/Source/NSObject+Exception.h b/framework/Source/NSObject+Exception.h new file mode 100644 index 00000000..bb0bf010 --- /dev/null +++ b/framework/Source/NSObject+Exception.h @@ -0,0 +1,14 @@ +// +// NSObject+Exception.h +// GPUImage2 +// +// Created by Josh Bernfeld on 11/23/17. +// + +#import + +@interface NSObject (Exception) + ++ (BOOL)catchException:(void(^)(void))tryBlock error:(__autoreleasing NSError **)error; + +@end diff --git a/framework/Source/NSObject+Exception.m b/framework/Source/NSObject+Exception.m new file mode 100644 index 00000000..ed6d3711 --- /dev/null +++ b/framework/Source/NSObject+Exception.m @@ -0,0 +1,24 @@ +// +// NSObject+Exception.m +// GPUImage2 +// +// Created by Josh Bernfeld on 11/23/17. 
+// +// Source: https://stackoverflow.com/a/36454808/1275014 + +#import "NSObject+Exception.h" + +@implementation NSObject (Exception) + ++ (BOOL)catchException:(void(^)(void))tryBlock error:(__autoreleasing NSError **)error { + @try { + tryBlock(); + return YES; + } + @catch (NSException *exception) { + *error = [[NSError alloc] initWithDomain:exception.name code:0 userInfo:exception.userInfo]; + return NO; + } +} + +@end diff --git a/framework/Source/OpenGLContext_Shared.swift b/framework/Source/OpenGLContext_Shared.swift index 20473a30..a68dd1dd 100755 --- a/framework/Source/OpenGLContext_Shared.swift +++ b/framework/Source/OpenGLContext_Shared.swift @@ -1,4 +1,3 @@ - #if os(Linux) #if GLES import COpenGLES.gles2 @@ -18,37 +17,39 @@ import Foundation public let sharedImageProcessingContext = OpenGLContext() extension OpenGLContext { - public func programForVertexShader(_ vertexShader:String, fragmentShader:String) throws -> ShaderProgram { - let lookupKeyForShaderProgram = "V: \(vertexShader) - F: \(fragmentShader)" - if let shaderFromCache = shaderCache[lookupKeyForShaderProgram] { - return shaderFromCache - } else { - return try sharedImageProcessingContext.runOperationSynchronously{ - let program = try ShaderProgram(vertexShader:vertexShader, fragmentShader:fragmentShader) + public func programForVertexShader(_ vertexShader: String, fragmentShader: String) throws -> ShaderProgram { + return try self.runOperationSynchronously { + let lookupKeyForShaderProgram = "V: \(vertexShader) - F: \(fragmentShader)" + if let shaderFromCache = shaderCache[lookupKeyForShaderProgram] { +// debugPrint("load from cache: \(lookupKeyForShaderProgram)") + return shaderFromCache + } else { + let program = try ShaderProgram(vertexShader: vertexShader, fragmentShader: fragmentShader) self.shaderCache[lookupKeyForShaderProgram] = program +// debugPrint("create cache: \(lookupKeyForShaderProgram)") return program } } } - public func programForVertexShader(_ vertexShader:String, 
fragmentShader:URL) throws -> ShaderProgram { - return try programForVertexShader(vertexShader, fragmentShader:try shaderFromFile(fragmentShader)) + public func programForVertexShader(_ vertexShader: String, fragmentShader: URL) throws -> ShaderProgram { + return try programForVertexShader(vertexShader, fragmentShader: try shaderFromFile(fragmentShader)) } - public func programForVertexShader(_ vertexShader:URL, fragmentShader:URL) throws -> ShaderProgram { - return try programForVertexShader(try shaderFromFile(vertexShader), fragmentShader:try shaderFromFile(fragmentShader)) + public func programForVertexShader(_ vertexShader: URL, fragmentShader: URL) throws -> ShaderProgram { + return try programForVertexShader(try shaderFromFile(vertexShader), fragmentShader: try shaderFromFile(fragmentShader)) } - public func openGLDeviceSettingForOption(_ option:Int32) -> GLint { - return self.runOperationSynchronously{() -> GLint in + public func openGLDeviceSettingForOption(_ option: Int32) -> GLint { + return self.runOperationSynchronously {() -> GLint in self.makeCurrentContext() - var openGLValue:GLint = 0 + var openGLValue: GLint = 0 glGetIntegerv(GLenum(option), &openGLValue) return openGLValue } } - public func deviceSupportsExtension(_ openGLExtension:String) -> Bool { + public func deviceSupportsExtension(_ openGLExtension: String) -> Bool { #if os(Linux) return false #else @@ -66,41 +67,49 @@ extension OpenGLContext { return deviceSupportsExtension("GL_EXT_shader_framebuffer_fetch") } - public func sizeThatFitsWithinATextureForSize(_ size:Size) -> Size { + public func sizeThatFitsWithinATextureForSize(_ size: Size) -> Size { let maxTextureSize = Float(self.maximumTextureSizeForThisDevice) - if ( (size.width < maxTextureSize) && (size.height < maxTextureSize) ) { + if (size.width < maxTextureSize) && (size.height < maxTextureSize) { return size } - let adjustedSize:Size - if (size.width > size.height) { - adjustedSize = Size(width:maxTextureSize, 
height:(maxTextureSize / size.width) * size.height) + let adjustedSize: Size + if size.width > size.height { + adjustedSize = Size(width: maxTextureSize, height: (maxTextureSize / size.width) * size.height) } else { - adjustedSize = Size(width:(maxTextureSize / size.height) * size.width, height:maxTextureSize) + adjustedSize = Size(width: (maxTextureSize / size.height) * size.width, height: maxTextureSize) } return adjustedSize } func generateTextureVBOs() { - textureVBOs[.noRotation] = generateVBO(for:Rotation.noRotation.textureCoordinates()) - textureVBOs[.rotateCounterclockwise] = generateVBO(for:Rotation.rotateCounterclockwise.textureCoordinates()) - textureVBOs[.rotateClockwise] = generateVBO(for:Rotation.rotateClockwise.textureCoordinates()) - textureVBOs[.rotate180] = generateVBO(for:Rotation.rotate180.textureCoordinates()) - textureVBOs[.flipHorizontally] = generateVBO(for:Rotation.flipHorizontally.textureCoordinates()) - textureVBOs[.flipVertically] = generateVBO(for:Rotation.flipVertically.textureCoordinates()) - textureVBOs[.rotateClockwiseAndFlipVertically] = generateVBO(for:Rotation.rotateClockwiseAndFlipVertically.textureCoordinates()) - textureVBOs[.rotateClockwiseAndFlipHorizontally] = generateVBO(for:Rotation.rotateClockwiseAndFlipHorizontally.textureCoordinates()) + textureVBOs[.noRotation] = generateVBO(for: Rotation.noRotation.textureCoordinates()) + textureVBOs[.rotateCounterclockwise] = generateVBO(for: Rotation.rotateCounterclockwise.textureCoordinates()) + textureVBOs[.rotateClockwise] = generateVBO(for: Rotation.rotateClockwise.textureCoordinates()) + textureVBOs[.rotate180] = generateVBO(for: Rotation.rotate180.textureCoordinates()) + textureVBOs[.flipHorizontally] = generateVBO(for: Rotation.flipHorizontally.textureCoordinates()) + textureVBOs[.flipVertically] = generateVBO(for: Rotation.flipVertically.textureCoordinates()) + textureVBOs[.rotateClockwiseAndFlipVertically] = generateVBO(for: 
Rotation.rotateClockwiseAndFlipVertically.textureCoordinates()) + textureVBOs[.rotateClockwiseAndFlipHorizontally] = generateVBO(for: Rotation.rotateClockwiseAndFlipHorizontally.textureCoordinates()) } - public func textureVBO(for rotation:Rotation) -> GLuint { - guard let textureVBO = textureVBOs[rotation] else {fatalError("GPUImage doesn't have a texture VBO set for the rotation \(rotation)") } + public func textureVBO(for rotation: Rotation) -> GLuint { + guard let textureVBO = textureVBOs[rotation] else { fatalError("GPUImage doesn't have a texture VBO set for the rotation \(rotation)") } return textureVBO } } -@_semantics("sil.optimize.never") public func debugPrint(_ stringToPrint:String, file: StaticString = #file, line: UInt = #line, function: StaticString = #function) { +public var GPUImageLogger: (String, StaticString, UInt, StaticString) -> Void = { stringToPrint, file, line, function in + Swift.print("\(stringToPrint) --> \((String(describing: file) as NSString).lastPathComponent): \(function): \(line)") +} + +@_semantics("sil.optimize.never") public func debugPrint(_ stringToPrint: String, file: StaticString = #file, line: UInt = #line, function: StaticString = #function) { #if DEBUG - print("\(stringToPrint) --> \((String(describing:file) as NSString).lastPathComponent): \(function): \(line)") + print("[GPUImage] " + stringToPrint, file: file, line: line, function: function) #endif } + +@_semantics("sil.optimize.never") public func print(_ stringToPrint: String, file: StaticString = #file, line: UInt = #line, function: StaticString = #function) { + GPUImageLogger(stringToPrint, file, line, function) +} diff --git a/framework/Source/OpenGLRendering.swift b/framework/Source/OpenGLRendering.swift index 020b0b12..aeb9ff87 100755 --- a/framework/Source/OpenGLRendering.swift +++ b/framework/Source/OpenGLRendering.swift @@ -23,10 +23,10 @@ public enum InputTextureStorageFormat { } public struct InputTextureProperties { - public let 
textureStorage:InputTextureStorageFormat - public let texture:GLuint + public let textureStorage: InputTextureStorageFormat + public let texture: GLuint - public init(textureCoordinates:[GLfloat]? = nil, textureVBO:GLuint? = nil, texture:GLuint) { + public init(textureCoordinates: [GLfloat]? = nil, textureVBO: GLuint? = nil, texture: GLuint) { self.texture = texture switch (textureCoordinates, textureVBO) { case let (.some(coordinates), .none): self.textureStorage = .textureCoordinates(coordinates) @@ -38,32 +38,36 @@ public struct InputTextureProperties { } public struct GLSize { - public let width:GLint - public let height:GLint + public let width: GLint + public let height: GLint - public init(width:GLint, height:GLint) { + public init(width: GLint, height: GLint) { self.width = width self.height = height } - public init(_ size:Size) { + public init(_ size: Size) { self.width = size.glWidth() self.height = size.glHeight() } + + #if DEBUG + public var debugRenderInfo: String { "\(width)x\(height)" } + #endif } extension Size { - init(_ size:GLSize) { + init(_ size: GLSize) { self.width = Float(size.width) self.height = Float(size.height) } } -public let standardImageVertices:[GLfloat] = [-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, 1.0] -public let verticallyInvertedImageVertices:[GLfloat] = [-1.0, 1.0, 1.0, 1.0, -1.0, -1.0, 1.0, -1.0] +public let standardImageVertices: [GLfloat] = [-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, 1.0] +public let verticallyInvertedImageVertices: [GLfloat] = [-1.0, 1.0, 1.0, 1.0, -1.0, -1.0, 1.0, -1.0] // "position" and "inputTextureCoordinate", "inputTextureCoordinate2" attribute naming follows the convention of the old GPUImage -public func renderQuadWithShader(_ shader:ShaderProgram, uniformSettings:ShaderUniformSettings? = nil, vertices:[GLfloat]? = nil, vertexBufferObject:GLuint? = nil, inputTextures:[InputTextureProperties]) { +public func renderQuadWithShader(_ shader: ShaderProgram, uniformSettings: ShaderUniformSettings? 
= nil, vertices: [GLfloat]? = nil, vertexBufferObject: GLuint? = nil, inputTextures: [InputTextureProperties], context: OpenGLContext = sharedImageProcessingContext) { switch (vertices, vertexBufferObject) { case (.none, .some): break case (.some, .none): break @@ -71,13 +75,12 @@ public func renderQuadWithShader(_ shader:ShaderProgram, uniformSettings:ShaderU case (.none, .none): fatalError("Can't specify both vertices and a VBO in renderQuadWithShader()") } - sharedImageProcessingContext.makeCurrentContext() + context.makeCurrentContext() shader.use() uniformSettings?.restoreShaderSettings(shader) guard let positionAttribute = shader.attributeIndex("position") else { fatalError("A position attribute was missing from the shader program during rendering.") } - if let boundVBO = vertexBufferObject { glBindBuffer(GLenum(GL_ARRAY_BUFFER), boundVBO) glVertexAttribPointer(positionAttribute, 2, GLenum(GL_FLOAT), 0, 0, nil) @@ -96,19 +99,23 @@ public func renderQuadWithShader(_ shader:ShaderProgram, uniformSettings:ShaderU glVertexAttribPointer(textureCoordinateAttribute, 2, GLenum(GL_FLOAT), 0, 0, nil) glBindBuffer(GLenum(GL_ARRAY_BUFFER), 0) } - } else if (index == 0) { + } else if index == 0 { fatalError("The required attribute named inputTextureCoordinate was missing from the shader program during rendering.") } glActiveTexture(textureUnitForIndex(index)) glBindTexture(GLenum(GL_TEXTURE_2D), inputTexture.texture) - shader.setValue(GLint(index), forUniform:"inputImageTexture".withNonZeroSuffix(index)) + shader.setValue(GLint(index), forUniform: "inputImageTexture".withNonZeroSuffix(index)) + } + + if let initTime = shader.initTime { + shader.setValue(GLfloat(CACurrentMediaTime() - initTime), forUniform: "inputTime") } glDrawArrays(GLenum(GL_TRIANGLE_STRIP), 0, 4) - if (vertexBufferObject != nil) { + if vertexBufferObject != nil { glBindBuffer(GLenum(GL_ARRAY_BUFFER), 0) } @@ -118,16 +125,16 @@ public func renderQuadWithShader(_ shader:ShaderProgram, 
uniformSettings:ShaderU } } -public func clearFramebufferWithColor(_ color:Color) { +public func clearFramebufferWithColor(_ color: Color) { glClearColor(GLfloat(color.redComponent), GLfloat(color.greenComponent), GLfloat(color.blueComponent), GLfloat(color.alphaComponent)) glClear(GLenum(GL_COLOR_BUFFER_BIT)) } -func renderStencilMaskFromFramebuffer(_ framebuffer:Framebuffer) { +func renderStencilMaskFromFramebuffer(_ framebuffer: Framebuffer) { let inputTextureProperties = framebuffer.texturePropertiesForOutputRotation(.noRotation) glEnable(GLenum(GL_STENCIL_TEST)) glClearStencil(0) - glClear (GLenum(GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT)) + glClear(GLenum(GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT)) glColorMask(GLboolean(GL_FALSE), GLboolean(GL_FALSE), GLboolean(GL_FALSE), GLboolean(GL_FALSE)) glStencilFunc(GLenum(GL_ALWAYS), 1, 1) glStencilOp(GLenum(GL_KEEP), GLenum(GL_KEEP), GLenum(GL_REPLACE)) @@ -135,10 +142,10 @@ func renderStencilMaskFromFramebuffer(_ framebuffer:Framebuffer) { #if GL glEnable(GLenum(GL_ALPHA_TEST)) glAlphaFunc(GLenum(GL_NOTEQUAL), 0.0) - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, vertices:standardImageVertices, inputTextures:[inputTextureProperties]) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, vertices: standardImageVertices, inputTextures: [inputTextureProperties]) #else - let alphaTestShader = crashOnShaderCompileFailure("Stencil"){return try sharedImageProcessingContext.programForVertexShader(OneInputVertexShader, fragmentShader:AlphaTestFragmentShader)} - renderQuadWithShader(alphaTestShader, vertices:standardImageVertices, inputTextures:[inputTextureProperties]) + let alphaTestShader = crashOnShaderCompileFailure("Stencil") { return try sharedImageProcessingContext.programForVertexShader(OneInputVertexShader, fragmentShader: AlphaTestFragmentShader) } + renderQuadWithShader(alphaTestShader, vertices: standardImageVertices, inputTextures: [inputTextureProperties]) #endif 
glColorMask(GLboolean(GL_TRUE), GLboolean(GL_TRUE), GLboolean(GL_TRUE), GLboolean(GL_TRUE)) @@ -155,7 +162,7 @@ func disableStencil() { glDisable(GLenum(GL_STENCIL_TEST)) } -func textureUnitForIndex(_ index:Int) -> GLenum { +func textureUnitForIndex(_ index: Int) -> GLenum { switch index { case 0: return GLenum(GL_TEXTURE0) case 1: return GLenum(GL_TEXTURE1) @@ -170,8 +177,8 @@ func textureUnitForIndex(_ index:Int) -> GLenum { } } -public func generateTexture(minFilter:Int32, magFilter:Int32, wrapS:Int32, wrapT:Int32) -> GLuint { - var texture:GLuint = 0 +public func generateTexture(minFilter: Int32, magFilter: Int32, wrapS: Int32, wrapT: Int32) -> GLuint { + var texture: GLuint = 0 glActiveTexture(GLenum(GL_TEXTURE1)) glGenTextures(1, &texture) @@ -186,15 +193,15 @@ public func generateTexture(minFilter:Int32, magFilter:Int32, wrapS:Int32, wrapT return texture } -public func uploadLocalArray(data:[GLfloat], into texture:GLuint, size:GLSize) { +public func uploadLocalArray(data: [GLfloat], into texture: GLuint, size: GLSize) { glActiveTexture(GLenum(GL_TEXTURE1)) glBindTexture(GLenum(GL_TEXTURE_2D), texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_RGBA, size.width, size.height, 0, GLenum(GL_RGBA), GLenum(GL_FLOAT), data) glBindTexture(GLenum(GL_TEXTURE_2D), 0) } -func generateFramebufferForTexture(_ texture:GLuint, width:GLint, height:GLint, internalFormat:Int32, format:Int32, type:Int32, stencil:Bool) throws -> (GLuint, GLuint?) { - var framebuffer:GLuint = 0 +func generateFramebufferForTexture(_ texture: GLuint, width: GLint, height: GLint, internalFormat: Int32, format: Int32, type: Int32, stencil: Bool) throws -> (GLuint, GLuint?) 
{ + var framebuffer: GLuint = 0 glActiveTexture(GLenum(GL_TEXTURE1)) glGenFramebuffers(1, &framebuffer) @@ -205,13 +212,13 @@ func generateFramebufferForTexture(_ texture:GLuint, width:GLint, height:GLint, glFramebufferTexture2D(GLenum(GL_FRAMEBUFFER), GLenum(GL_COLOR_ATTACHMENT0), GLenum(GL_TEXTURE_2D), texture, 0) let status = glCheckFramebufferStatus(GLenum(GL_FRAMEBUFFER)) - if (status != GLenum(GL_FRAMEBUFFER_COMPLETE)) { - throw FramebufferCreationError(errorCode:status) + if status != GLenum(GL_FRAMEBUFFER_COMPLETE) { + throw FramebufferCreationError(errorCode: status) } - let stencilBuffer:GLuint? + let stencilBuffer: GLuint? if stencil { - stencilBuffer = try attachStencilBuffer(width:width, height:height) + stencilBuffer = try attachStencilBuffer(width: width, height: height) } else { stencilBuffer = nil } @@ -221,9 +228,9 @@ func generateFramebufferForTexture(_ texture:GLuint, width:GLint, height:GLint, return (framebuffer, stencilBuffer) } -func attachStencilBuffer(width:GLint, height:GLint) throws -> GLuint { - var stencilBuffer:GLuint = 0 - glGenRenderbuffers(1, &stencilBuffer); +func attachStencilBuffer(width: GLint, height: GLint) throws -> GLuint { + var stencilBuffer: GLuint = 0 + glGenRenderbuffers(1, &stencilBuffer) glBindRenderbuffer(GLenum(GL_RENDERBUFFER), stencilBuffer) glRenderbufferStorage(GLenum(GL_RENDERBUFFER), GLenum(GL_DEPTH24_STENCIL8), width, height) // iOS seems to only support combination depth + stencil, from references #if os(iOS) @@ -234,8 +241,8 @@ func attachStencilBuffer(width:GLint, height:GLint) throws -> GLuint { glBindRenderbuffer(GLenum(GL_RENDERBUFFER), 0) let status = glCheckFramebufferStatus(GLenum(GL_FRAMEBUFFER)) - if (status != GLenum(GL_FRAMEBUFFER_COMPLETE)) { - throw FramebufferCreationError(errorCode:status) + if status != GLenum(GL_FRAMEBUFFER_COMPLETE) { + throw FramebufferCreationError(errorCode: status) } return stencilBuffer @@ -251,8 +258,8 @@ public func disableBlending() { glDisable(GLenum(GL_BLEND)) } 
-public func generateVBO(for vertices:[GLfloat]) -> GLuint { - var newBuffer:GLuint = 0 +public func generateVBO(for vertices: [GLfloat]) -> GLuint { + var newBuffer: GLuint = 0 glGenBuffers(1, &newBuffer) glBindBuffer(GLenum(GL_ARRAY_BUFFER), newBuffer) glBufferData(GLenum(GL_ARRAY_BUFFER), MemoryLayout.size * vertices.count, vertices, GLenum(GL_STATIC_DRAW)) @@ -260,13 +267,13 @@ public func generateVBO(for vertices:[GLfloat]) -> GLuint { return newBuffer } -public func deleteVBO(_ vbo:GLuint) { +public func deleteVBO(_ vbo: GLuint) { var deletedVBO = vbo glDeleteBuffers(1, &deletedVBO) } extension String { - func withNonZeroSuffix(_ suffix:Int) -> String { + func withNonZeroSuffix(_ suffix: Int) -> String { if suffix == 0 { return self } else { @@ -274,12 +281,11 @@ extension String { } } - func withGLChar(_ operation:(UnsafePointer) -> ()) { - if let value = self.cString(using:String.Encoding.utf8) { + func withGLChar(_ operation: (UnsafePointer) -> Void) { + if let value = self.cString(using: String.Encoding.utf8) { operation(UnsafePointer(value)) } else { fatalError("Could not convert this string to UTF8: \(self)") } } } - diff --git a/framework/Source/OperationGroup.swift b/framework/Source/OperationGroup.swift index 8e6f5675..edfa5af0 100644 --- a/framework/Source/OperationGroup.swift +++ b/framework/Source/OperationGroup.swift @@ -1,23 +1,34 @@ open class OperationGroup: ImageProcessingOperation { - let inputImageRelay = ImageRelay() - let outputImageRelay = ImageRelay() + public let inputImageRelay = ImageRelay() + public let outputImageRelay = ImageRelay() - public var sources:SourceContainer { get { return inputImageRelay.sources } } - public var targets:TargetContainer { get { return outputImageRelay.targets } } - public let maximumInputs:UInt = 1 + public var sources: SourceContainer { get { return inputImageRelay.sources } } + public var targets: TargetContainer { get { return outputImageRelay.targets } } + public let maximumInputs: UInt = 1 + + #if 
DEBUG + public var debugRenderInfo: String = "" + + public func debugGetOnePassRenderInfos() -> String { + return """ +{ OperationGroup: \(Self.self), subfilters: \(inputImageRelay.debugGetOnePassRenderInfos()) +} +""" + } + #endif public init() { } - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { - inputImageRelay.newFramebufferAvailable(framebuffer, fromSourceIndex:fromSourceIndex) + open func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { + inputImageRelay.newFramebufferAvailable(framebuffer, fromSourceIndex: fromSourceIndex) } - public func configureGroup(_ configurationOperation:(_ input:ImageRelay, _ output:ImageRelay) -> ()) { + public func configureGroup(_ configurationOperation:(_ input: ImageRelay, _ output: ImageRelay) -> Void) { configurationOperation(inputImageRelay, outputImageRelay) } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { - outputImageRelay.transmitPreviousImage(to:target, atIndex:atIndex) + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { + outputImageRelay.transmitPreviousImage(to: target, atIndex: atIndex) } } diff --git a/framework/Source/Operations/AdaptiveThreshold.swift b/framework/Source/Operations/AdaptiveThreshold.swift index 2bac7c9d..827a835a 100644 --- a/framework/Source/Operations/AdaptiveThreshold.swift +++ b/framework/Source/Operations/AdaptiveThreshold.swift @@ -3,15 +3,15 @@ public class AdaptiveThreshold: OperationGroup { let luminance = Luminance() let boxBlur = BoxBlur() - let adaptiveThreshold = BasicOperation(fragmentShader:AdaptiveThresholdFragmentShader, numberOfInputs:2) + let adaptiveThreshold = BasicOperation(fragmentShader: AdaptiveThresholdFragmentShader, numberOfInputs: 2) public override init() { blurRadiusInPixels = 4.0 super.init() - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.luminance --> self.boxBlur --> self.adaptiveThreshold --> 
output self.luminance --> self.adaptiveThreshold } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/AddBlend.swift b/framework/Source/Operations/AddBlend.swift index 329537a9..dd06f470 100644 --- a/framework/Source/Operations/AddBlend.swift +++ b/framework/Source/Operations/AddBlend.swift @@ -1,6 +1,5 @@ public class AddBlend: BasicOperation { - public init() { - super.init(fragmentShader:AddBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: AddBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/AlphaBlend.swift b/framework/Source/Operations/AlphaBlend.swift index c3931988..471985b6 100644 --- a/framework/Source/Operations/AlphaBlend.swift +++ b/framework/Source/Operations/AlphaBlend.swift @@ -1,9 +1,9 @@ public class AlphaBlend: BasicOperation { - public var mix:Float = 0.5 { didSet { uniformSettings["mixturePercent"] = mix } } + public var mix: Float = 0.5 { didSet { uniformSettings["mixturePercent"] = mix } } public init() { - super.init(fragmentShader:AlphaBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: AlphaBlendFragmentShader, numberOfInputs: 2) - ({mix = 0.5})() + ({ mix = 0.5 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/AmatorkaFilter.swift b/framework/Source/Operations/AmatorkaFilter.swift index fc569eaf..6296a3c4 100755 --- a/framework/Source/Operations/AmatorkaFilter.swift +++ b/framework/Source/Operations/AmatorkaFilter.swift @@ -11,8 +11,12 @@ public class AmatorkaFilter: LookupFilter { public override init() { super.init() - ({lookupImage = PictureInput(imageName:"lookup_amatorka.png")})() - ({intensity = 1.0})() + do { + try ({ lookupImage = try PictureInput(imageName: "lookup_amatorka.png") })() + } catch { + print("ERROR: Unable to create PictureInput \(error)") + } + ({ intensity = 1.0 })() } } #endif diff --git a/framework/Source/Operations/AverageColorExtractor.swift 
b/framework/Source/Operations/AverageColorExtractor.swift index 1e4911ab..ad3c3779 100755 --- a/framework/Source/Operations/AverageColorExtractor.swift +++ b/framework/Source/Operations/AverageColorExtractor.swift @@ -16,19 +16,19 @@ import Glibc import Foundation public class AverageColorExtractor: BasicOperation { - public var extractedColorCallback:((Color) -> ())? + public var extractedColorCallback: ((Color) -> Void)? public init() { - super.init(vertexShader:AverageColorVertexShader, fragmentShader:AverageColorFragmentShader) + super.init(vertexShader: AverageColorVertexShader, fragmentShader: AverageColorFragmentShader) } - override func renderFrame() { - averageColorBySequentialReduction(inputFramebuffer:inputFramebuffers[0]!, shader:shader, extractAverageOperation:extractAverageColorFromFramebuffer) + override open func renderFrame() { + averageColorBySequentialReduction(inputFramebuffer: inputFramebuffers[0]!, shader: shader, extractAverageOperation: extractAverageColorFromFramebuffer) releaseIncomingFramebuffers() } - func extractAverageColorFromFramebuffer(_ framebuffer:Framebuffer) { - var data = [UInt8](repeating:0, count:Int(framebuffer.size.width * framebuffer.size.height * 4)) + func extractAverageColorFromFramebuffer(_ framebuffer: Framebuffer) { + var data = [UInt8](repeating: 0, count: Int(framebuffer.size.width * framebuffer.size.height * 4)) glReadPixels(0, 0, framebuffer.size.width, framebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), &data) renderFramebuffer = framebuffer framebuffer.resetRetainCount() @@ -43,13 +43,13 @@ public class AverageColorExtractor: BasicOperation { alphaTotal += Int(data[(currentPixel * 4) + 3]) } - let returnColor = Color(red:Float(redTotal) / Float(totalNumberOfPixels) / 255.0, green:Float(greenTotal) / Float(totalNumberOfPixels) / 255.0, blue:Float(blueTotal) / Float(totalNumberOfPixels) / 255.0, alpha:Float(alphaTotal) / Float(totalNumberOfPixels) / 255.0) + let returnColor = Color(red: 
Float(redTotal) / Float(totalNumberOfPixels) / 255.0, green: Float(greenTotal) / Float(totalNumberOfPixels) / 255.0, blue: Float(blueTotal) / Float(totalNumberOfPixels) / 255.0, alpha: Float(alphaTotal) / Float(totalNumberOfPixels) / 255.0) extractedColorCallback?(returnColor) } } -func averageColorBySequentialReduction(inputFramebuffer:Framebuffer, shader:ShaderProgram, extractAverageOperation:(Framebuffer) -> ()) { +func averageColorBySequentialReduction(inputFramebuffer: Framebuffer, shader: ShaderProgram, extractAverageOperation: (Framebuffer) -> Void) { var uniformSettings = ShaderUniformSettings() let inputSize = Size(inputFramebuffer.size) let numberOfReductionsInX = floor(log(Double(inputSize.width)) / log(4.0)) @@ -58,14 +58,14 @@ func averageColorBySequentialReduction(inputFramebuffer:Framebuffer, shader:Shad inputFramebuffer.lock() var previousFramebuffer = inputFramebuffer for currentReduction in 0.. ())? + public var extractedLuminanceCallback: ((Float) -> Void)? public init() { - super.init(vertexShader:AverageColorVertexShader, fragmentShader:AverageLuminanceFragmentShader) + super.init(vertexShader: AverageColorVertexShader, fragmentShader: AverageLuminanceFragmentShader) } - override func renderFrame() { + override open func renderFrame() { // Reduce to luminance before passing into the downsampling // TODO: Combine this with the first stage of the downsampling by doing reduction here - let luminancePassShader = crashOnShaderCompileFailure("AverageLuminance"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(1), fragmentShader:LuminanceFragmentShader)} - let luminancePassFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:inputFramebuffers[0]!.orientation, size:inputFramebuffers[0]!.size) + let luminancePassShader = crashOnShaderCompileFailure("AverageLuminance") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(1), 
fragmentShader: LuminanceFragmentShader) } + let luminancePassFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: inputFramebuffers[0]!.orientation, size: inputFramebuffers[0]!.size) luminancePassFramebuffer.activateFramebufferForRendering() - renderQuadWithShader(luminancePassShader, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[inputFramebuffers[0]!.texturePropertiesForTargetOrientation(luminancePassFramebuffer.orientation)]) + renderQuadWithShader(luminancePassShader, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [inputFramebuffers[0]!.texturePropertiesForTargetOrientation(luminancePassFramebuffer.orientation)]) - averageColorBySequentialReduction(inputFramebuffer:luminancePassFramebuffer, shader:shader, extractAverageOperation:extractAverageLuminanceFromFramebuffer) + averageColorBySequentialReduction(inputFramebuffer: luminancePassFramebuffer, shader: shader, extractAverageOperation: extractAverageLuminanceFromFramebuffer) releaseIncomingFramebuffers() } - func extractAverageLuminanceFromFramebuffer(_ framebuffer:Framebuffer) { - var data = [UInt8](repeating:0, count:Int(framebuffer.size.width * framebuffer.size.height * 4)) + func extractAverageLuminanceFromFramebuffer(_ framebuffer: Framebuffer) { + var data = [UInt8](repeating: 0, count: Int(framebuffer.size.width * framebuffer.size.height * 4)) glReadPixels(0, 0, framebuffer.size.width, framebuffer.size.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), &data) renderFramebuffer = framebuffer framebuffer.resetRetainCount() diff --git a/framework/Source/Operations/AverageLuminanceThreshold.swift b/framework/Source/Operations/AverageLuminanceThreshold.swift index 8e086c5f..f0cc3a2d 100644 --- a/framework/Source/Operations/AverageLuminanceThreshold.swift +++ b/framework/Source/Operations/AverageLuminanceThreshold.swift @@ -1,5 +1,5 @@ public class AverageLuminanceThreshold: 
OperationGroup { - public var thresholdMultiplier:Float = 1.0 + public var thresholdMultiplier: Float = 1.0 let averageLuminance = AverageLuminanceExtractor() let luminanceThreshold = LuminanceThreshold() @@ -7,13 +7,13 @@ public class AverageLuminanceThreshold: OperationGroup { public override init() { super.init() - averageLuminance.extractedLuminanceCallback = {[weak self] luminance in + averageLuminance.extractedLuminanceCallback = { [weak self] luminance in self?.luminanceThreshold.threshold = (self?.thresholdMultiplier ?? 1.0) * luminance } - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.averageLuminance input --> self.luminanceThreshold --> output } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/BilateralBlur.swift b/framework/Source/Operations/BilateralBlur.swift index 015d2917..2f97d601 100644 --- a/framework/Source/Operations/BilateralBlur.swift +++ b/framework/Source/Operations/BilateralBlur.swift @@ -1,11 +1,12 @@ // TODO: auto-generate shaders for this, per the Gaussian blur method public class BilateralBlur: TwoStageOperation { - public var distanceNormalizationFactor:Float = 8.0 { didSet { uniformSettings["distanceNormalizationFactor"] = distanceNormalizationFactor } } + public var distanceNormalizationFactor: Float = 8.0 { didSet { uniformSettings["distanceNormalizationFactor"] = distanceNormalizationFactor } } public init() { - super.init(vertexShader:BilateralBlurVertexShader, fragmentShader:BilateralBlurFragmentShader) + super.init(vertexShader: BilateralBlurVertexShader, fragmentShader: BilateralBlurFragmentShader) - ({distanceNormalizationFactor = 1.0})() + downsamplingFactor = 4.0 + ({ distanceNormalizationFactor = 1.0 })() } } diff --git a/framework/Source/Operations/BoxBlur.swift b/framework/Source/Operations/BoxBlur.swift index 9e50a90a..d1eb3086 100755 --- a/framework/Source/Operations/BoxBlur.swift +++ b/framework/Source/Operations/BoxBlur.swift @@ -5,13 +5,13 @@ 
import Glibc import Foundation public class BoxBlur: TwoStageOperation { - public var blurRadiusInPixels:Float { + public var blurRadiusInPixels: Float { didSet { - let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit:8.0, override:overrideDownsamplingOptimization) + let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit: 8.0, override: overrideDownsamplingOptimization) sharedImageProcessingContext.runOperationAsynchronously { self.downsamplingFactor = downsamplingFactor let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) - self.shader = crashOnShaderCompileFailure("BoxBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedBoxBlurOfRadius(pixelRadius), fragmentShader:fragmentShaderForOptimizedBoxBlurOfRadius(pixelRadius))} + self.shader = crashOnShaderCompileFailure("BoxBlur") { try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedBoxBlurOfRadius(pixelRadius), fragmentShader: fragmentShaderForOptimizedBoxBlurOfRadius(pixelRadius)) } } } } @@ -19,13 +19,13 @@ public class BoxBlur: TwoStageOperation { public init() { blurRadiusInPixels = 2.0 let pixelRadius = UInt(round(round(Double(blurRadiusInPixels) / 2.0) * 2.0)) - let initialShader = crashOnShaderCompileFailure("BoxBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedBoxBlurOfRadius(pixelRadius), fragmentShader:fragmentShaderForOptimizedBoxBlurOfRadius(pixelRadius))} - super.init(shader:initialShader, numberOfInputs:1) + let initialShader = crashOnShaderCompileFailure("BoxBlur") { try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedBoxBlurOfRadius(pixelRadius), fragmentShader: fragmentShaderForOptimizedBoxBlurOfRadius(pixelRadius)) } + super.init(shader: initialShader, numberOfInputs: 1) } } -func vertexShaderForOptimizedBoxBlurOfRadius(_ radius:UInt) -> String { - guard (radius > 0) else { return 
OneInputVertexShader } +func vertexShaderForOptimizedBoxBlurOfRadius(_ radius: UInt) -> String { + guard radius > 0 else { return OneInputVertexShader } let numberOfOptimizedOffsets = min(radius / 2 + (radius % 2), 7) var shaderString = "attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 blurCoordinates[\(1 + (numberOfOptimizedOffsets * 2))];\n \n void main()\n {\n gl_Position = position;\n \n vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" @@ -40,8 +40,8 @@ func vertexShaderForOptimizedBoxBlurOfRadius(_ radius:UInt) -> String { return shaderString } -func fragmentShaderForOptimizedBoxBlurOfRadius(_ radius:UInt) -> String { - guard (radius > 0) else { return PassthroughFragmentShader } +func fragmentShaderForOptimizedBoxBlurOfRadius(_ radius: UInt) -> String { + guard radius > 0 else { return PassthroughFragmentShader } let numberOfOptimizedOffsets = min(radius / 2 + (radius % 2), 7) let trueNumberOfOptimizedOffsets = radius / 2 + (radius % 2) @@ -62,7 +62,7 @@ func fragmentShaderForOptimizedBoxBlurOfRadius(_ radius:UInt) -> String { } // If the number of required samples exceeds the amount we can pass in via varyings, we have to do dependent texture reads in the fragment shader - if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets) { + if trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets { #if GLES shaderString += "highp vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" #else diff --git a/framework/Source/Operations/BrightnessAdjustment.swift b/framework/Source/Operations/BrightnessAdjustment.swift index c0c2ed03..e8547ba1 100644 --- a/framework/Source/Operations/BrightnessAdjustment.swift +++ b/framework/Source/Operations/BrightnessAdjustment.swift @@ -1,9 +1,9 @@ public class BrightnessAdjustment: BasicOperation { - public var brightness:Float = 0.0 { didSet { uniformSettings["brightness"] = brightness } } + public var brightness: Float = 
0.0 { didSet { uniformSettings["brightness"] = brightness } } public init() { - super.init(fragmentShader:BrightnessFragmentShader, numberOfInputs:1) + super.init(fragmentShader: BrightnessFragmentShader, numberOfInputs: 1) - ({brightness = 1.0})() + ({ brightness = 1.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/BulgeDistortion.swift b/framework/Source/Operations/BulgeDistortion.swift index adf3d2b2..8a577595 100644 --- a/framework/Source/Operations/BulgeDistortion.swift +++ b/framework/Source/Operations/BulgeDistortion.swift @@ -1,13 +1,13 @@ public class BulgeDistortion: BasicOperation { - public var radius:Float = 0.25 { didSet { uniformSettings["radius"] = radius } } - public var scale:Float = 0.5 { didSet { uniformSettings["scale"] = scale } } - public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } + public var radius: Float = 0.25 { didSet { uniformSettings["radius"] = radius } } + public var scale: Float = 0.5 { didSet { uniformSettings["scale"] = scale } } + public var center = Position.center { didSet { uniformSettings["center"] = center } } public init() { - super.init(fragmentShader:BulgeDistortionFragmentShader, numberOfInputs:1) + super.init(fragmentShader: BulgeDistortionFragmentShader, numberOfInputs: 1) - ({radius = 0.25})() - ({scale = 0.5})() - ({center = Position.center})() + ({ radius = 0.25 })() + ({ scale = 0.5 })() + ({ center = Position.center })() } } diff --git a/framework/Source/Operations/CGAColorspaceFilter.swift b/framework/Source/Operations/CGAColorspaceFilter.swift index 6cf16907..4ec6ac75 100644 --- a/framework/Source/Operations/CGAColorspaceFilter.swift +++ b/framework/Source/Operations/CGAColorspaceFilter.swift @@ -1,5 +1,5 @@ public class CGAColorspaceFilter: BasicOperation { public init() { - super.init(fragmentShader:CGAColorspaceFragmentShader, numberOfInputs:1) + super.init(fragmentShader: CGAColorspaceFragmentShader, numberOfInputs: 1) } -} \ No 
newline at end of file +} diff --git a/framework/Source/Operations/CannyEdgeDetection.swift b/framework/Source/Operations/CannyEdgeDetection.swift index fbe1bc48..b347f0d5 100644 --- a/framework/Source/Operations/CannyEdgeDetection.swift +++ b/framework/Source/Operations/CannyEdgeDetection.swift @@ -13,25 +13,25 @@ */ public class CannyEdgeDetection: OperationGroup { - public var blurRadiusInPixels:Float = 2.0 { didSet { gaussianBlur.blurRadiusInPixels = blurRadiusInPixels } } - public var upperThreshold:Float = 0.4 { didSet { directionalNonMaximumSuppression.uniformSettings["upperThreshold"] = upperThreshold } } - public var lowerThreshold:Float = 0.1 { didSet { directionalNonMaximumSuppression.uniformSettings["lowerThreshold"] = lowerThreshold } } + public var blurRadiusInPixels: Float = 2.0 { didSet { gaussianBlur.blurRadiusInPixels = blurRadiusInPixels } } + public var upperThreshold: Float = 0.4 { didSet { directionalNonMaximumSuppression.uniformSettings["upperThreshold"] = upperThreshold } } + public var lowerThreshold: Float = 0.1 { didSet { directionalNonMaximumSuppression.uniformSettings["lowerThreshold"] = lowerThreshold } } let luminance = Luminance() let gaussianBlur = SingleComponentGaussianBlur() - let directionalSobel = TextureSamplingOperation(fragmentShader:DirectionalSobelEdgeDetectionFragmentShader) - let directionalNonMaximumSuppression = TextureSamplingOperation(vertexShader:OneInputVertexShader, fragmentShader:DirectionalNonMaximumSuppressionFragmentShader) - let weakPixelInclusion = TextureSamplingOperation(fragmentShader:WeakPixelInclusionFragmentShader) + let directionalSobel = TextureSamplingOperation(fragmentShader: DirectionalSobelEdgeDetectionFragmentShader) + let directionalNonMaximumSuppression = TextureSamplingOperation(vertexShader: OneInputVertexShader, fragmentShader: DirectionalNonMaximumSuppressionFragmentShader) + let weakPixelInclusion = TextureSamplingOperation(fragmentShader: WeakPixelInclusionFragmentShader) public override 
init() { super.init() - ({blurRadiusInPixels = 2.0})() - ({upperThreshold = 0.4})() - ({lowerThreshold = 0.1})() + ({ blurRadiusInPixels = 2.0 })() + ({ upperThreshold = 0.4 })() + ({ lowerThreshold = 0.1 })() - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.luminance --> self.gaussianBlur --> self.directionalSobel --> self.directionalNonMaximumSuppression --> self.weakPixelInclusion --> output } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ChromaKeyBlend.swift b/framework/Source/Operations/ChromaKeyBlend.swift index 940ad0cf..f4505c6e 100644 --- a/framework/Source/Operations/ChromaKeyBlend.swift +++ b/framework/Source/Operations/ChromaKeyBlend.swift @@ -1,13 +1,13 @@ public class ChromaKeyBlend: BasicOperation { - public var thresholdSensitivity:Float = 0.4 { didSet { uniformSettings["thresholdSensitivity"] = thresholdSensitivity } } - public var smoothing:Float = 0.1 { didSet { uniformSettings["smoothing"] = smoothing } } - public var colorToReplace:Color = Color.green { didSet { uniformSettings["colorToReplace"] = colorToReplace } } + public var thresholdSensitivity: Float = 0.4 { didSet { uniformSettings["thresholdSensitivity"] = thresholdSensitivity } } + public var smoothing: Float = 0.1 { didSet { uniformSettings["smoothing"] = smoothing } } + public var colorToReplace = Color.green { didSet { uniformSettings["colorToReplace"] = colorToReplace } } public init() { - super.init(fragmentShader:ChromaKeyBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: ChromaKeyBlendFragmentShader, numberOfInputs: 2) - ({thresholdSensitivity = 0.4})() - ({smoothing = 0.1})() - ({colorToReplace = Color.green})() + ({ thresholdSensitivity = 0.4 })() + ({ smoothing = 0.1 })() + ({ colorToReplace = Color.green })() } } diff --git a/framework/Source/Operations/ChromaKeying.swift b/framework/Source/Operations/ChromaKeying.swift index 519f29e4..66d5f31e 100644 --- 
a/framework/Source/Operations/ChromaKeying.swift +++ b/framework/Source/Operations/ChromaKeying.swift @@ -1,13 +1,13 @@ public class ChromaKeying: BasicOperation { - public var thresholdSensitivity:Float = 0.4 { didSet { uniformSettings["thresholdSensitivity"] = thresholdSensitivity } } - public var smoothing:Float = 0.1 { didSet { uniformSettings["smoothing"] = smoothing } } - public var colorToReplace:Color = Color.green { didSet { uniformSettings["colorToReplace"] = colorToReplace } } + public var thresholdSensitivity: Float = 0.4 { didSet { uniformSettings["thresholdSensitivity"] = thresholdSensitivity } } + public var smoothing: Float = 0.1 { didSet { uniformSettings["smoothing"] = smoothing } } + public var colorToReplace = Color.green { didSet { uniformSettings["colorToReplace"] = colorToReplace } } public init() { - super.init(fragmentShader:ChromaKeyFragmentShader, numberOfInputs:1) + super.init(fragmentShader: ChromaKeyFragmentShader, numberOfInputs: 1) - ({thresholdSensitivity = 0.4})() - ({smoothing = 0.1})() - ({colorToReplace = Color.green})() + ({ thresholdSensitivity = 0.4 })() + ({ smoothing = 0.1 })() + ({ colorToReplace = Color.green })() } } diff --git a/framework/Source/Operations/CircleGenerator.swift b/framework/Source/Operations/CircleGenerator.swift index 84a5f593..9cbb138b 100644 --- a/framework/Source/Operations/CircleGenerator.swift +++ b/framework/Source/Operations/CircleGenerator.swift @@ -13,15 +13,15 @@ #endif public class CircleGenerator: ImageGenerator { - let circleShader:ShaderProgram + let circleShader: ShaderProgram - public override init(size:Size) { - circleShader = crashOnShaderCompileFailure("CircleGenerator"){try sharedImageProcessingContext.programForVertexShader(CircleVertexShader, fragmentShader:CircleFragmentShader)} + public override init(size: Size) { + circleShader = crashOnShaderCompileFailure("CircleGenerator") { try sharedImageProcessingContext.programForVertexShader(CircleVertexShader, fragmentShader: 
CircleFragmentShader) } circleShader.colorUniformsUseFourComponents = true - super.init(size:size) + super.init(size: size) } - public func renderCircleOfRadius(_ radius:Float, center:Position, circleColor:Color = Color.white, backgroundColor:Color = Color.black) { + public func renderCircleOfRadius(_ radius: Float, center: Position, circleColor: Color = Color.white, backgroundColor: Color = Color.black) { let scaledRadius = radius * 2.0 imageFramebuffer.activateFramebufferForRendering() var uniformSettings = ShaderUniformSettings() @@ -35,7 +35,7 @@ public class CircleGenerator: ImageGenerator { let scaledYRadius = scaledRadius / imageFramebuffer.aspectRatioForRotation(.noRotation) uniformSettings["center"] = Position(convertedCenterX, convertedCenterY) - let circleVertices:[GLfloat] = [GLfloat(convertedCenterX - scaledRadius), GLfloat(convertedCenterY - scaledYRadius), GLfloat(convertedCenterX + scaledRadius), GLfloat(convertedCenterY - scaledYRadius), GLfloat(convertedCenterX - scaledRadius), GLfloat(convertedCenterY + scaledYRadius), GLfloat(convertedCenterX + scaledRadius), GLfloat(convertedCenterY + scaledYRadius)] + let circleVertices: [GLfloat] = [GLfloat(convertedCenterX - scaledRadius), GLfloat(convertedCenterY - scaledYRadius), GLfloat(convertedCenterX + scaledRadius), GLfloat(convertedCenterY - scaledYRadius), GLfloat(convertedCenterX - scaledRadius), GLfloat(convertedCenterY + scaledYRadius), GLfloat(convertedCenterX + scaledRadius), GLfloat(convertedCenterY + scaledYRadius)] clearFramebufferWithColor(backgroundColor) circleShader.use() diff --git a/framework/Source/Operations/ClosingFilter.swift b/framework/Source/Operations/ClosingFilter.swift index 966b7157..70d6fa87 100644 --- a/framework/Source/Operations/ClosingFilter.swift +++ b/framework/Source/Operations/ClosingFilter.swift @@ -1,5 +1,5 @@ public class ClosingFilter: OperationGroup { - public var radius:UInt { + public var radius: UInt { didSet { erosion.radius = radius dilation.radius = 
radius @@ -12,8 +12,8 @@ public class ClosingFilter: OperationGroup { radius = 1 super.init() - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.dilation --> self.erosion --> output } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ColorBlend.swift b/framework/Source/Operations/ColorBlend.swift index 4a81b3f3..5bf347a1 100644 --- a/framework/Source/Operations/ColorBlend.swift +++ b/framework/Source/Operations/ColorBlend.swift @@ -1,5 +1,5 @@ public class ColorBlend: BasicOperation { public init() { - super.init(fragmentShader:ColorBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: ColorBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ColorBurnBlend.swift b/framework/Source/Operations/ColorBurnBlend.swift index ea81cd8e..a27b7619 100644 --- a/framework/Source/Operations/ColorBurnBlend.swift +++ b/framework/Source/Operations/ColorBurnBlend.swift @@ -1,5 +1,5 @@ public class ColorBurnBlend: BasicOperation { public init() { - super.init(fragmentShader:ColorBurnBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: ColorBurnBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ColorDodgeBlend.swift b/framework/Source/Operations/ColorDodgeBlend.swift index 79fe7bfd..900d6411 100644 --- a/framework/Source/Operations/ColorDodgeBlend.swift +++ b/framework/Source/Operations/ColorDodgeBlend.swift @@ -1,5 +1,5 @@ public class ColorDodgeBlend: BasicOperation { public init() { - super.init(fragmentShader:ColorDodgeBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: ColorDodgeBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ColorInversion.swift b/framework/Source/Operations/ColorInversion.swift index 7f0ad408..8fcffe65 100644 --- 
a/framework/Source/Operations/ColorInversion.swift +++ b/framework/Source/Operations/ColorInversion.swift @@ -1,5 +1,5 @@ public class ColorInversion: BasicOperation { public init() { - super.init(fragmentShader:ColorInvertFragmentShader, numberOfInputs:1) + super.init(fragmentShader: ColorInvertFragmentShader, numberOfInputs: 1) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ColorLocalBinaryPattern.swift b/framework/Source/Operations/ColorLocalBinaryPattern.swift index 20cbe9e1..d74bfb26 100644 --- a/framework/Source/Operations/ColorLocalBinaryPattern.swift +++ b/framework/Source/Operations/ColorLocalBinaryPattern.swift @@ -14,6 +14,6 @@ public class ColorLocalBinaryPattern: TextureSamplingOperation { public init() { - super.init(fragmentShader:ColorLocalBinaryPatternFragmentShader) + super.init(fragmentShader: ColorLocalBinaryPatternFragmentShader) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ColorMatrixFilter.swift b/framework/Source/Operations/ColorMatrixFilter.swift index a8a344ff..a35075f5 100644 --- a/framework/Source/Operations/ColorMatrixFilter.swift +++ b/framework/Source/Operations/ColorMatrixFilter.swift @@ -1,12 +1,11 @@ public class ColorMatrixFilter: BasicOperation { - public var intensity:Float = 1.0 { didSet { uniformSettings["intensity"] = intensity } } - public var colorMatrix:Matrix4x4 = Matrix4x4.identity { didSet { uniformSettings["colorMatrix"] = colorMatrix } } + public var intensity: Float = 1.0 { didSet { uniformSettings["intensity"] = intensity } } + public var colorMatrix = Matrix4x4.identity { didSet { uniformSettings["colorMatrix"] = colorMatrix } } public init() { + super.init(fragmentShader: ColorMatrixFragmentShader, numberOfInputs: 1) - super.init(fragmentShader:ColorMatrixFragmentShader, numberOfInputs:1) - - ({intensity = 1.0})() - ({colorMatrix = Matrix4x4.identity})() + ({ intensity = 1.0 })() + ({ colorMatrix = Matrix4x4.identity })() } } diff --git 
a/framework/Source/Operations/ColourFASTFeatureDetection.swift b/framework/Source/Operations/ColourFASTFeatureDetection.swift index 8582d0de..1e0887cb 100644 --- a/framework/Source/Operations/ColourFASTFeatureDetection.swift +++ b/framework/Source/Operations/ColourFASTFeatureDetection.swift @@ -8,17 +8,17 @@ // TODO: Have the blur radius and texel spacing be tied together into a general sampling distance scale factor public class ColourFASTFeatureDetection: OperationGroup { - public var blurRadiusInPixels:Float = 2.0 { didSet { boxBlur.blurRadiusInPixels = blurRadiusInPixels } } + public var blurRadiusInPixels: Float = 2.0 { didSet { boxBlur.blurRadiusInPixels = blurRadiusInPixels } } let boxBlur = BoxBlur() - let colourFASTFeatureDescriptors = TextureSamplingOperation(vertexShader:ColourFASTDecriptorVertexShader, fragmentShader:ColourFASTDecriptorFragmentShader, numberOfInputs:2) + let colourFASTFeatureDescriptors = TextureSamplingOperation(vertexShader: ColourFASTDecriptorVertexShader, fragmentShader: ColourFASTDecriptorFragmentShader, numberOfInputs: 2) public override init() { super.init() - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.colourFASTFeatureDescriptors input --> self.boxBlur --> self.colourFASTFeatureDescriptors --> output } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ContrastAdjustment.swift b/framework/Source/Operations/ContrastAdjustment.swift index 3b3ba561..0319eb14 100644 --- a/framework/Source/Operations/ContrastAdjustment.swift +++ b/framework/Source/Operations/ContrastAdjustment.swift @@ -1,9 +1,9 @@ public class ContrastAdjustment: BasicOperation { - public var contrast:Float = 1.0 { didSet { uniformSettings["contrast"] = contrast } } + public var contrast: Float = 1.0 { didSet { uniformSettings["contrast"] = contrast } } public init() { - super.init(fragmentShader:ContrastFragmentShader, numberOfInputs:1) + super.init(fragmentShader: ContrastFragmentShader, 
numberOfInputs: 1) - ({contrast = 1.0})() + ({ contrast = 1.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Convolution3x3.swift b/framework/Source/Operations/Convolution3x3.swift index 2ea7a6bc..2eaa9664 100644 --- a/framework/Source/Operations/Convolution3x3.swift +++ b/framework/Source/Operations/Convolution3x3.swift @@ -1,9 +1,9 @@ public class Convolution3x3: TextureSamplingOperation { - public var convolutionKernel:Matrix3x3 = Matrix3x3.centerOnly { didSet { uniformSettings["convolutionMatrix"] = convolutionKernel } } + public var convolutionKernel = Matrix3x3.centerOnly { didSet { uniformSettings["convolutionMatrix"] = convolutionKernel } } public init() { - super.init(fragmentShader:Convolution3x3FragmentShader) + super.init(fragmentShader: Convolution3x3FragmentShader) - ({convolutionKernel = Matrix3x3.centerOnly})() + ({ convolutionKernel = Matrix3x3.centerOnly })() } } diff --git a/framework/Source/Operations/Crop.swift b/framework/Source/Operations/Crop.swift index eb452c53..a069f823 100644 --- a/framework/Source/Operations/Crop.swift +++ b/framework/Source/Operations/Crop.swift @@ -1,41 +1,48 @@ // TODO: Have this adjust in real time to changing crop sizes // TODO: Verify at all orientations -public class Crop: BasicOperation { +open class Crop: BasicOperation { public var cropSizeInPixels: Size? public var locationOfCropInPixels: Position? public init() { - super.init(fragmentShader:PassthroughFragmentShader, numberOfInputs:1) + super.init(fragmentShader: PassthroughFragmentShader, numberOfInputs: 1) } - override func renderFrame() { - let inputFramebuffer:Framebuffer = inputFramebuffers[0]! + override open func renderFrame() { + let inputFramebuffer: Framebuffer = inputFramebuffers[0]! 
let inputSize = inputFramebuffer.sizeForTargetOrientation(.portrait) - let finalCropSize:GLSize - let normalizedOffsetFromOrigin:Position + let finalCropSize: GLSize + let normalizedOffsetFromOrigin: Position if let cropSize = cropSizeInPixels, let locationOfCrop = locationOfCropInPixels { let glCropSize = GLSize(cropSize) - finalCropSize = GLSize(width:min(inputSize.width, glCropSize.width), height:min(inputSize.height, glCropSize.height)) + finalCropSize = GLSize(width: min(inputSize.width, glCropSize.width), height: min(inputSize.height, glCropSize.height)) normalizedOffsetFromOrigin = Position(locationOfCrop.x / Float(inputSize.width), locationOfCrop.y / Float(inputSize.height)) } else if let cropSize = cropSizeInPixels { let glCropSize = GLSize(cropSize) - finalCropSize = GLSize(width:min(inputSize.width, glCropSize.width), height:min(inputSize.height, glCropSize.height)) + finalCropSize = GLSize(width: min(inputSize.width, glCropSize.width), height: min(inputSize.height, glCropSize.height)) normalizedOffsetFromOrigin = Position(Float(inputSize.width / 2 - finalCropSize.width / 2) / Float(inputSize.width), Float(inputSize.height / 2 - finalCropSize.height / 2) / Float(inputSize.height)) } else { finalCropSize = inputSize - normalizedOffsetFromOrigin = Position.zero + normalizedOffsetFromOrigin = Position.zero } - let normalizedCropSize = Size(width:Float(finalCropSize.width) / Float(inputSize.width), height:Float(finalCropSize.height) / Float(inputSize.height)) - - renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:finalCropSize, stencil:false) + let normalizedCropSize = Size(width: Float(finalCropSize.width) / Float(inputSize.width), height: Float(finalCropSize.height) / Float(inputSize.height)) + + let bufferSize: GLSize + if abs(abs(Double(inputSize.width) / Double(inputSize.height)) - abs(Double(finalCropSize.width) / Double(finalCropSize.height))) < 0.01 { + bufferSize = inputSize 
+ } else { + bufferSize = finalCropSize + } + + renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: bufferSize, stencil: false) - let textureProperties = InputTextureProperties(textureCoordinates:inputFramebuffer.orientation.rotationNeededForOrientation(.portrait).croppedTextureCoordinates(offsetFromOrigin:normalizedOffsetFromOrigin, cropSize:normalizedCropSize), texture:inputFramebuffer.texture) + let textureProperties = InputTextureProperties(textureCoordinates: inputFramebuffer.orientation.rotationNeededForOrientation(.portrait).croppedTextureCoordinates(offsetFromOrigin: normalizedOffsetFromOrigin, cropSize: normalizedCropSize), texture: inputFramebuffer.texture) renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(backgroundColor) - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[textureProperties]) + renderQuadWithShader(shader, uniformSettings: uniformSettings, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [textureProperties]) releaseIncomingFramebuffers() } } diff --git a/framework/Source/Operations/CrosshairGenerator.swift b/framework/Source/Operations/CrosshairGenerator.swift index 0960a3ae..6c499591 100644 --- a/framework/Source/Operations/CrosshairGenerator.swift +++ b/framework/Source/Operations/CrosshairGenerator.swift @@ -13,23 +13,21 @@ #endif public class CrosshairGenerator: ImageGenerator { - - public var crosshairWidth:Float = 5.0 { didSet { uniformSettings["crosshairWidth"] = crosshairWidth } } - public var crosshairColor:Color = Color.green { didSet { uniformSettings["crosshairColor"] = crosshairColor } } + public var crosshairWidth: Float = 5.0 { didSet { uniformSettings["crosshairWidth"] = crosshairWidth } } + public var crosshairColor = Color.green { didSet { uniformSettings["crosshairColor"] = crosshairColor } } 
- let crosshairShader:ShaderProgram + let crosshairShader: ShaderProgram var uniformSettings = ShaderUniformSettings() - public override init(size:Size) { - crosshairShader = crashOnShaderCompileFailure("CrosshairGenerator"){try sharedImageProcessingContext.programForVertexShader(CrosshairVertexShader, fragmentShader:CrosshairFragmentShader)} - super.init(size:size) + public override init(size: Size) { + crosshairShader = crashOnShaderCompileFailure("CrosshairGenerator") { try sharedImageProcessingContext.programForVertexShader(CrosshairVertexShader, fragmentShader: CrosshairFragmentShader) } + super.init(size: size) - ({crosshairWidth = 5.0})() - ({crosshairColor = Color.green})() + ({ crosshairWidth = 5.0 })() + ({ crosshairColor = Color.green })() } - - public func renderCrosshairs(_ positions:[Position]) { + public func renderCrosshairs(_ positions: [Position]) { imageFramebuffer.activateFramebufferForRendering() imageFramebuffer.timingStyle = .stillImage #if GL @@ -46,7 +44,7 @@ public class CrosshairGenerator: ImageGenerator { guard let positionAttribute = crosshairShader.attributeIndex("position") else { fatalError("A position attribute was missing from the shader program during rendering.") } - let convertedPositions = positions.flatMap{$0.toGLArray()} + let convertedPositions = positions.flatMap { $0.toGLArray() } glVertexAttribPointer(positionAttribute, 2, GLenum(GL_FLOAT), 0, 0, convertedPositions) glDrawArrays(GLenum(GL_POINTS), 0, GLsizei(positions.count)) diff --git a/framework/Source/Operations/Crosshatch.swift b/framework/Source/Operations/Crosshatch.swift index a12b1cb9..77bc1167 100644 --- a/framework/Source/Operations/Crosshatch.swift +++ b/framework/Source/Operations/Crosshatch.swift @@ -1,11 +1,11 @@ public class Crosshatch: BasicOperation { - public var crossHatchSpacing:Float = 0.03 { didSet { uniformSettings["crossHatchSpacing"] = crossHatchSpacing } } - public var lineWidth:Float = 0.003 { didSet { uniformSettings["lineWidth"] = lineWidth } 
} + public var crossHatchSpacing: Float = 0.03 { didSet { uniformSettings["crossHatchSpacing"] = crossHatchSpacing } } + public var lineWidth: Float = 0.003 { didSet { uniformSettings["lineWidth"] = lineWidth } } public init() { - super.init(fragmentShader:CrosshatchFragmentShader, numberOfInputs:1) + super.init(fragmentShader: CrosshatchFragmentShader, numberOfInputs: 1) - ({crossHatchSpacing = 0.03})() - ({lineWidth = 0.003})() + ({ crossHatchSpacing = 0.03 })() + ({ lineWidth = 0.003 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/DarkenBlend.swift b/framework/Source/Operations/DarkenBlend.swift index c90c7fee..c446433b 100644 --- a/framework/Source/Operations/DarkenBlend.swift +++ b/framework/Source/Operations/DarkenBlend.swift @@ -1,5 +1,5 @@ public class DarkenBlend: BasicOperation { public init() { - super.init(fragmentShader:DarkenBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: DarkenBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/DifferenceBlend.swift b/framework/Source/Operations/DifferenceBlend.swift index 7e3ba524..58fd559b 100644 --- a/framework/Source/Operations/DifferenceBlend.swift +++ b/framework/Source/Operations/DifferenceBlend.swift @@ -1,5 +1,5 @@ public class DifferenceBlend: BasicOperation { public init() { - super.init(fragmentShader:DifferenceBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: DifferenceBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Dilation.swift b/framework/Source/Operations/Dilation.swift index 2e180150..2632518b 100644 --- a/framework/Source/Operations/Dilation.swift +++ b/framework/Source/Operations/Dilation.swift @@ -1,24 +1,24 @@ public class Dilation: TwoStageOperation { - public var radius:UInt { + public var radius: UInt { didSet { switch radius { case 0, 1: - shader = 
crashOnShaderCompileFailure("Dilation"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation1VertexShader, fragmentShader:Dilation1FragmentShader)} + shader = crashOnShaderCompileFailure("Dilation") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation1VertexShader, fragmentShader: Dilation1FragmentShader) } case 2: - shader = crashOnShaderCompileFailure("Dilation"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation2VertexShader, fragmentShader:Dilation2FragmentShader)} + shader = crashOnShaderCompileFailure("Dilation") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation2VertexShader, fragmentShader: Dilation2FragmentShader) } case 3: - shader = crashOnShaderCompileFailure("Dilation"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation3VertexShader, fragmentShader:Dilation3FragmentShader)} + shader = crashOnShaderCompileFailure("Dilation") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation3VertexShader, fragmentShader: Dilation3FragmentShader) } case 4: - shader = crashOnShaderCompileFailure("Dilation"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation4VertexShader, fragmentShader:Dilation4FragmentShader)} + shader = crashOnShaderCompileFailure("Dilation") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation4VertexShader, fragmentShader: Dilation4FragmentShader) } default: - shader = crashOnShaderCompileFailure("Dilation"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation4VertexShader, fragmentShader:Dilation4FragmentShader)} + shader = crashOnShaderCompileFailure("Dilation") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation4VertexShader, fragmentShader: Dilation4FragmentShader) } } } } public init() { radius = 1 - let initialShader = crashOnShaderCompileFailure("Dilation"){try 
sharedImageProcessingContext.programForVertexShader(ErosionDilation1VertexShader, fragmentShader:Dilation1FragmentShader)} - super.init(shader:initialShader, numberOfInputs:1) + let initialShader = crashOnShaderCompileFailure("Dilation") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation1VertexShader, fragmentShader: Dilation1FragmentShader) } + super.init(shader: initialShader, numberOfInputs: 1) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/DissolveBlend.swift b/framework/Source/Operations/DissolveBlend.swift index 0b4f5d51..51d788e4 100755 --- a/framework/Source/Operations/DissolveBlend.swift +++ b/framework/Source/Operations/DissolveBlend.swift @@ -1,9 +1,9 @@ public class DissolveBlend: BasicOperation { - public var mix:Float = 0.5 { didSet { uniformSettings["mixturePercent"] = mix } } + public var mix: Float = 0.5 { didSet { uniformSettings["mixturePercent"] = mix } } public init() { - super.init(fragmentShader:DissolveBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: DissolveBlendFragmentShader, numberOfInputs: 2) - ({mix = 0.5})() + ({ mix = 0.5 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/DivideBlend.swift b/framework/Source/Operations/DivideBlend.swift index 599f9526..6df6d537 100644 --- a/framework/Source/Operations/DivideBlend.swift +++ b/framework/Source/Operations/DivideBlend.swift @@ -1,5 +1,5 @@ public class DivideBlend: BasicOperation { public init() { - super.init(fragmentShader:DivideBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: DivideBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/EmbossFilter.swift b/framework/Source/Operations/EmbossFilter.swift index 16e6cbe6..5cc4e81a 100644 --- a/framework/Source/Operations/EmbossFilter.swift +++ b/framework/Source/Operations/EmbossFilter.swift @@ -1,7 +1,7 @@ -public class EmbossFilter : Convolution3x3 { - 
public var intensity:Float = 1.0 { +public class EmbossFilter: Convolution3x3 { + public var intensity: Float = 1.0 { didSet { - self.convolutionKernel = Matrix3x3(rowMajorValues:[ + self.convolutionKernel = Matrix3x3(rowMajorValues: [ intensity * (-2.0), -intensity, 0.0, -intensity, 1.0, intensity, 0.0, intensity, intensity * 2.0]) @@ -11,6 +11,6 @@ public class EmbossFilter : Convolution3x3 { public override init() { super.init() - ({intensity = 1.0})() + ({ intensity = 1.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Erosion.swift b/framework/Source/Operations/Erosion.swift index e5d7ccd6..590f124e 100644 --- a/framework/Source/Operations/Erosion.swift +++ b/framework/Source/Operations/Erosion.swift @@ -1,24 +1,24 @@ public class Erosion: TwoStageOperation { - public var radius:UInt { + public var radius: UInt { didSet { switch radius { case 0, 1: - shader = crashOnShaderCompileFailure("Erosion"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation1VertexShader, fragmentShader:Erosion1FragmentShader)} + shader = crashOnShaderCompileFailure("Erosion") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation1VertexShader, fragmentShader: Erosion1FragmentShader) } case 2: - shader = crashOnShaderCompileFailure("Erosion"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation2VertexShader, fragmentShader:Erosion2FragmentShader)} + shader = crashOnShaderCompileFailure("Erosion") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation2VertexShader, fragmentShader: Erosion2FragmentShader) } case 3: - shader = crashOnShaderCompileFailure("Erosion"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation3VertexShader, fragmentShader:Erosion3FragmentShader)} + shader = crashOnShaderCompileFailure("Erosion") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation3VertexShader, fragmentShader: Erosion3FragmentShader) } case 4: - 
shader = crashOnShaderCompileFailure("Erosion"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation4VertexShader, fragmentShader:Erosion4FragmentShader)} + shader = crashOnShaderCompileFailure("Erosion") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation4VertexShader, fragmentShader: Erosion4FragmentShader) } default: - shader = crashOnShaderCompileFailure("Erosion"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation4VertexShader, fragmentShader:Erosion4FragmentShader)} + shader = crashOnShaderCompileFailure("Erosion") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation4VertexShader, fragmentShader: Erosion4FragmentShader) } } } } public init() { radius = 1 - let initialShader = crashOnShaderCompileFailure("Erosion"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation1VertexShader, fragmentShader:Erosion1FragmentShader)} - super.init(shader:initialShader, numberOfInputs:1) + let initialShader = crashOnShaderCompileFailure("Erosion") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation1VertexShader, fragmentShader: Erosion1FragmentShader) } + super.init(shader: initialShader, numberOfInputs: 1) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ExclusionBlend.swift b/framework/Source/Operations/ExclusionBlend.swift index 52123038..8a0755c3 100644 --- a/framework/Source/Operations/ExclusionBlend.swift +++ b/framework/Source/Operations/ExclusionBlend.swift @@ -1,5 +1,5 @@ public class ExclusionBlend: BasicOperation { public init() { - super.init(fragmentShader:ExclusionBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: ExclusionBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ExposureAdjustment.swift b/framework/Source/Operations/ExposureAdjustment.swift index 6f87488a..26824baa 100644 --- 
a/framework/Source/Operations/ExposureAdjustment.swift +++ b/framework/Source/Operations/ExposureAdjustment.swift @@ -1,9 +1,9 @@ public class ExposureAdjustment: BasicOperation { - public var exposure:Float = 0.0 { didSet { uniformSettings["exposure"] = exposure } } + public var exposure: Float = 0.0 { didSet { uniformSettings["exposure"] = exposure } } public init() { - super.init(fragmentShader:ExposureFragmentShader, numberOfInputs:1) + super.init(fragmentShader: ExposureFragmentShader, numberOfInputs: 1) - ({exposure = 0.0})() + ({ exposure = 0.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/FalseColor.swift b/framework/Source/Operations/FalseColor.swift index 36dcc112..f55d23b2 100644 --- a/framework/Source/Operations/FalseColor.swift +++ b/framework/Source/Operations/FalseColor.swift @@ -1,11 +1,11 @@ public class FalseColor: BasicOperation { - public var firstColor:Color = Color(red:0.0, green:0.0, blue:0.5, alpha:1.0) { didSet { uniformSettings["firstColor"] = firstColor } } - public var secondColor:Color = Color.red { didSet { uniformSettings["secondColor"] = secondColor } } + public var firstColor = Color(red: 0.0, green: 0.0, blue: 0.5, alpha: 1.0) { didSet { uniformSettings["firstColor"] = firstColor } } + public var secondColor = Color.red { didSet { uniformSettings["secondColor"] = secondColor } } public init() { - super.init(fragmentShader:FalseColorFragmentShader, numberOfInputs:1) + super.init(fragmentShader: FalseColorFragmentShader, numberOfInputs: 1) - ({firstColor = Color(red:0.0, green:0.0, blue:0.5, alpha:1.0)})() - ({secondColor = Color.red})() + ({ firstColor = Color(red: 0.0, green: 0.0, blue: 0.5, alpha: 1.0) })() + ({ secondColor = Color.red })() } } diff --git a/framework/Source/Operations/GammaAdjustment.swift b/framework/Source/Operations/GammaAdjustment.swift index 6db7bcf3..413419cd 100644 --- a/framework/Source/Operations/GammaAdjustment.swift +++ 
b/framework/Source/Operations/GammaAdjustment.swift @@ -1,9 +1,9 @@ public class GammaAdjustment: BasicOperation { - public var gamma:Float = 1.0 { didSet { uniformSettings["gamma"] = gamma } } + public var gamma: Float = 1.0 { didSet { uniformSettings["gamma"] = gamma } } public init() { - super.init(fragmentShader:GammaFragmentShader, numberOfInputs:1) + super.init(fragmentShader: GammaFragmentShader, numberOfInputs: 1) - ({gamma = 1.0})() + ({ gamma = 1.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/GaussianBlur.swift b/framework/Source/Operations/GaussianBlur.swift index bab72ebc..7843f21e 100755 --- a/framework/Source/Operations/GaussianBlur.swift +++ b/framework/Source/Operations/GaussianBlur.swift @@ -6,22 +6,41 @@ let M_PI = 3.14159265359 // TODO: remove this once Foundation pulls this in on L import Foundation public class GaussianBlur: TwoStageOperation { - public var blurRadiusInPixels:Float { + public var blurRadiusInPixels: Float { didSet { - let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit:8.0, override:overrideDownsamplingOptimization) + let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit: 8.0, override: overrideDownsamplingOptimization) sharedImageProcessingContext.runOperationAsynchronously { self.downsamplingFactor = downsamplingFactor let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) - self.shader = crashOnShaderCompileFailure("GaussianBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma)), fragmentShader:fragmentShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma)))} + self.shader = crashOnShaderCompileFailure("GaussianBlur") { try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma: Double(sigma), luminanceThreshold: self.luminanceThreshold), 
fragmentShader: fragmentShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma: Double(sigma), luminanceThreshold: self.luminanceThreshold)) } } } } - public init() { - blurRadiusInPixels = 2.0 - let pixelRadius = pixelRadiusForBlurSigma(round(Double(blurRadiusInPixels))) - let initialShader = crashOnShaderCompileFailure("GaussianBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:2.0), fragmentShader:fragmentShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:2.0))} - super.init(shader:initialShader, numberOfInputs:1) + public var luminanceThreshold: Float? = nil { + didSet { + guard luminanceThreshold != oldValue else { return } + uniformSettings["luminanceThreshold"] = luminanceThreshold + let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit: 8.0, override: overrideDownsamplingOptimization) + sharedImageProcessingContext.runOperationAsynchronously { + self.downsamplingFactor = downsamplingFactor + let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) + self.shader = crashOnShaderCompileFailure("GaussianBlur") { try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma: Double(sigma), luminanceThreshold: self.luminanceThreshold), fragmentShader: fragmentShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma: Double(sigma), luminanceThreshold: self.luminanceThreshold)) } + } + } + } + + public init(blurRadiusInPixels: Float = 2.0, luminanceThreshold: Float? 
= nil) { + self.blurRadiusInPixels = blurRadiusInPixels + self.luminanceThreshold = luminanceThreshold + let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit: 8.0, override: false) + let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) + let initialShader = crashOnShaderCompileFailure("GaussianBlur") { try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma: Double(sigma), luminanceThreshold: luminanceThreshold), fragmentShader: fragmentShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma: Double(sigma), luminanceThreshold: luminanceThreshold)) } + super.init(shader: initialShader, numberOfInputs: 1) + self.downsamplingFactor = downsamplingFactor + if let luminanceThreshold = luminanceThreshold { + self.uniformSettings["luminanceThreshold"] = luminanceThreshold + } } } @@ -29,22 +48,21 @@ public class GaussianBlur: TwoStageOperation { // MARK: - // MARK: Blur sizing calculations -func sigmaAndDownsamplingForBlurRadius(_ radius:Float, limit:Float, override:Bool = false) -> (sigma:Float, downsamplingFactor:Float?) { +func sigmaAndDownsamplingForBlurRadius(_ radius: Float, limit: Float, override: Bool = false) -> (sigma: Float, downsamplingFactor: Float?) 
{ // For now, only do integral sigmas let startingRadius = Float(round(Double(radius))) - guard ((startingRadius > limit) && (!override)) else { return (sigma:startingRadius, downsamplingFactor:nil) } + guard (startingRadius > limit) && (!override) else { return (sigma:startingRadius, downsamplingFactor:nil) } return (sigma:limit, downsamplingFactor:startingRadius / limit) } - // inputRadius for Core Image's CIGaussianBlur is really sigma in the Gaussian equation, so I'm using that for my blur radius, to be consistent -func pixelRadiusForBlurSigma(_ sigma:Double) -> UInt { +func pixelRadiusForBlurSigma(_ sigma: Double) -> UInt { // 7.0 is the limit for blur size for hardcoded varying offsets let minimumWeightToFindEdgeOfSamplingArea = 1.0 / 256.0 - var calculatedSampleRadius:UInt = 0 - if (sigma >= 1.0) { // Avoid a divide-by-zero error here + var calculatedSampleRadius: UInt = 0 + if sigma >= 1.0 { // Avoid a divide-by-zero error here // Calculate the number of pixels to sample from by setting a bottom limit for the contribution of the outermost pixel calculatedSampleRadius = UInt(floor(sqrt(-2.0 * pow(sigma, 2.0) * log(minimumWeightToFindEdgeOfSamplingArea * sqrt(2.0 * .pi * pow(sigma, 2.0))) ))) calculatedSampleRadius += calculatedSampleRadius % 2 // There's nothing to gain from handling odd radius sizes, due to the optimizations I use @@ -56,46 +74,46 @@ func pixelRadiusForBlurSigma(_ sigma:Double) -> UInt { // MARK: - // MARK: Standard Gaussian blur shaders -func standardGaussianWeightsForRadius(_ blurRadius:UInt, sigma:Double) -> [Double] { +func standardGaussianWeightsForRadius(_ blurRadius: UInt, sigma: Double) -> [Double] { var gaussianWeights = [Double]() var sumOfWeights = 0.0 for gaussianWeightIndex in 0...blurRadius { let weight = (1.0 / sqrt(2.0 * .pi * pow(sigma, 2.0))) * exp(-pow(Double(gaussianWeightIndex), 2.0) / (2.0 * pow(sigma, 2.0))) gaussianWeights.append(weight) - if (gaussianWeightIndex == 0) { + if gaussianWeightIndex == 0 { sumOfWeights 
+= weight } else { sumOfWeights += (weight * 2.0) } } - return gaussianWeights.map{$0 / sumOfWeights} + return gaussianWeights.map { $0 / sumOfWeights } } -func vertexShaderForStandardGaussianBlurOfRadius(_ radius:UInt, sigma:Double) -> String { - guard (radius > 0) else { return OneInputVertexShader } +func vertexShaderForStandardGaussianBlurOfRadius(_ radius: UInt, sigma: Double) -> String { + guard radius > 0 else { return OneInputVertexShader } let numberOfBlurCoordinates = radius * 2 + 1 - var shaderString = "attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 blurCoordinates[\(numberOfBlurCoordinates)];\n \n void main()\n {\n gl_Position = position;\n \n vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" + var shaderString = "varying vec2 textureCoordinate;\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 blurCoordinates[\(numberOfBlurCoordinates)];\n \n void main()\n {\n gl_Position = position;\n \n vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" for currentBlurCoordinateIndex in 0.. 
0) { + } else if offsetFromCenter > 0 { shaderString += "blurCoordinates[\(currentBlurCoordinateIndex)] = inputTextureCoordinate.xy + singleStepOffset * \(Float(offsetFromCenter));\n" } else { shaderString += "blurCoordinates[\(currentBlurCoordinateIndex)] = inputTextureCoordinate.xy;\n" } } - shaderString += "}\n" + shaderString += "textureCoordinate = inputTextureCoordinate.xy;\n}\n" return shaderString } -func fragmentShaderForStandardGaussianBlurOfRadius(_ radius:UInt, sigma:Double) -> String { - guard (radius > 0) else { return PassthroughFragmentShader } +func fragmentShaderForStandardGaussianBlurOfRadius(_ radius: UInt, sigma: Double) -> String { + guard radius > 0 else { return PassthroughFragmentShader } - let gaussianWeights = standardGaussianWeightsForRadius(radius, sigma:sigma) + let gaussianWeights = standardGaussianWeightsForRadius(radius, sigma: sigma) let numberOfBlurCoordinates = radius * 2 + 1 #if GLES @@ -106,7 +124,7 @@ func fragmentShaderForStandardGaussianBlurOfRadius(_ radius:UInt, sigma:Double) for currentBlurCoordinateIndex in 0.. [Double] { - let standardWeights = standardGaussianWeightsForRadius(blurRadius, sigma:sigma) +func optimizedGaussianOffsetsForRadius(_ blurRadius: UInt, sigma: Double) -> [Double] { + let standardWeights = standardGaussianWeightsForRadius(blurRadius, sigma: sigma) let numberOfOptimizedOffsets = min(blurRadius / 2 + (blurRadius % 2), 7) var optimizedOffsets = [Double]() @@ -135,33 +153,46 @@ func optimizedGaussianOffsetsForRadius(_ blurRadius:UInt, sigma:Double) -> [Doub return optimizedOffsets } -func vertexShaderForOptimizedGaussianBlurOfRadius(_ radius:UInt, sigma:Double) -> String { - guard (radius > 0) else { return OneInputVertexShader } +func vertexShaderForOptimizedGaussianBlurOfRadius(_ radius: UInt, sigma: Double, luminanceThreshold: Float? 
= nil) -> String { + guard radius > 0 else { return OneInputVertexShader } - let optimizedOffsets = optimizedGaussianOffsetsForRadius(radius, sigma:sigma) + let optimizedOffsets = optimizedGaussianOffsetsForRadius(radius, sigma: sigma) let numberOfOptimizedOffsets = optimizedOffsets.count // Header - var shaderString = "attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 blurCoordinates[\((1 + (numberOfOptimizedOffsets * 2)))];\n \n void main()\n {\n gl_Position = position;\n \n vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" + var shaderString: String + if luminanceThreshold != nil { + shaderString = "varying vec2 textureCoordinate;\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 blurCoordinates[\((1 + (numberOfOptimizedOffsets * 2)))];\n \n void main()\n {\n gl_Position = position;\n \n vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" + } else { + shaderString = "attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 blurCoordinates[\((1 + (numberOfOptimizedOffsets * 2)))];\n \n void main()\n {\n gl_Position = position;\n \n vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" + } shaderString += "blurCoordinates[0] = inputTextureCoordinate.xy;\n" for currentOptimizedOffset in 0.. String { - guard (radius > 0) else { return PassthroughFragmentShader } +func fragmentShaderForOptimizedGaussianBlurOfRadius(_ radius: UInt, sigma: Double, luminanceThreshold: Float? 
= nil) -> String { + guard radius > 0 else { return PassthroughFragmentShader } - let standardWeights = standardGaussianWeightsForRadius(radius, sigma:sigma) + let standardWeights = standardGaussianWeightsForRadius(radius, sigma: sigma) let numberOfOptimizedOffsets = min(radius / 2 + (radius % 2), 7) let trueNumberOfOptimizedOffsets = radius / 2 + (radius % 2) #if GLES - var shaderString = "uniform sampler2D inputImageTexture;\n uniform highp float texelWidth;\n uniform highp float texelHeight;\n \n varying highp vec2 blurCoordinates[\(1 + (numberOfOptimizedOffsets * 2))];\n \n void main()\n {\n lowp vec4 sum = vec4(0.0);\n" + var shaderString: String + if luminanceThreshold != nil { + shaderString = "varying highp vec2 textureCoordinate;\n uniform highp float luminanceThreshold;\n const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n uniform sampler2D inputImageTexture;\n uniform highp float texelWidth;\n uniform highp float texelHeight;\n \n varying highp vec2 blurCoordinates[\(1 + (numberOfOptimizedOffsets * 2))];\n \n void main()\n {\n highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n highp float luminance = dot(textureColor.rgb, W);\n highp float thresholdResult = step(luminanceThreshold, luminance);\nif (thresholdResult == 0.0) {\ngl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n return;\n } \n lowp vec4 sum = vec4(0.0);\n" + } else { + shaderString = "uniform sampler2D inputImageTexture;\n uniform highp float texelWidth;\n uniform highp float texelHeight;\n \n varying highp vec2 blurCoordinates[\(1 + (numberOfOptimizedOffsets * 2))];\n \n void main()\n {\n lowp vec4 sum = vec4(0.0);\n" + } #else var shaderString = "uniform sampler2D inputImageTexture;\n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 blurCoordinates[\(1 + (numberOfOptimizedOffsets * 2))];\n \n void main()\n {\n vec4 sum = vec4(0.0);\n" #endif @@ -179,7 +210,7 @@ func fragmentShaderForOptimizedGaussianBlurOfRadius(_ 
radius:UInt, sigma:Double) } // If the number of required samples exceeds the amount we can pass in via varyings, we have to do dependent texture reads in the fragment shader - if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets) { + if trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets { #if GLES shaderString += "highp vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" #else @@ -188,8 +219,8 @@ func fragmentShaderForOptimizedGaussianBlurOfRadius(_ radius:UInt, sigma:Double) } for currentOverlowTextureRead in numberOfOptimizedOffsets.. ())? + public var blurRadiusInPixels: Float = 2.0 { didSet { gaussianBlur.blurRadiusInPixels = blurRadiusInPixels } } + public var sensitivity: Float = 5.0 { didSet { harrisCornerDetector.uniformSettings["sensitivity"] = sensitivity } } + public var threshold: Float = 0.2 { didSet { nonMaximumSuppression.uniformSettings["threshold"] = threshold } } + public var cornersDetectedCallback: (([Position]) -> Void)? - let xyDerivative = TextureSamplingOperation(fragmentShader:XYDerivativeFragmentShader) + let xyDerivative = TextureSamplingOperation(fragmentShader: XYDerivativeFragmentShader) let gaussianBlur = GaussianBlur() - let harrisCornerDetector:BasicOperation - let nonMaximumSuppression = TextureSamplingOperation(fragmentShader:ThresholdedNonMaximumSuppressionFragmentShader) + let harrisCornerDetector: BasicOperation + let nonMaximumSuppression = TextureSamplingOperation(fragmentShader: ThresholdedNonMaximumSuppressionFragmentShader) - public init(fragmentShader:String = HarrisCornerDetectorFragmentShader) { - harrisCornerDetector = BasicOperation(fragmentShader:fragmentShader) + public init(fragmentShader: String = HarrisCornerDetectorFragmentShader) { + harrisCornerDetector = BasicOperation(fragmentShader: fragmentShader) super.init() - ({blurRadiusInPixels = 2.0})() - ({sensitivity = 5.0})() - ({threshold = 0.2})() + ({ blurRadiusInPixels = 2.0 })() + ({ sensitivity = 5.0 })() + ({ threshold = 0.2 })() - 
outputImageRelay.newImageCallback = {[weak self] framebuffer in + outputImageRelay.newImageCallback = { [weak self] framebuffer in if let cornersDetectedCallback = self?.cornersDetectedCallback { cornersDetectedCallback(extractCornersFromImage(framebuffer)) } } - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.xyDerivative --> self.gaussianBlur --> self.harrisCornerDetector --> self.nonMaximumSuppression --> output } } } -func extractCornersFromImage(_ framebuffer:Framebuffer) -> [Position] { +func extractCornersFromImage(_ framebuffer: Framebuffer) -> [Position] { let imageByteSize = Int(framebuffer.size.width * framebuffer.size.height * 4) // var rawImagePixels = [UInt8](count:imageByteSize, repeatedValue:0) -// let startTime = CFAbsoluteTimeGetCurrent() +// let startTime = CACurrentMediaTime() - let rawImagePixels = UnsafeMutablePointer.allocate(capacity:imageByteSize) + let rawImagePixels = UnsafeMutablePointer.allocate(capacity: imageByteSize) // -Onone, [UInt8] array: 30 ms for 720p frame on Retina iMac // -O, [UInt8] array: 4 ms for 720p frame on Retina iMac // -Onone, UnsafeMutablePointer: 7 ms for 720p frame on Retina iMac @@ -76,10 +76,10 @@ func extractCornersFromImage(_ framebuffer:Framebuffer) -> [Position] { var corners = [Position]() var currentByte = 0 - while (currentByte < imageByteSize) { + while currentByte < imageByteSize { let colorByte = rawImagePixels[currentByte] - if (colorByte > 0) { + if colorByte > 0 { let xCoordinate = currentByte % imageWidth let yCoordinate = currentByte / imageWidth @@ -88,9 +88,9 @@ func extractCornersFromImage(_ framebuffer:Framebuffer) -> [Position] { currentByte += 4 } - rawImagePixels.deallocate(capacity:imageByteSize) + rawImagePixels.deallocate() -// print("Harris extraction frame time: \(CFAbsoluteTimeGetCurrent() - startTime)") +// print("Harris extraction frame time: \(CACurrentMediaTime() - startTime)") return corners } diff --git 
a/framework/Source/Operations/Haze.swift b/framework/Source/Operations/Haze.swift index 4c394281..f64cc3ac 100644 --- a/framework/Source/Operations/Haze.swift +++ b/framework/Source/Operations/Haze.swift @@ -1,11 +1,11 @@ public class Haze: BasicOperation { - public var distance:Float = 0.2 { didSet { uniformSettings["hazeDistance"] = distance } } - public var slope:Float = 0.0 { didSet { uniformSettings["slope"] = slope } } + public var distance: Float = 0.2 { didSet { uniformSettings["hazeDistance"] = distance } } + public var slope: Float = 0.0 { didSet { uniformSettings["slope"] = slope } } public init() { - super.init(fragmentShader:HazeFragmentShader, numberOfInputs:1) + super.init(fragmentShader: HazeFragmentShader, numberOfInputs: 1) - ({distance = 0.2})() - ({slope = 0.0})() + ({ distance = 0.2 })() + ({ slope = 0.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/HighPassFilter.swift b/framework/Source/Operations/HighPassFilter.swift index bad450dc..3d36fd46 100644 --- a/framework/Source/Operations/HighPassFilter.swift +++ b/framework/Source/Operations/HighPassFilter.swift @@ -7,11 +7,11 @@ public class HighPassFilter: OperationGroup { public override init() { super.init() - ({strength = 0.5})() + ({ strength = 0.5 })() - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.differenceBlend input --> self.lowPass --> self.differenceBlend --> output } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/HighlightAndShadowTint.swift b/framework/Source/Operations/HighlightAndShadowTint.swift index 54b25d16..6d79cefd 100644 --- a/framework/Source/Operations/HighlightAndShadowTint.swift +++ b/framework/Source/Operations/HighlightAndShadowTint.swift @@ -1,15 +1,15 @@ public class HighlightAndShadowTint: BasicOperation { - public var shadowTintIntensity:Float = 0.0 { didSet { uniformSettings["shadowTintIntensity"] = shadowTintIntensity } } - public var 
highlightTintIntensity:Float = 0.0 { didSet { uniformSettings["highlightTintIntensity"] = highlightTintIntensity } } - public var shadowTintColor:Color = Color.red { didSet { uniformSettings["shadowTintColor"] = shadowTintColor } } - public var highlightTintColor:Color = Color.blue { didSet { uniformSettings["highlightTintColor"] = highlightTintColor } } + public var shadowTintIntensity: Float = 0.0 { didSet { uniformSettings["shadowTintIntensity"] = shadowTintIntensity } } + public var highlightTintIntensity: Float = 0.0 { didSet { uniformSettings["highlightTintIntensity"] = highlightTintIntensity } } + public var shadowTintColor = Color.red { didSet { uniformSettings["shadowTintColor"] = shadowTintColor } } + public var highlightTintColor = Color.blue { didSet { uniformSettings["highlightTintColor"] = highlightTintColor } } public init() { - super.init(fragmentShader:HighlightShadowTintFragmentShader, numberOfInputs:1) + super.init(fragmentShader: HighlightShadowTintFragmentShader, numberOfInputs: 1) - ({shadowTintIntensity = 0.0})() - ({highlightTintIntensity = 0.0})() - ({shadowTintColor = Color.red})() - ({highlightTintColor = Color.blue})() + ({ shadowTintIntensity = 0.0 })() + ({ highlightTintIntensity = 0.0 })() + ({ shadowTintColor = Color.red })() + ({ highlightTintColor = Color.blue })() } } diff --git a/framework/Source/Operations/HighlightsAndShadows.swift b/framework/Source/Operations/HighlightsAndShadows.swift index 95f77f72..3a414f8d 100644 --- a/framework/Source/Operations/HighlightsAndShadows.swift +++ b/framework/Source/Operations/HighlightsAndShadows.swift @@ -1,11 +1,11 @@ public class HighlightsAndShadows: BasicOperation { - public var shadows:Float = 0.0 { didSet { uniformSettings["shadows"] = shadows } } - public var highlights:Float = 1.0 { didSet { uniformSettings["highlights"] = highlights } } + public var shadows: Float = 0.0 { didSet { uniformSettings["shadows"] = shadows } } + public var highlights: Float = 1.0 { didSet { 
uniformSettings["highlights"] = highlights } } public init() { - super.init(fragmentShader:HighlightShadowFragmentShader, numberOfInputs:1) + super.init(fragmentShader: HighlightShadowFragmentShader, numberOfInputs: 1) - ({shadows = 0.0})() - ({highlights = 1.0})() + ({ shadows = 0.0 })() + ({ highlights = 1.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Histogram.swift b/framework/Source/Operations/Histogram.swift index 95274542..5a4c6f5c 100755 --- a/framework/Source/Operations/Histogram.swift +++ b/framework/Source/Operations/Histogram.swift @@ -32,31 +32,31 @@ public enum HistogramType { } public class Histogram: BasicOperation { - public var downsamplingFactor:UInt = 16 + public var downsamplingFactor: UInt = 16 - var shader2:ShaderProgram? = nil - var shader3:ShaderProgram? = nil + var shader2: ShaderProgram? + var shader3: ShaderProgram? - public init(type:HistogramType) { + public init(type: HistogramType) { switch type { - case .red: super.init(vertexShader:HistogramRedSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader, numberOfInputs:1) - case .blue: super.init(vertexShader:HistogramBlueSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader, numberOfInputs:1) - case .green: super.init(vertexShader:HistogramGreenSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader, numberOfInputs:1) - case .luminance: super.init(vertexShader:HistogramLuminanceSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader, numberOfInputs:1) + case .red: super.init(vertexShader: HistogramRedSamplingVertexShader, fragmentShader: HistogramAccumulationFragmentShader, numberOfInputs: 1) + case .blue: super.init(vertexShader: HistogramBlueSamplingVertexShader, fragmentShader: HistogramAccumulationFragmentShader, numberOfInputs: 1) + case .green: super.init(vertexShader: HistogramGreenSamplingVertexShader, fragmentShader: HistogramAccumulationFragmentShader, numberOfInputs: 
1) + case .luminance: super.init(vertexShader: HistogramLuminanceSamplingVertexShader, fragmentShader: HistogramAccumulationFragmentShader, numberOfInputs: 1) case .rgb: - super.init(vertexShader:HistogramRedSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader, numberOfInputs:1) - shader2 = crashOnShaderCompileFailure("Histogram"){try sharedImageProcessingContext.programForVertexShader(HistogramGreenSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader)} - shader3 = crashOnShaderCompileFailure("Histogram"){try sharedImageProcessingContext.programForVertexShader(HistogramBlueSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader)} + super.init(vertexShader: HistogramRedSamplingVertexShader, fragmentShader: HistogramAccumulationFragmentShader, numberOfInputs: 1) + shader2 = crashOnShaderCompileFailure("Histogram") { try sharedImageProcessingContext.programForVertexShader(HistogramGreenSamplingVertexShader, fragmentShader: HistogramAccumulationFragmentShader) } + shader3 = crashOnShaderCompileFailure("Histogram") { try sharedImageProcessingContext.programForVertexShader(HistogramBlueSamplingVertexShader, fragmentShader: HistogramAccumulationFragmentShader) } } } - override func renderFrame() { + override open func renderFrame() { let inputSize = sizeOfInitialStageBasedOnFramebuffer(inputFramebuffers[0]!) 
let inputByteSize = Int(inputSize.width * inputSize.height * 4) - let data = UnsafeMutablePointer.allocate(capacity:inputByteSize) + let data = UnsafeMutablePointer.allocate(capacity: inputByteSize) glReadPixels(0, 0, inputSize.width, inputSize.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), data) - renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:256, height:3), stencil:mask != nil) + renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: 256, height: 3), stencil: mask != nil) releaseIncomingFramebuffers() renderFramebuffer.activateFramebufferForRendering() @@ -84,6 +84,6 @@ public class Histogram: BasicOperation { } disableBlending() - data.deallocate(capacity:inputByteSize) + data.deallocate() } } diff --git a/framework/Source/Operations/HistogramDisplay.swift b/framework/Source/Operations/HistogramDisplay.swift index c12b77e3..3f18e065 100644 --- a/framework/Source/Operations/HistogramDisplay.swift +++ b/framework/Source/Operations/HistogramDisplay.swift @@ -1,5 +1,5 @@ public class HistogramDisplay: BasicOperation { public init() { - super.init(vertexShader:HistogramDisplayVertexShader, fragmentShader:HistogramDisplayFragmentShader, numberOfInputs:1) + super.init(vertexShader: HistogramDisplayVertexShader, fragmentShader: HistogramDisplayFragmentShader, numberOfInputs: 1) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/HistogramEqualization.swift b/framework/Source/Operations/HistogramEqualization.swift index 49dc45a3..de58822b 100644 --- a/framework/Source/Operations/HistogramEqualization.swift +++ b/framework/Source/Operations/HistogramEqualization.swift @@ -1,31 +1,30 @@ public class HistogramEqualization: OperationGroup { public var downsamplingFactor: UInt = 16 { didSet { histogram.downsamplingFactor = downsamplingFactor } } - let 
histogram:Histogram + let histogram: Histogram let rawDataInput = RawDataInput() let rawDataOutput = RawDataOutput() - let equalizationFilter:BasicOperation + let equalizationFilter: BasicOperation - public init(type:HistogramType) { - - self.histogram = Histogram(type:type) + public init(type: HistogramType) { + self.histogram = Histogram(type: type) switch type { - case .red: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationRedFragmentShader, numberOfInputs:2) - case .blue: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationBlueFragmentShader, numberOfInputs:2) - case .green: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationGreenFragmentShader, numberOfInputs:2) - case .luminance: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationLuminanceFragmentShader, numberOfInputs:2) - case .rgb: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationRGBFragmentShader, numberOfInputs:2) + case .red: self.equalizationFilter = BasicOperation(fragmentShader: HistogramEqualizationRedFragmentShader, numberOfInputs: 2) + case .blue: self.equalizationFilter = BasicOperation(fragmentShader: HistogramEqualizationBlueFragmentShader, numberOfInputs: 2) + case .green: self.equalizationFilter = BasicOperation(fragmentShader: HistogramEqualizationGreenFragmentShader, numberOfInputs: 2) + case .luminance: self.equalizationFilter = BasicOperation(fragmentShader: HistogramEqualizationLuminanceFragmentShader, numberOfInputs: 2) + case .rgb: self.equalizationFilter = BasicOperation(fragmentShader: HistogramEqualizationRGBFragmentShader, numberOfInputs: 2) } super.init() - ({downsamplingFactor = 16})() + ({ downsamplingFactor = 16 })() - self.configureGroup{input, output in + self.configureGroup {input, output in self.rawDataOutput.dataAvailableCallback = {data in - var redHistogramBin = [Int](repeating:0, count:256) - var greenHistogramBin = [Int](repeating:0, 
count:256) - var blueHistogramBin = [Int](repeating:0, count:256) + var redHistogramBin = [Int](repeating: 0, count: 256) + var greenHistogramBin = [Int](repeating: 0, count: 256) + var blueHistogramBin = [Int](repeating: 0, count: 256) let rowWidth = 256 * 4 redHistogramBin[0] = Int(data[rowWidth]) @@ -38,7 +37,7 @@ public class HistogramEqualization: OperationGroup { blueHistogramBin[dataIndex] = blueHistogramBin[dataIndex - 1] + Int(data[rowWidth + (dataIndex * 4) + 2]) } - var equalizationLookupTable = [UInt8](repeating:0, count:256 * 4) + var equalizationLookupTable = [UInt8](repeating: 0, count: 256 * 4) for binIndex in 0..<256 { equalizationLookupTable[binIndex * 4] = UInt8((((redHistogramBin[binIndex] - redHistogramBin[0]) * 255) / redHistogramBin[255])) equalizationLookupTable[(binIndex * 4) + 1] = UInt8((((greenHistogramBin[binIndex] - greenHistogramBin[0]) * 255) / greenHistogramBin[255])) @@ -46,7 +45,7 @@ public class HistogramEqualization: OperationGroup { equalizationLookupTable[(binIndex * 4) + 3] = 255 } - self.rawDataInput.uploadBytes(equalizationLookupTable, size:Size(width:256, height:1), pixelFormat:.rgba) + self.rawDataInput.uploadBytes(equalizationLookupTable, size: Size(width: 256, height: 1), pixelFormat: .rgba) } input --> self.histogram --> self.rawDataOutput diff --git a/framework/Source/Operations/HueAdjustment.swift b/framework/Source/Operations/HueAdjustment.swift index 14891889..81ad46f4 100644 --- a/framework/Source/Operations/HueAdjustment.swift +++ b/framework/Source/Operations/HueAdjustment.swift @@ -1,9 +1,9 @@ public class HueAdjustment: BasicOperation { - public var hue:Float = 90.0 { didSet { uniformSettings["hueAdjust"] = hue } } + public var hue: Float = 90.0 { didSet { uniformSettings["hueAdjust"] = hue } } public init() { - super.init(fragmentShader:HueFragmentShader, numberOfInputs:1) + super.init(fragmentShader: HueFragmentShader, numberOfInputs: 1) - ({hue = 90.0})() + ({ hue = 90.0 })() } -} \ No newline at end of 
file +} diff --git a/framework/Source/Operations/HueBlend.swift b/framework/Source/Operations/HueBlend.swift index 33956fe6..482bac7b 100644 --- a/framework/Source/Operations/HueBlend.swift +++ b/framework/Source/Operations/HueBlend.swift @@ -1,5 +1,5 @@ public class HueBlend: BasicOperation { public init() { - super.init(fragmentShader:HueBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: HueBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ImageBuffer.swift b/framework/Source/Operations/ImageBuffer.swift index 6b5b5675..a2deb0df 100644 --- a/framework/Source/Operations/ImageBuffer.swift +++ b/framework/Source/Operations/ImageBuffer.swift @@ -1,16 +1,20 @@ public class ImageBuffer: ImageProcessingOperation { // TODO: Dynamically release framebuffers on buffer resize - public var bufferSize:UInt = 1 + public var bufferSize: UInt = 1 public var activatePassthroughOnNextFrame = true - public let maximumInputs:UInt = 1 + public let maximumInputs: UInt = 1 public let targets = TargetContainer() public let sources = SourceContainer() var bufferedFramebuffers = [Framebuffer]() + + #if DEBUG + public var debugRenderInfo: String { "" } + #endif - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { bufferedFramebuffers.append(framebuffer) - if (bufferedFramebuffers.count > Int(bufferSize)) { + if bufferedFramebuffers.count > Int(bufferSize) { let releasedFramebuffer = bufferedFramebuffers.removeFirst() updateTargetsWithFramebuffer(releasedFramebuffer) releasedFramebuffer.unlock() @@ -23,7 +27,7 @@ public class ImageBuffer: ImageProcessingOperation { } } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { // Buffers most likely won't need this } } diff --git 
a/framework/Source/Operations/KuwaharaFilter.swift b/framework/Source/Operations/KuwaharaFilter.swift index 7063961e..83725ab2 100644 --- a/framework/Source/Operations/KuwaharaFilter.swift +++ b/framework/Source/Operations/KuwaharaFilter.swift @@ -1,9 +1,9 @@ public class KuwaharaFilter: BasicOperation { - public var radius:Int = 3 { didSet { uniformSettings["radius"] = radius } } + public var radius: Int = 3 { didSet { uniformSettings["radius"] = radius } } public init() { - super.init(fragmentShader:KuwaharaFragmentShader, numberOfInputs:1) + super.init(fragmentShader: KuwaharaFragmentShader, numberOfInputs: 1) - ({radius = 3})() + ({ radius = 3 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/KuwaharaRadius3Filter.swift b/framework/Source/Operations/KuwaharaRadius3Filter.swift index 6380d541..60f3030f 100644 --- a/framework/Source/Operations/KuwaharaRadius3Filter.swift +++ b/framework/Source/Operations/KuwaharaRadius3Filter.swift @@ -1,5 +1,5 @@ public class KuwaharaRadius3Filter: BasicOperation { public init() { - super.init(fragmentShader:KuwaharaRadius3FragmentShader, numberOfInputs:1) + super.init(fragmentShader: KuwaharaRadius3FragmentShader, numberOfInputs: 1) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/LanczosResampling.swift b/framework/Source/Operations/LanczosResampling.swift index fb6c3af2..609564f5 100644 --- a/framework/Source/Operations/LanczosResampling.swift +++ b/framework/Source/Operations/LanczosResampling.swift @@ -1,32 +1,32 @@ public class LanczosResampling: BasicOperation { public init() { - super.init(vertexShader:LanczosResamplingVertexShader, fragmentShader:LanczosResamplingFragmentShader) + super.init(vertexShader: LanczosResamplingVertexShader, fragmentShader: LanczosResamplingFragmentShader) } - override func internalRenderFunction(_ inputFramebuffer:Framebuffer, textureProperties:[InputTextureProperties]) { + override func internalRenderFunction(_ inputFramebuffer: 
Framebuffer, textureProperties: [InputTextureProperties]) { let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) // Shrink the vertical component of the first stage let inputSize = inputFramebuffer.sizeForTargetOrientation(.portrait) - let firstStageFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:inputSize.width, height:renderFramebuffer.size.height), stencil:false) + let firstStageFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: inputSize.width, height: renderFramebuffer.size.height), stencil: false) firstStageFramebuffer.lock() firstStageFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(backgroundColor) - let texelSize = inputFramebuffer.initialStageTexelSize(for:outputRotation) + let texelSize = inputFramebuffer.initialStageTexelSize(for: outputRotation) uniformSettings["texelWidth"] = texelSize.width uniformSettings["texelHeight"] = texelSize.height - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:textureProperties) + renderQuadWithShader(shader, uniformSettings: uniformSettings, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: textureProperties) releaseIncomingFramebuffers() // Shrink the width component of the result - let secondStageTexelSize = firstStageFramebuffer.texelSize(for:.noRotation) + let secondStageTexelSize = firstStageFramebuffer.texelSize(for: .noRotation) uniformSettings["texelWidth"] = secondStageTexelSize.width uniformSettings["texelHeight"] = 0.0 renderFramebuffer.activateFramebufferForRendering() - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, 
inputTextures:[firstStageFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) + renderQuadWithShader(shader, uniformSettings: uniformSettings, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [firstStageFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) firstStageFramebuffer.unlock() } } diff --git a/framework/Source/Operations/Laplacian.swift b/framework/Source/Operations/Laplacian.swift index 0ce6ec13..18ac8703 100644 --- a/framework/Source/Operations/Laplacian.swift +++ b/framework/Source/Operations/Laplacian.swift @@ -1,5 +1,5 @@ public class Laplacian: TextureSamplingOperation { public init() { - super.init(fragmentShader:LaplacianFragmentShader) + super.init(fragmentShader: LaplacianFragmentShader) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/LevelsAdjustment.swift b/framework/Source/Operations/LevelsAdjustment.swift index e1eaba74..8bcd17c3 100644 --- a/framework/Source/Operations/LevelsAdjustment.swift +++ b/framework/Source/Operations/LevelsAdjustment.swift @@ -1,19 +1,19 @@ public class LevelsAdjustment: BasicOperation { - public var minimum:Color = Color(red:0.0, green:0.0, blue:0.0) { didSet { uniformSettings["levelMinimum"] = minimum } } - public var middle:Color = Color(red:1.0, green:1.0, blue:1.0) { didSet { uniformSettings["levelMiddle"] = middle } } - public var maximum:Color = Color(red:1.0, green:1.0, blue:1.0) { didSet { uniformSettings["levelMaximum"] = maximum } } - public var minOutput:Color = Color(red:0.0, green:0.0, blue:0.0) { didSet { uniformSettings["minOutput"] = minOutput } } - public var maxOutput:Color = Color(red:1.0, green:1.0, blue:1.0) { didSet { uniformSettings["maxOutput"] = maxOutput } } + public var minimum = Color(red: 0.0, green: 0.0, blue: 0.0) { didSet { uniformSettings["levelMinimum"] = minimum } } + public var middle = Color(red: 1.0, green: 1.0, blue: 1.0) { didSet { uniformSettings["levelMiddle"] = middle } } + public var maximum = 
Color(red: 1.0, green: 1.0, blue: 1.0) { didSet { uniformSettings["levelMaximum"] = maximum } } + public var minOutput = Color(red: 0.0, green: 0.0, blue: 0.0) { didSet { uniformSettings["minOutput"] = minOutput } } + public var maxOutput = Color(red: 1.0, green: 1.0, blue: 1.0) { didSet { uniformSettings["maxOutput"] = maxOutput } } // TODO: Is this an acceptable interface, or do I need to bring this closer to the old implementation? public init() { - super.init(fragmentShader:LevelsFragmentShader, numberOfInputs:1) + super.init(fragmentShader: LevelsFragmentShader, numberOfInputs: 1) - ({minimum = Color(red:0.0, green:0.0, blue:0.0)})() - ({middle = Color(red:1.0, green:1.0, blue:1.0)})() - ({maximum = Color(red:1.0, green:1.0, blue:1.0)})() - ({minOutput = Color(red:0.0, green:0.0, blue:0.0)})() - ({maxOutput = Color(red:1.0, green:1.0, blue:1.0)})() + ({ minimum = Color(red: 0.0, green: 0.0, blue: 0.0) })() + ({ middle = Color(red: 1.0, green: 1.0, blue: 1.0) })() + ({ maximum = Color(red: 1.0, green: 1.0, blue: 1.0) })() + ({ minOutput = Color(red: 0.0, green: 0.0, blue: 0.0) })() + ({ maxOutput = Color(red: 1.0, green: 1.0, blue: 1.0) })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/LightenBlend.swift b/framework/Source/Operations/LightenBlend.swift index 4f5a261f..7ee018a6 100644 --- a/framework/Source/Operations/LightenBlend.swift +++ b/framework/Source/Operations/LightenBlend.swift @@ -1,5 +1,5 @@ public class LightenBlend: BasicOperation { public init() { - super.init(fragmentShader:LightenBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: LightenBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/LineGenerator.swift b/framework/Source/Operations/LineGenerator.swift index 0120a184..a9eeb9d8 100644 --- a/framework/Source/Operations/LineGenerator.swift +++ b/framework/Source/Operations/LineGenerator.swift @@ -13,44 +13,44 @@ #endif public enum 
Line { - case infinite(slope:Float, intercept:Float) - case segment(p1:Position, p2:Position) + case infinite(slope: Float, intercept: Float) + case segment(p1: Position, p2: Position) func toGLEndpoints() -> [GLfloat] { switch self { case .infinite(let slope, let intercept): - if (slope > 9000.0) {// Vertical line + if slope > 9000.0 {// Vertical line return [intercept, -1.0, intercept, 1.0] } else { return [-1.0, GLfloat(slope * -1.0 + intercept), 1.0, GLfloat(slope * 1.0 + intercept)] } case .segment(let p1, let p2): - return [p1.x, p1.y, p2.x, p2.y].map {GLfloat($0)} + return [p1.x, p1.y, p2.x, p2.y].map { GLfloat($0) } } } } public class LineGenerator: ImageGenerator { - public var lineColor:Color = Color.green { didSet { uniformSettings["lineColor"] = lineColor } } - public var lineWidth:Float = 1.0 { + public var lineColor = Color.green { didSet { uniformSettings["lineColor"] = lineColor } } + public var lineWidth: Float = 1.0 { didSet { lineShader.use() glLineWidth(lineWidth) } } - let lineShader:ShaderProgram + let lineShader: ShaderProgram var uniformSettings = ShaderUniformSettings() - public override init(size:Size) { - lineShader = crashOnShaderCompileFailure("LineGenerator"){try sharedImageProcessingContext.programForVertexShader(LineVertexShader, fragmentShader:LineFragmentShader)} - super.init(size:size) + public override init(size: Size) { + lineShader = crashOnShaderCompileFailure("LineGenerator") { try sharedImageProcessingContext.programForVertexShader(LineVertexShader, fragmentShader: LineFragmentShader) } + super.init(size: size) - ({lineWidth = 1.0})() - ({lineColor = Color.red})() + ({ lineWidth = 1.0 })() + ({ lineColor = Color.red })() } - public func renderLines(_ lines:[Line]) { + public func renderLines(_ lines: [Line]) { imageFramebuffer.activateFramebufferForRendering() lineShader.use() @@ -60,10 +60,9 @@ public class LineGenerator: ImageGenerator { guard let positionAttribute = lineShader.attributeIndex("position") else { 
fatalError("A position attribute was missing from the shader program during rendering.") } - let lineEndpoints = lines.flatMap{$0.toGLEndpoints()} + let lineEndpoints = lines.flatMap { $0.toGLEndpoints() } glVertexAttribPointer(positionAttribute, 2, GLenum(GL_FLOAT), 0, 0, lineEndpoints) - enableAdditiveBlending() glDrawArrays(GLenum(GL_LINES), 0, GLsizei(lines.count) * 2) diff --git a/framework/Source/Operations/LinearBurnBlend.swift b/framework/Source/Operations/LinearBurnBlend.swift index 13a3f93f..8527b04e 100644 --- a/framework/Source/Operations/LinearBurnBlend.swift +++ b/framework/Source/Operations/LinearBurnBlend.swift @@ -1,5 +1,5 @@ public class LinearBurnBlend: BasicOperation { public init() { - super.init(fragmentShader:LinearBurnBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: LinearBurnBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/LocalBinaryPattern.swift b/framework/Source/Operations/LocalBinaryPattern.swift index ea78f4d6..bb08493f 100644 --- a/framework/Source/Operations/LocalBinaryPattern.swift +++ b/framework/Source/Operations/LocalBinaryPattern.swift @@ -14,6 +14,6 @@ public class LocalBinaryPattern: TextureSamplingOperation { public init() { - super.init(fragmentShader:LocalBinaryPatternFragmentShader) + super.init(fragmentShader: LocalBinaryPatternFragmentShader) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/LookupFilter.swift b/framework/Source/Operations/LookupFilter.swift index 1a4996c2..a0e847f9 100755 --- a/framework/Source/Operations/LookupFilter.swift +++ b/framework/Source/Operations/LookupFilter.swift @@ -1,18 +1,36 @@ // PictureInput isn't defined yet on Linux, so this operation is inoperable there #if !os(Linux) public class LookupFilter: BasicOperation { - public var intensity:Float = 1.0 { didSet { uniformSettings["intensity"] = intensity } } - public var lookupImage:PictureInput? 
{ // TODO: Check for retain cycles in all cases here + public var intensity: Float = 1.0 { didSet { - lookupImage?.addTarget(self, atTargetIndex:1) + if intensity < 0 || intensity > 1.0 { + assertionFailure("LookupFilter intensity:\(intensity) is out of valid range [0, 1.0]") + intensity = min(max(intensity, 0), 1.0) + return + } + uniformSettings["intensity"] = intensity + } + } + public var lookupImage: PictureInput? { // TODO: Check for retain cycles in all cases here + didSet { + lookupImage?.addTarget(self, atTargetIndex: 1) + #if DEBUG + lookupImage?.printDebugRenderInfos = true + #endif lookupImage?.processImage() } } public init() { - super.init(fragmentShader:LookupFragmentShader, numberOfInputs:2) + super.init(fragmentShader: LookupFragmentShader, numberOfInputs: 2) - ({intensity = 1.0})() + ({ intensity = 1.0 })() + } +} + +extension LookupFilter: DebugPipelineNameable { + public var debugNameForPipeline: String { + "LookupFilter(\(lookupImage?.imageName ?? "null")/\(intensity))" } } #endif diff --git a/framework/Source/Operations/LowPassFilter.swift b/framework/Source/Operations/LowPassFilter.swift index a2de9ed3..1d43ae71 100644 --- a/framework/Source/Operations/LowPassFilter.swift +++ b/framework/Source/Operations/LowPassFilter.swift @@ -8,13 +8,13 @@ public class LowPassFilter: OperationGroup { super.init() buffer.bufferSize = 1 - ({strength = 0.5})() + ({ strength = 0.5 })() - self.configureGroup{input, output in + self.configureGroup {input, output in // This is needed to break the cycle on the very first pass through the blend loop self.dissolveBlend.activatePassthroughOnNextFrame = true // TODO: this may be a retain cycle input --> self.dissolveBlend --> self.buffer --> self.dissolveBlend --> output } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Luminance.swift b/framework/Source/Operations/Luminance.swift index fa5cc730..4126cfc1 100755 --- a/framework/Source/Operations/Luminance.swift +++ 
b/framework/Source/Operations/Luminance.swift @@ -1,5 +1,5 @@ public class Luminance: BasicOperation { public init() { - super.init(fragmentShader:LuminanceFragmentShader, numberOfInputs:1) + super.init(fragmentShader: LuminanceFragmentShader, numberOfInputs: 1) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/LuminanceRangeReduction.swift b/framework/Source/Operations/LuminanceRangeReduction.swift index db85cba3..3849ae17 100644 --- a/framework/Source/Operations/LuminanceRangeReduction.swift +++ b/framework/Source/Operations/LuminanceRangeReduction.swift @@ -1,9 +1,9 @@ public class LuminanceRangeReduction: BasicOperation { - public var rangeReductionFactor:Float = 0.6 { didSet { uniformSettings["rangeReduction"] = rangeReductionFactor } } + public var rangeReductionFactor: Float = 0.6 { didSet { uniformSettings["rangeReduction"] = rangeReductionFactor } } public init() { - super.init(fragmentShader:LuminanceRangeFragmentShader, numberOfInputs:1) + super.init(fragmentShader: LuminanceRangeFragmentShader, numberOfInputs: 1) - ({rangeReductionFactor = 0.6})() + ({ rangeReductionFactor = 0.6 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/LuminanceThreshold.swift b/framework/Source/Operations/LuminanceThreshold.swift index 5b69200e..cc2cfffa 100644 --- a/framework/Source/Operations/LuminanceThreshold.swift +++ b/framework/Source/Operations/LuminanceThreshold.swift @@ -1,9 +1,9 @@ public class LuminanceThreshold: BasicOperation { - public var threshold:Float = 0.5 { didSet { uniformSettings["threshold"] = threshold } } + public var threshold: Float = 0.5 { didSet { uniformSettings["threshold"] = threshold } } public init() { - super.init(fragmentShader:LuminanceThresholdFragmentShader, numberOfInputs:1) + super.init(fragmentShader: LuminanceThresholdFragmentShader, numberOfInputs: 1) - ({threshold = 0.5})() + ({ threshold = 0.5 })() } -} \ No newline at end of file +} diff --git 
a/framework/Source/Operations/LuminosityBlend.swift b/framework/Source/Operations/LuminosityBlend.swift index d4fca389..50815d48 100644 --- a/framework/Source/Operations/LuminosityBlend.swift +++ b/framework/Source/Operations/LuminosityBlend.swift @@ -1,5 +1,5 @@ public class LuminosityBlend: BasicOperation { public init() { - super.init(fragmentShader:LuminosityBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: LuminosityBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/MedianFilter.swift b/framework/Source/Operations/MedianFilter.swift index 016371d3..1efa67fb 100644 --- a/framework/Source/Operations/MedianFilter.swift +++ b/framework/Source/Operations/MedianFilter.swift @@ -1,5 +1,5 @@ public class MedianFilter: TextureSamplingOperation { public init() { - super.init(fragmentShader:MedianFragmentShader) + super.init(fragmentShader: MedianFragmentShader) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/MissEtikateFilter.swift b/framework/Source/Operations/MissEtikateFilter.swift index 16d60168..ad1d96f3 100755 --- a/framework/Source/Operations/MissEtikateFilter.swift +++ b/framework/Source/Operations/MissEtikateFilter.swift @@ -10,7 +10,11 @@ public class MissEtikateFilter: LookupFilter { public override init() { super.init() - ({lookupImage = PictureInput(imageName:"lookup_miss_etikate.png")})() + do { + try ({ lookupImage = try PictureInput(imageName: "lookup_miss_etikate.png") })() + } catch { + print("ERROR: Unable to create PictureInput \(error)") + } } } #endif diff --git a/framework/Source/Operations/MonochromeFilter.swift b/framework/Source/Operations/MonochromeFilter.swift index d94d3135..71a98437 100644 --- a/framework/Source/Operations/MonochromeFilter.swift +++ b/framework/Source/Operations/MonochromeFilter.swift @@ -1,11 +1,11 @@ public class MonochromeFilter: BasicOperation { - public var intensity:Float = 1.0 { didSet { 
uniformSettings["intensity"] = intensity } } - public var color:Color = Color(red:0.6, green:0.45, blue:0.3, alpha:1.0) { didSet { uniformSettings["filterColor"] = color } } + public var intensity: Float = 1.0 { didSet { uniformSettings["intensity"] = intensity } } + public var color = Color(red: 0.6, green: 0.45, blue: 0.3, alpha: 1.0) { didSet { uniformSettings["filterColor"] = color } } public init() { - super.init(fragmentShader:MonochromeFragmentShader, numberOfInputs:1) + super.init(fragmentShader: MonochromeFragmentShader, numberOfInputs: 1) - ({intensity = 1.0})() - ({color = Color(red:0.6, green:0.45, blue:0.3, alpha:1.0)})() + ({ intensity = 1.0 })() + ({ color = Color(red: 0.6, green: 0.45, blue: 0.3, alpha: 1.0) })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/MotionBlur.swift b/framework/Source/Operations/MotionBlur.swift index 6c76fe29..b5c62bcc 100755 --- a/framework/Source/Operations/MotionBlur.swift +++ b/framework/Source/Operations/MotionBlur.swift @@ -5,19 +5,19 @@ import Glibc import Foundation public class MotionBlur: BasicOperation { - public var blurSize:Float = 2.5 - public var blurAngle:Float = 0.0 + public var blurSize: Float = 2.5 + public var blurAngle: Float = 0.0 public init() { - super.init(vertexShader:MotionBlurVertexShader, fragmentShader:MotionBlurFragmentShader, numberOfInputs:1) + super.init(vertexShader: MotionBlurVertexShader, fragmentShader: MotionBlurFragmentShader, numberOfInputs: 1) } - override func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { + override open func configureFramebufferSpecificUniforms(_ inputFramebuffer: Framebuffer) { let outputRotation = overriddenOutputRotation ?? 
inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) - let texelSize = inputFramebuffer.texelSize(for:outputRotation) + let texelSize = inputFramebuffer.texelSize(for: outputRotation) let aspectRatio = inputFramebuffer.aspectRatioForRotation(outputRotation) - let directionalTexelStep:Position + let directionalTexelStep: Position if outputRotation.flipsDimensions() { let xOffset = blurSize * Float(sin(Double(blurAngle) * .pi / 180.0)) * aspectRatio * texelSize.width let yOffset = blurSize * Float(cos(Double(blurAngle) * .pi / 180.0)) * texelSize.width diff --git a/framework/Source/Operations/MotionDetector.swift b/framework/Source/Operations/MotionDetector.swift index 50c64030..a047e3d7 100644 --- a/framework/Source/Operations/MotionDetector.swift +++ b/framework/Source/Operations/MotionDetector.swift @@ -1,19 +1,19 @@ public class MotionDetector: OperationGroup { - public var lowPassStrength:Float = 1.0 { didSet {lowPassFilter.strength = lowPassStrength}} - public var motionDetectedCallback:((Position, Float) -> ())? + public var lowPassStrength: Float = 1.0 { didSet { lowPassFilter.strength = lowPassStrength } } + public var motionDetectedCallback: ((Position, Float) -> Void)? 
let lowPassFilter = LowPassFilter() - let motionComparison = BasicOperation(fragmentShader:MotionComparisonFragmentShader, numberOfInputs:2) + let motionComparison = BasicOperation(fragmentShader: MotionComparisonFragmentShader, numberOfInputs: 2) let averageColorExtractor = AverageColorExtractor() public override init() { super.init() - averageColorExtractor.extractedColorCallback = {[weak self] color in + averageColorExtractor.extractedColorCallback = { [weak self] color in self?.motionDetectedCallback?(Position(color.redComponent / color.alphaComponent, color.greenComponent / color.alphaComponent), color.alphaComponent) } - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.motionComparison --> self.averageColorExtractor --> output input --> self.lowPassFilter --> self.motionComparison } diff --git a/framework/Source/Operations/MultiplyBlend.swift b/framework/Source/Operations/MultiplyBlend.swift index 1ef94331..b8c9e8ff 100644 --- a/framework/Source/Operations/MultiplyBlend.swift +++ b/framework/Source/Operations/MultiplyBlend.swift @@ -1,5 +1,5 @@ public class MultiplyBlend: BasicOperation { public init() { - super.init(fragmentShader:MultiplyBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: MultiplyBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/NobleCornerDetector.swift b/framework/Source/Operations/NobleCornerDetector.swift index 473779f9..d1f63ced 100644 --- a/framework/Source/Operations/NobleCornerDetector.swift +++ b/framework/Source/Operations/NobleCornerDetector.swift @@ -6,6 +6,6 @@ public class NobleCornerDetector: HarrisCornerDetector { public init() { - super.init(fragmentShader:NobleCornerDetectorFragmentShader) + super.init(fragmentShader: NobleCornerDetectorFragmentShader) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/NormalBlend.swift b/framework/Source/Operations/NormalBlend.swift index 
9d483917..72efef54 100644 --- a/framework/Source/Operations/NormalBlend.swift +++ b/framework/Source/Operations/NormalBlend.swift @@ -1,5 +1,5 @@ public class NormalBlend: BasicOperation { public init() { - super.init(fragmentShader:NormalBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: NormalBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/OpacityAdjustment.swift b/framework/Source/Operations/OpacityAdjustment.swift index 3637d8a5..1da2ce80 100644 --- a/framework/Source/Operations/OpacityAdjustment.swift +++ b/framework/Source/Operations/OpacityAdjustment.swift @@ -1,9 +1,9 @@ public class OpacityAdjustment: BasicOperation { - public var opacity:Float = 0.0 { didSet { uniformSettings["opacity"] = opacity } } + public var opacity: Float = 0.0 { didSet { uniformSettings["opacity"] = opacity } } public init() { - super.init(fragmentShader:OpacityFragmentShader, numberOfInputs:1) + super.init(fragmentShader: OpacityFragmentShader, numberOfInputs: 1) - ({opacity = 0.0})() + ({ opacity = 0.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/OpeningFilter.swift b/framework/Source/Operations/OpeningFilter.swift index 65aec8ae..a8fbde72 100644 --- a/framework/Source/Operations/OpeningFilter.swift +++ b/framework/Source/Operations/OpeningFilter.swift @@ -1,5 +1,5 @@ public class OpeningFilter: OperationGroup { - public var radius:UInt { + public var radius: UInt { didSet { erosion.radius = radius dilation.radius = radius @@ -12,8 +12,8 @@ public class OpeningFilter: OperationGroup { radius = 1 super.init() - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.erosion --> self.dilation --> output } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/OverlayBlend.swift b/framework/Source/Operations/OverlayBlend.swift index 460722d9..6387ebe4 100644 --- 
a/framework/Source/Operations/OverlayBlend.swift +++ b/framework/Source/Operations/OverlayBlend.swift @@ -1,5 +1,5 @@ public class OverlayBlend: BasicOperation { public init() { - super.init(fragmentShader:OverlayBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: OverlayBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/PinchDistortion.swift b/framework/Source/Operations/PinchDistortion.swift index 4ab7c4b5..05ea47d3 100644 --- a/framework/Source/Operations/PinchDistortion.swift +++ b/framework/Source/Operations/PinchDistortion.swift @@ -1,13 +1,13 @@ public class PinchDistortion: BasicOperation { - public var radius:Float = 1.0 { didSet { uniformSettings["radius"] = radius } } - public var scale:Float = 0.5 { didSet { uniformSettings["scale"] = scale } } - public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } + public var radius: Float = 1.0 { didSet { uniformSettings["radius"] = radius } } + public var scale: Float = 0.5 { didSet { uniformSettings["scale"] = scale } } + public var center = Position.center { didSet { uniformSettings["center"] = center } } public init() { - super.init(fragmentShader:PinchDistortionFragmentShader, numberOfInputs:1) + super.init(fragmentShader: PinchDistortionFragmentShader, numberOfInputs: 1) - ({radius = 1.0})() - ({scale = 0.5})() - ({center = Position.center})() + ({ radius = 1.0 })() + ({ scale = 0.5 })() + ({ center = Position.center })() } } diff --git a/framework/Source/Operations/Pixellate.swift b/framework/Source/Operations/Pixellate.swift index da283de5..34c3ce64 100644 --- a/framework/Source/Operations/Pixellate.swift +++ b/framework/Source/Operations/Pixellate.swift @@ -1,5 +1,5 @@ public class Pixellate: BasicOperation { - public var fractionalWidthOfAPixel:Float = 0.01 { + public var fractionalWidthOfAPixel: Float = 0.01 { didSet { let imageWidth = 1.0 / Float(self.renderFramebuffer?.size.width ?? 
2048) uniformSettings["fractionalWidthOfPixel"] = max(fractionalWidthOfAPixel, imageWidth) @@ -7,8 +7,8 @@ public class Pixellate: BasicOperation { } public init() { - super.init(fragmentShader:PixellateFragmentShader, numberOfInputs:1) + super.init(fragmentShader: PixellateFragmentShader, numberOfInputs: 1) - ({fractionalWidthOfAPixel = 0.01})() + ({ fractionalWidthOfAPixel = 0.01 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/PolarPixellate.swift b/framework/Source/Operations/PolarPixellate.swift index 230da53a..dfcc1adc 100644 --- a/framework/Source/Operations/PolarPixellate.swift +++ b/framework/Source/Operations/PolarPixellate.swift @@ -1,11 +1,11 @@ public class PolarPixellate: BasicOperation { - public var pixelSize:Size = Size(width:0.05, height:0.05) { didSet { uniformSettings["pixelSize"] = pixelSize } } - public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } + public var pixelSize = Size(width: 0.05, height: 0.05) { didSet { uniformSettings["pixelSize"] = pixelSize } } + public var center = Position.center { didSet { uniformSettings["center"] = center } } public init() { - super.init(fragmentShader:PolarPixellateFragmentShader, numberOfInputs:1) + super.init(fragmentShader: PolarPixellateFragmentShader, numberOfInputs: 1) - ({pixelSize = Size(width:0.05, height:0.05)})() - ({center = Position.center})() + ({ pixelSize = Size(width: 0.05, height: 0.05) })() + ({ center = Position.center })() } } diff --git a/framework/Source/Operations/PolkaDot.swift b/framework/Source/Operations/PolkaDot.swift index 4f020d24..ecae50c7 100644 --- a/framework/Source/Operations/PolkaDot.swift +++ b/framework/Source/Operations/PolkaDot.swift @@ -1,6 +1,6 @@ public class PolkaDot: BasicOperation { - public var dotScaling:Float = 0.90 { didSet { uniformSettings["dotScaling"] = dotScaling } } - public var fractionalWidthOfAPixel:Float = 0.01 { + public var dotScaling: Float = 0.90 { didSet { 
uniformSettings["dotScaling"] = dotScaling } } + public var fractionalWidthOfAPixel: Float = 0.01 { didSet { let imageWidth = 1.0 / Float(self.renderFramebuffer?.size.width ?? 2048) uniformSettings["fractionalWidthOfPixel"] = max(fractionalWidthOfAPixel, imageWidth) @@ -8,9 +8,9 @@ public class PolkaDot: BasicOperation { } public init() { - super.init(fragmentShader:PolkaDotFragmentShader, numberOfInputs:1) + super.init(fragmentShader: PolkaDotFragmentShader, numberOfInputs: 1) - ({fractionalWidthOfAPixel = 0.01})() - ({dotScaling = 0.90})() + ({ fractionalWidthOfAPixel = 0.01 })() + ({ dotScaling = 0.90 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Posterize.swift b/framework/Source/Operations/Posterize.swift index 501b333f..998b62b9 100644 --- a/framework/Source/Operations/Posterize.swift +++ b/framework/Source/Operations/Posterize.swift @@ -1,9 +1,9 @@ public class Posterize: BasicOperation { - public var colorLevels:Float = 10.0 { didSet { uniformSettings["colorLevels"] = colorLevels } } + public var colorLevels: Float = 10.0 { didSet { uniformSettings["colorLevels"] = colorLevels } } public init() { - super.init(fragmentShader:PosterizeFragmentShader, numberOfInputs:1) + super.init(fragmentShader: PosterizeFragmentShader, numberOfInputs: 1) - ({colorLevels = 10.0})() + ({ colorLevels = 10.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/PrewittEdgeDetection.swift b/framework/Source/Operations/PrewittEdgeDetection.swift index f42220c6..a859cf9d 100644 --- a/framework/Source/Operations/PrewittEdgeDetection.swift +++ b/framework/Source/Operations/PrewittEdgeDetection.swift @@ -1,9 +1,9 @@ public class PrewittEdgeDetection: TextureSamplingOperation { - public var edgeStrength:Float = 1.0 { didSet { uniformSettings["edgeStrength"] = edgeStrength } } + public var edgeStrength: Float = 1.0 { didSet { uniformSettings["edgeStrength"] = edgeStrength } } public init() { - 
super.init(fragmentShader:PrewittEdgeDetectionFragmentShader) + super.init(fragmentShader: PrewittEdgeDetectionFragmentShader) - ({edgeStrength = 1.0})() + ({ edgeStrength = 1.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/RGBAdjustmentFilter.swift b/framework/Source/Operations/RGBAdjustmentFilter.swift index a437a07d..18c3b94b 100644 --- a/framework/Source/Operations/RGBAdjustmentFilter.swift +++ b/framework/Source/Operations/RGBAdjustmentFilter.swift @@ -1,13 +1,13 @@ public class RGBAdjustment: BasicOperation { - public var red:Float = 1.0 { didSet { uniformSettings["redAdjustment"] = red } } - public var blue:Float = 1.0 { didSet { uniformSettings["blueAdjustment"] = blue } } - public var green:Float = 1.0 { didSet { uniformSettings["greenAdjustment"] = green } } + public var red: Float = 1.0 { didSet { uniformSettings["redAdjustment"] = red } } + public var blue: Float = 1.0 { didSet { uniformSettings["blueAdjustment"] = blue } } + public var green: Float = 1.0 { didSet { uniformSettings["greenAdjustment"] = green } } public init() { - super.init(fragmentShader:RGBAdjustmentFragmentShader, numberOfInputs:1) + super.init(fragmentShader: RGBAdjustmentFragmentShader, numberOfInputs: 1) - ({red = 1.0})() - ({blue = 1.0})() - ({green = 1.0})() + ({ red = 1.0 })() + ({ blue = 1.0 })() + ({ green = 1.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ResizeCrop.swift b/framework/Source/Operations/ResizeCrop.swift new file mode 100644 index 00000000..7cc945da --- /dev/null +++ b/framework/Source/Operations/ResizeCrop.swift @@ -0,0 +1,97 @@ +public struct ResizeOutputInfo { + // size in pixel + let finalCropSize: Size + // normalized size within [0, 1] of inputSize + let normalizedCropSize: Size + // normalized offset to [0, 1] of inputSize + let normalizedOffsetFromOrigin: Position +} + +public func limitedSizeAndRatio(of inputSize: Size, to maxSize: Size) -> ResizeOutputInfo { + // Aspect fit 
maxSize to inputSize to get normalized size and offset + let aspectFitRatio = min(inputSize.width / maxSize.width, inputSize.height / maxSize.height) + let cropSizeInInput = Size(width: maxSize.width * aspectFitRatio, height: maxSize.height * aspectFitRatio) + let normalizedCropSize = Size(width: cropSizeInInput.width / inputSize.width, height: cropSizeInInput.height / inputSize.height) + let normalizedOffsetFromOrigin = Position((inputSize.width - cropSizeInInput.width) / 2 / inputSize.width, + (inputSize.height - cropSizeInInput.height) / 2 / inputSize.height) + + let finalCropSize: Size + if inputSize.width < maxSize.width && inputSize.height < maxSize.height { + // inputSize is smaller, use cropSizeInInput as finalCropSize + finalCropSize = cropSizeInInput + } else { + // inputSize is larger, use maxSize as finalCropSize + finalCropSize = maxSize + } + return ResizeOutputInfo(finalCropSize: finalCropSize, normalizedCropSize: normalizedCropSize, normalizedOffsetFromOrigin: normalizedOffsetFromOrigin) +} + +public func calculateResizeOutput(inputSize: Size, outputSize: Size?, scaleOutputSizeToFill: Bool) -> ResizeOutputInfo { + let finalCropSize: Size + let normalizedCropSize: Size + let normalizedOffsetFromOrigin: Position + + if let outputSize = outputSize { + if scaleOutputSizeToFill { + // finalCropSize won't be resized + let ratioW = outputSize.width / inputSize.width + let ratioH = outputSize.height / inputSize.height + if ratioW > ratioH { + finalCropSize = Size(width: inputSize.width, height: inputSize.width * (outputSize.height / outputSize.width)) + } else { + finalCropSize = Size(width: inputSize.height * (outputSize.width / outputSize.height), height: inputSize.height) + } + } else { + // finalCropSize might be resized + finalCropSize = outputSize + } + + // Scale finalCropSize to inputSize to crop original content + let aspectFitRatioToOrigin = min(inputSize.width / finalCropSize.width, inputSize.height / finalCropSize.height) + let cropSizeInOrigin 
= Size(width: finalCropSize.width * aspectFitRatioToOrigin, height: finalCropSize.height * aspectFitRatioToOrigin) + normalizedCropSize = Size(width: cropSizeInOrigin.width / inputSize.width, height: cropSizeInOrigin.height / inputSize.height) + normalizedOffsetFromOrigin = Position((inputSize.width - cropSizeInOrigin.width) / 2 / inputSize.width, + (inputSize.height - cropSizeInOrigin.height) / 2 / inputSize.height) + } else { + finalCropSize = inputSize + normalizedOffsetFromOrigin = Position.zero + normalizedCropSize = Size(width: 1, height: 1) + } + + return ResizeOutputInfo(finalCropSize: finalCropSize, normalizedCropSize: normalizedCropSize, normalizedOffsetFromOrigin: normalizedOffsetFromOrigin) +} + +open class ResizeCrop: BasicOperation { + public var useCropSizeAsFinal = false + public var cropSizeInPixels: Size? + + public init() { + super.init(fragmentShader: PassthroughFragmentShader, numberOfInputs: 1) + } + + override open func renderFrame() { + let inputFramebuffer: Framebuffer = inputFramebuffers[0]! 
+ let inputGLSize = inputFramebuffer.sizeForTargetOrientation(.portrait) + let inputSize = Size(inputGLSize) + + let resizeOutputInfo = calculateResizeOutput(inputSize: inputSize, outputSize: cropSizeInPixels, scaleOutputSizeToFill: !useCropSizeAsFinal) + + renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties( + orientation: .portrait, + size: GLSize(resizeOutputInfo.finalCropSize), + stencil: false) + + let textureProperties = InputTextureProperties(textureCoordinates: inputFramebuffer.orientation.rotationNeededForOrientation(.portrait).croppedTextureCoordinates(offsetFromOrigin: resizeOutputInfo.normalizedOffsetFromOrigin, cropSize: resizeOutputInfo.normalizedCropSize), texture: inputFramebuffer.texture) + + renderFramebuffer.activateFramebufferForRendering() + clearFramebufferWithColor(backgroundColor) + renderQuadWithShader(shader, uniformSettings: uniformSettings, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [textureProperties]) + releaseIncomingFramebuffers() + } +} + +extension GLSize { + var gpuSize: Size { + return Size(width: Float(width), height: Float(height)) + } +} diff --git a/framework/Source/Operations/SaturationAdjustment.swift b/framework/Source/Operations/SaturationAdjustment.swift index 147867fc..1c30eacc 100755 --- a/framework/Source/Operations/SaturationAdjustment.swift +++ b/framework/Source/Operations/SaturationAdjustment.swift @@ -1,9 +1,9 @@ public class SaturationAdjustment: BasicOperation { - public var saturation:Float = 1.0 { didSet { uniformSettings["saturation"] = saturation } } + public var saturation: Float = 1.0 { didSet { uniformSettings["saturation"] = saturation } } public init() { - super.init(fragmentShader:SaturationFragmentShader, numberOfInputs:1) + super.init(fragmentShader: SaturationFragmentShader, numberOfInputs: 1) - ({saturation = 1.0})() + ({ saturation = 1.0 })() } -} \ No newline at end of file +} diff --git 
a/framework/Source/Operations/SaturationBlend.swift b/framework/Source/Operations/SaturationBlend.swift index df826917..b6d491de 100644 --- a/framework/Source/Operations/SaturationBlend.swift +++ b/framework/Source/Operations/SaturationBlend.swift @@ -1,5 +1,5 @@ public class SaturationBlend: BasicOperation { public init() { - super.init(fragmentShader:SaturationBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: SaturationBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ScreenBlend.swift b/framework/Source/Operations/ScreenBlend.swift index 050dbcf4..b892fb9f 100644 --- a/framework/Source/Operations/ScreenBlend.swift +++ b/framework/Source/Operations/ScreenBlend.swift @@ -1,5 +1,5 @@ public class ScreenBlend: BasicOperation { public init() { - super.init(fragmentShader:ScreenBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: ScreenBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/SepiaToneFilter.swift b/framework/Source/Operations/SepiaToneFilter.swift index bf30137d..404698f1 100644 --- a/framework/Source/Operations/SepiaToneFilter.swift +++ b/framework/Source/Operations/SepiaToneFilter.swift @@ -2,9 +2,9 @@ public class SepiaToneFilter: ColorMatrixFilter { override public init() { super.init() - ({colorMatrix = Matrix4x4(rowMajorValues:[0.3588, 0.7044, 0.1368, 0.0, + ({colorMatrix = Matrix4x4(rowMajorValues: [0.3588, 0.7044, 0.1368, 0.0, 0.2990, 0.5870, 0.1140, 0.0, - 0.2392, 0.4696, 0.0912 ,0.0, + 0.2392, 0.4696, 0.0912, 0.0, 0.0, 0.0, 0.0, 1.0])})() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Sharpen.swift b/framework/Source/Operations/Sharpen.swift index 3ba518dc..24acc6bf 100644 --- a/framework/Source/Operations/Sharpen.swift +++ b/framework/Source/Operations/Sharpen.swift @@ -1,16 +1,16 @@ public class Sharpen: BasicOperation { - public var sharpness:Float = 
0.0 { didSet { uniformSettings["sharpness"] = sharpness } } - public var overriddenTexelSize:Size? + public var sharpness: Float = 0.0 { didSet { uniformSettings["sharpness"] = sharpness } } + public var overriddenTexelSize: Size? public init() { - super.init(vertexShader:SharpenVertexShader, fragmentShader:SharpenFragmentShader, numberOfInputs:1) + super.init(vertexShader: SharpenVertexShader, fragmentShader: SharpenFragmentShader, numberOfInputs: 1) - ({sharpness = 0.0})() + ({ sharpness = 0.0 })() } - override func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { + override open func configureFramebufferSpecificUniforms(_ inputFramebuffer: Framebuffer) { let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) - let texelSize = overriddenTexelSize ?? inputFramebuffer.texelSize(for:outputRotation) + let texelSize = overriddenTexelSize ?? inputFramebuffer.texelSize(for: outputRotation) uniformSettings["texelWidth"] = texelSize.width uniformSettings["texelHeight"] = texelSize.height } diff --git a/framework/Source/Operations/ShiTomasiFeatureDetector.swift b/framework/Source/Operations/ShiTomasiFeatureDetector.swift index 4e4c33b9..f8aff136 100644 --- a/framework/Source/Operations/ShiTomasiFeatureDetector.swift +++ b/framework/Source/Operations/ShiTomasiFeatureDetector.swift @@ -6,6 +6,6 @@ public class ShiTomasiFeatureDetector: HarrisCornerDetector { public init() { - super.init(fragmentShader:ShiTomasiFeatureDetectorFragmentShader) + super.init(fragmentShader: ShiTomasiFeatureDetectorFragmentShader) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/SingleComponentGaussianBlur.swift b/framework/Source/Operations/SingleComponentGaussianBlur.swift index 919210ef..33c94c05 100644 --- a/framework/Source/Operations/SingleComponentGaussianBlur.swift +++ b/framework/Source/Operations/SingleComponentGaussianBlur.swift @@ -1,11 +1,11 @@ public class 
SingleComponentGaussianBlur: TwoStageOperation { - public var blurRadiusInPixels:Float { + public var blurRadiusInPixels: Float { didSet { - let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit:8.0, override:overrideDownsamplingOptimization) + let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit: 8.0, override: overrideDownsamplingOptimization) sharedImageProcessingContext.runOperationAsynchronously { self.downsamplingFactor = downsamplingFactor let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) - self.shader = crashOnShaderCompileFailure("GaussianBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma)), fragmentShader:fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma)))} + self.shader = crashOnShaderCompileFailure("GaussianBlur") { try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma: Double(sigma)), fragmentShader: fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(pixelRadius, sigma: Double(sigma))) } } } } @@ -13,16 +13,16 @@ public class SingleComponentGaussianBlur: TwoStageOperation { public init() { blurRadiusInPixels = 2.0 let pixelRadius = pixelRadiusForBlurSigma(Double(blurRadiusInPixels)) - let initialShader = crashOnShaderCompileFailure("GaussianBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:2.0), fragmentShader:fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(pixelRadius, sigma:2.0))} - super.init(shader:initialShader, numberOfInputs:1) + let initialShader = crashOnShaderCompileFailure("GaussianBlur") { try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma: 2.0), fragmentShader: 
fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(pixelRadius, sigma: 2.0)) } + super.init(shader: initialShader, numberOfInputs: 1) } } -func fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(_ radius:UInt, sigma:Double) -> String { - guard (radius > 0) else { return PassthroughFragmentShader } +func fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(_ radius: UInt, sigma: Double) -> String { + guard radius > 0 else { return PassthroughFragmentShader } - let standardWeights = standardGaussianWeightsForRadius(radius, sigma:sigma) + let standardWeights = standardGaussianWeightsForRadius(radius, sigma: sigma) let numberOfOptimizedOffsets = min(radius / 2 + (radius % 2), 7) let trueNumberOfOptimizedOffsets = radius / 2 + (radius % 2) @@ -45,7 +45,7 @@ func fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(_ radius:UInt } // If the number of required samples exceeds the amount we can pass in via varyings, we have to do dependent texture reads in the fragment shader - if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets) { + if trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets { #if GLES shaderString += "highp vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" #else @@ -54,8 +54,8 @@ func fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(_ radius:UInt } for currentOverlowTextureRead in numberOfOptimizedOffsets.. 
self.gaussianBlur --> self.toonFilter --> output } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/SobelEdgeDetection.swift b/framework/Source/Operations/SobelEdgeDetection.swift index 161561f2..1948c1ca 100644 --- a/framework/Source/Operations/SobelEdgeDetection.swift +++ b/framework/Source/Operations/SobelEdgeDetection.swift @@ -1,9 +1,9 @@ public class SobelEdgeDetection: TextureSamplingOperation { - public var edgeStrength:Float = 1.0 { didSet { uniformSettings["edgeStrength"] = edgeStrength } } + public var edgeStrength: Float = 1.0 { didSet { uniformSettings["edgeStrength"] = edgeStrength } } public init() { - super.init(fragmentShader:SobelEdgeDetectionFragmentShader) + super.init(fragmentShader: SobelEdgeDetectionFragmentShader) - ({edgeStrength = 1.0})() + ({ edgeStrength = 1.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/SoftElegance.swift b/framework/Source/Operations/SoftElegance.swift index 50e85ef3..584ec786 100755 --- a/framework/Source/Operations/SoftElegance.swift +++ b/framework/Source/Operations/SoftElegance.swift @@ -8,9 +8,13 @@ public class SoftElegance: OperationGroup { public override init() { super.init() - self.configureGroup{input, output in - self.lookup1.lookupImage = PictureInput(imageName:"lookup_soft_elegance_1.png") - self.lookup2.lookupImage = PictureInput(imageName:"lookup_soft_elegance_2.png") + self.configureGroup {input, output in + do { + self.lookup1.lookupImage = try PictureInput(imageName: "lookup_soft_elegance_1.png") + self.lookup2.lookupImage = try PictureInput(imageName: "lookup_soft_elegance_2.png") + } catch { + print("ERROR: Unable to create PictureInput \(error)") + } self.gaussianBlur.blurRadiusInPixels = 10.0 self.alphaBlend.mix = 0.14 diff --git a/framework/Source/Operations/SoftLightBlend.swift b/framework/Source/Operations/SoftLightBlend.swift index 365f0587..e33c4a7b 100644 --- a/framework/Source/Operations/SoftLightBlend.swift +++ 
b/framework/Source/Operations/SoftLightBlend.swift @@ -1,5 +1,5 @@ public class SoftLightBlend: BasicOperation { public init() { - super.init(fragmentShader:SoftLightBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: SoftLightBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Solarize.swift b/framework/Source/Operations/Solarize.swift index 94a0999d..773c268d 100644 --- a/framework/Source/Operations/Solarize.swift +++ b/framework/Source/Operations/Solarize.swift @@ -1,9 +1,9 @@ public class Solarize: BasicOperation { - public var threshold:Float = 0.5 { didSet { uniformSettings["threshold"] = threshold } } + public var threshold: Float = 0.5 { didSet { uniformSettings["threshold"] = threshold } } public init() { - super.init(fragmentShader:SolarizeFragmentShader, numberOfInputs:1) + super.init(fragmentShader: SolarizeFragmentShader, numberOfInputs: 1) - ({threshold = 0.5})() + ({ threshold = 0.5 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/SolidColorGenerator.swift b/framework/Source/Operations/SolidColorGenerator.swift index 67facd28..c9a55c34 100644 --- a/framework/Source/Operations/SolidColorGenerator.swift +++ b/framework/Source/Operations/SolidColorGenerator.swift @@ -1,6 +1,5 @@ public class SolidColorGenerator: ImageGenerator { - - public func renderColor(_ color:Color) { + public func renderColor(_ color: Color) { imageFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(color) diff --git a/framework/Source/Operations/SourceOverBlend.swift b/framework/Source/Operations/SourceOverBlend.swift index 636b8f87..e7c0945b 100644 --- a/framework/Source/Operations/SourceOverBlend.swift +++ b/framework/Source/Operations/SourceOverBlend.swift @@ -1,5 +1,5 @@ public class SourceOverBlend: BasicOperation { public init() { - super.init(fragmentShader:SourceOverBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: 
SourceOverBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/SphereRefraction.swift b/framework/Source/Operations/SphereRefraction.swift index 617e142b..9d0dffd4 100644 --- a/framework/Source/Operations/SphereRefraction.swift +++ b/framework/Source/Operations/SphereRefraction.swift @@ -1,15 +1,15 @@ public class SphereRefraction: BasicOperation { - public var radius:Float = 0.25 { didSet { uniformSettings["radius"] = radius } } - public var refractiveIndex:Float = 0.71 { didSet { uniformSettings["refractiveIndex"] = refractiveIndex } } - public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } + public var radius: Float = 0.25 { didSet { uniformSettings["radius"] = radius } } + public var refractiveIndex: Float = 0.71 { didSet { uniformSettings["refractiveIndex"] = refractiveIndex } } + public var center = Position.center { didSet { uniformSettings["center"] = center } } public init() { - super.init(fragmentShader:SphereRefractionFragmentShader, numberOfInputs:1) + super.init(fragmentShader: SphereRefractionFragmentShader, numberOfInputs: 1) - ({radius = 0.25})() - ({refractiveIndex = 0.71})() - ({center = Position.center})() + ({ radius = 0.25 })() + ({ refractiveIndex = 0.71 })() + ({ center = Position.center })() - self.backgroundColor = Color(red:0.0, green:0.0, blue:0.0, alpha:0.0) + self.backgroundColor = Color(red: 0.0, green: 0.0, blue: 0.0, alpha: 0.0) } } diff --git a/framework/Source/Operations/StretchDistortion.swift b/framework/Source/Operations/StretchDistortion.swift index 4e3a4a93..d61a1bee 100644 --- a/framework/Source/Operations/StretchDistortion.swift +++ b/framework/Source/Operations/StretchDistortion.swift @@ -1,9 +1,9 @@ public class StretchDistortion: BasicOperation { - public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } + public var center = Position.center { didSet { uniformSettings["center"] = 
center } } public init() { - super.init(fragmentShader:StretchDistortionFragmentShader, numberOfInputs:1) + super.init(fragmentShader: StretchDistortionFragmentShader, numberOfInputs: 1) - ({center = Position.center})() + ({ center = Position.center })() } } diff --git a/framework/Source/Operations/SubtractBlend.swift b/framework/Source/Operations/SubtractBlend.swift index 556a4e1f..7adac75d 100644 --- a/framework/Source/Operations/SubtractBlend.swift +++ b/framework/Source/Operations/SubtractBlend.swift @@ -1,5 +1,5 @@ public class SubtractBlend: BasicOperation { public init() { - super.init(fragmentShader:SubtractBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: SubtractBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/SwirlDistortion.swift b/framework/Source/Operations/SwirlDistortion.swift index aea785a9..f9c5ae66 100644 --- a/framework/Source/Operations/SwirlDistortion.swift +++ b/framework/Source/Operations/SwirlDistortion.swift @@ -1,13 +1,13 @@ public class SwirlDistortion: BasicOperation { - public var radius:Float = 0.5 { didSet { uniformSettings["radius"] = radius } } - public var angle:Float = 1.0 { didSet { uniformSettings["angle"] = angle } } - public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } + public var radius: Float = 0.5 { didSet { uniformSettings["radius"] = radius } } + public var angle: Float = 1.0 { didSet { uniformSettings["angle"] = angle } } + public var center = Position.center { didSet { uniformSettings["center"] = center } } public init() { - super.init(fragmentShader:SwirlFragmentShader, numberOfInputs:1) + super.init(fragmentShader: SwirlFragmentShader, numberOfInputs: 1) - ({radius = 0.5})() - ({angle = 1.0})() - ({center = Position.center})() + ({ radius = 0.5 })() + ({ angle = 1.0 })() + ({ center = Position.center })() } } diff --git a/framework/Source/Operations/ThresholdSketch.swift 
b/framework/Source/Operations/ThresholdSketch.swift index 0df3afe7..fff109c1 100644 --- a/framework/Source/Operations/ThresholdSketch.swift +++ b/framework/Source/Operations/ThresholdSketch.swift @@ -1,11 +1,11 @@ public class ThresholdSketchFilter: TextureSamplingOperation { - public var edgeStrength:Float = 1.0 { didSet { uniformSettings["edgeStrength"] = edgeStrength } } - public var threshold:Float = 0.25 { didSet { uniformSettings["threshold"] = threshold } } + public var edgeStrength: Float = 1.0 { didSet { uniformSettings["edgeStrength"] = edgeStrength } } + public var threshold: Float = 0.25 { didSet { uniformSettings["threshold"] = threshold } } public init() { - super.init(fragmentShader:ThresholdSketchFragmentShader) + super.init(fragmentShader: ThresholdSketchFragmentShader) - ({edgeStrength = 1.0})() - ({threshold = 0.25})() + ({ edgeStrength = 1.0 })() + ({ threshold = 0.25 })() } } diff --git a/framework/Source/Operations/ThresholdSobelEdgeDetection.swift b/framework/Source/Operations/ThresholdSobelEdgeDetection.swift index 7225b4c9..f16462e6 100644 --- a/framework/Source/Operations/ThresholdSobelEdgeDetection.swift +++ b/framework/Source/Operations/ThresholdSobelEdgeDetection.swift @@ -1,11 +1,11 @@ public class ThresholdSobelEdgeDetection: TextureSamplingOperation { - public var edgeStrength:Float = 1.0 { didSet { uniformSettings["edgeStrength"] = edgeStrength } } - public var threshold:Float = 0.25 { didSet { uniformSettings["threshold"] = threshold } } + public var edgeStrength: Float = 1.0 { didSet { uniformSettings["edgeStrength"] = edgeStrength } } + public var threshold: Float = 0.25 { didSet { uniformSettings["threshold"] = threshold } } public init() { - super.init(fragmentShader:ThresholdEdgeDetectionFragmentShader) + super.init(fragmentShader: ThresholdEdgeDetectionFragmentShader) - ({edgeStrength = 1.0})() - ({threshold = 0.25})() + ({ edgeStrength = 1.0 })() + ({ threshold = 0.25 })() } -} \ No newline at end of file +} diff --git 
a/framework/Source/Operations/TiltShift.swift b/framework/Source/Operations/TiltShift.swift index 966d7c74..0db0b216 100644 --- a/framework/Source/Operations/TiltShift.swift +++ b/framework/Source/Operations/TiltShift.swift @@ -1,23 +1,23 @@ public class TiltShift: OperationGroup { - public var blurRadiusInPixels:Float = 7.0 { didSet { gaussianBlur.blurRadiusInPixels = blurRadiusInPixels } } - public var topFocusLevel:Float = 0.4 { didSet { tiltShift.uniformSettings["topFocusLevel"] = topFocusLevel } } - public var bottomFocusLevel:Float = 0.6 { didSet { tiltShift.uniformSettings["bottomFocusLevel"] = bottomFocusLevel } } - public var focusFallOffRate:Float = 0.2 { didSet { tiltShift.uniformSettings["focusFallOffRate"] = focusFallOffRate } } + public var blurRadiusInPixels: Float = 7.0 { didSet { gaussianBlur.blurRadiusInPixels = blurRadiusInPixels } } + public var topFocusLevel: Float = 0.4 { didSet { tiltShift.uniformSettings["topFocusLevel"] = topFocusLevel } } + public var bottomFocusLevel: Float = 0.6 { didSet { tiltShift.uniformSettings["bottomFocusLevel"] = bottomFocusLevel } } + public var focusFallOffRate: Float = 0.2 { didSet { tiltShift.uniformSettings["focusFallOffRate"] = focusFallOffRate } } let gaussianBlur = GaussianBlur() - let tiltShift = BasicOperation(fragmentShader:TiltShiftFragmentShader, numberOfInputs:2) + let tiltShift = BasicOperation(fragmentShader: TiltShiftFragmentShader, numberOfInputs: 2) public override init() { super.init() - ({blurRadiusInPixels = 7.0})() - ({topFocusLevel = 0.4})() - ({bottomFocusLevel = 0.6})() - ({focusFallOffRate = 0.2})() + ({ blurRadiusInPixels = 7.0 })() + ({ topFocusLevel = 0.4 })() + ({ bottomFocusLevel = 0.6 })() + ({ focusFallOffRate = 0.2 })() - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.tiltShift --> output input --> self.gaussianBlur --> self.tiltShift } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ToonFilter.swift 
b/framework/Source/Operations/ToonFilter.swift index 091067dd..c5a0a9a2 100644 --- a/framework/Source/Operations/ToonFilter.swift +++ b/framework/Source/Operations/ToonFilter.swift @@ -1,11 +1,11 @@ public class ToonFilter: TextureSamplingOperation { - public var threshold:Float = 0.2 { didSet { uniformSettings["threshold"] = threshold } } - public var quantizationLevels:Float = 10.0 { didSet { uniformSettings["quantizationLevels"] = quantizationLevels } } + public var threshold: Float = 0.2 { didSet { uniformSettings["threshold"] = threshold } } + public var quantizationLevels: Float = 10.0 { didSet { uniformSettings["quantizationLevels"] = quantizationLevels } } public init() { - super.init(fragmentShader:ToonFragmentShader) + super.init(fragmentShader: ToonFragmentShader) - ({threshold = 0.2})() - ({quantizationLevels = 10.0})() + ({ threshold = 0.2 })() + ({ quantizationLevels = 10.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/TransformOperation.swift b/framework/Source/Operations/TransformOperation.swift index 6b87a377..b3a58cc0 100644 --- a/framework/Source/Operations/TransformOperation.swift +++ b/framework/Source/Operations/TransformOperation.swift @@ -12,31 +12,41 @@ #endif #endif -public class TransformOperation: BasicOperation { - public var transform:Matrix4x4 = Matrix4x4.identity { didSet { uniformSettings["transformMatrix"] = transform } } - var normalizedImageVertices:[GLfloat]! +open class TransformOperation: BasicOperation { + public var transform = Matrix4x4.identity { didSet { uniformSettings["transformMatrix"] = transform } } + public var anchorTopLeft = false + public var ignoreAspectRatio = false + var normalizedImageVertices: [GLfloat]! 
public init() { - super.init(vertexShader:TransformVertexShader, fragmentShader:PassthroughFragmentShader, numberOfInputs:1) + super.init(vertexShader: TransformVertexShader, fragmentShader: PassthroughFragmentShader, numberOfInputs: 1) - ({transform = Matrix4x4.identity})() + ({ transform = Matrix4x4.identity })() } - override func internalRenderFunction(_ inputFramebuffer:Framebuffer, textureProperties:[InputTextureProperties]) { - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertices:normalizedImageVertices, inputTextures:textureProperties) + override func internalRenderFunction(_ inputFramebuffer: Framebuffer, textureProperties: [InputTextureProperties]) { + renderQuadWithShader(shader, uniformSettings: uniformSettings, vertices: normalizedImageVertices, inputTextures: textureProperties) releaseIncomingFramebuffers() } - override func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { + override open func configureFramebufferSpecificUniforms(_ inputFramebuffer: Framebuffer) { let outputRotation = overriddenOutputRotation ?? 
inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) - let aspectRatio = inputFramebuffer.aspectRatioForRotation(outputRotation) - let orthoMatrix = orthographicMatrix(-1.0, right:1.0, bottom:-1.0 * aspectRatio, top:1.0 * aspectRatio, near:-1.0, far:1.0) + var aspectRatio = inputFramebuffer.aspectRatioForRotation(outputRotation) + if ignoreAspectRatio { + aspectRatio = 1 + } + let orthoMatrix = orthographicMatrix(-1.0, right: 1.0, bottom: -1.0 * aspectRatio, top: 1.0 * aspectRatio, near: -1.0, far: 1.0, anchorTopLeft: anchorTopLeft) normalizedImageVertices = normalizedImageVerticesForAspectRatio(aspectRatio) uniformSettings["orthographicMatrix"] = orthoMatrix } -} - -func normalizedImageVerticesForAspectRatio(_ aspectRatio:Float) -> [GLfloat] { - return [-1.0, GLfloat(-aspectRatio), 1.0, GLfloat(-aspectRatio), -1.0, GLfloat(aspectRatio), 1.0, GLfloat(aspectRatio)] + + func normalizedImageVerticesForAspectRatio(_ aspectRatio: Float) -> [GLfloat] { + // [TopLeft.x, TopLeft.y, TopRight.x, TopRight.y, BottomLeft.x, BottomLeft.y, BottomRight.x, BottomRight.y] + if anchorTopLeft { + return [0.0, 0.0, 1.0, 0.0, 0.0, GLfloat(aspectRatio), 1.0, GLfloat(aspectRatio)] + } else { + return [-1.0, GLfloat(-aspectRatio), 1.0, GLfloat(-aspectRatio), -1.0, GLfloat(aspectRatio), 1.0, GLfloat(aspectRatio)] + } + } } diff --git a/framework/Source/Operations/UnsharpMask.swift b/framework/Source/Operations/UnsharpMask.swift index ecda7dcf..2d1793aa 100644 --- a/framework/Source/Operations/UnsharpMask.swift +++ b/framework/Source/Operations/UnsharpMask.swift @@ -3,17 +3,17 @@ public class UnsharpMask: OperationGroup { public var intensity: Float = 1.0 { didSet { unsharpMask.uniformSettings["intensity"] = intensity } } let gaussianBlur = GaussianBlur() - let unsharpMask = BasicOperation(fragmentShader:UnsharpMaskFragmentShader, numberOfInputs:2) + let unsharpMask = BasicOperation(fragmentShader: UnsharpMaskFragmentShader, numberOfInputs: 2) public override init() { 
blurRadiusInPixels = 4.0 super.init() - ({intensity = 1.0})() + ({ intensity = 1.0 })() - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.unsharpMask input --> self.gaussianBlur --> self.unsharpMask --> output } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Vibrance.swift b/framework/Source/Operations/Vibrance.swift index 8b7671d9..13ead009 100644 --- a/framework/Source/Operations/Vibrance.swift +++ b/framework/Source/Operations/Vibrance.swift @@ -1,9 +1,9 @@ public class Vibrance: BasicOperation { - public var vibrance:Float = 0.0 { didSet { uniformSettings["vibrance"] = vibrance } } + public var vibrance: Float = 0.0 { didSet { uniformSettings["vibrance"] = vibrance } } public init() { - super.init(fragmentShader:VibranceFragmentShader, numberOfInputs:1) + super.init(fragmentShader: VibranceFragmentShader, numberOfInputs: 1) - ({vibrance = 0.0})() + ({ vibrance = 0.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Vignette.swift b/framework/Source/Operations/Vignette.swift index c027a544..1b2afa11 100644 --- a/framework/Source/Operations/Vignette.swift +++ b/framework/Source/Operations/Vignette.swift @@ -1,15 +1,15 @@ public class Vignette: BasicOperation { - public var center:Position = Position.center { didSet { uniformSettings["vignetteCenter"] = center } } - public var color:Color = Color.black { didSet { uniformSettings["vignetteColor"] = color } } - public var start:Float = 0.3 { didSet { uniformSettings["vignetteStart"] = start } } - public var end:Float = 0.75 { didSet { uniformSettings["vignetteEnd"] = end } } + public var center = Position.center { didSet { uniformSettings["vignetteCenter"] = center } } + public var color = Color.black { didSet { uniformSettings["vignetteColor"] = color } } + public var start: Float = 0.3 { didSet { uniformSettings["vignetteStart"] = start } } + public var end: Float = 0.75 { didSet { 
uniformSettings["vignetteEnd"] = end } } public init() { - super.init(fragmentShader:VignetteFragmentShader, numberOfInputs:1) + super.init(fragmentShader: VignetteFragmentShader, numberOfInputs: 1) - ({center = Position.center})() - ({color = Color.black})() - ({start = 0.3})() - ({end = 0.75})() + ({ center = Position.center })() + ({ color = Color.black })() + ({ start = 0.3 })() + ({ end = 0.75 })() } } diff --git a/framework/Source/Operations/WhiteBalance.swift b/framework/Source/Operations/WhiteBalance.swift index 5335d495..1d205a46 100644 --- a/framework/Source/Operations/WhiteBalance.swift +++ b/framework/Source/Operations/WhiteBalance.swift @@ -1,11 +1,11 @@ public class WhiteBalance: BasicOperation { - public var temperature:Float = 5000.0 { didSet { uniformSettings["temperature"] = temperature < 5000.0 ? 0.0004 * (temperature - 5000.0) : 0.00006 * (temperature - 5000.0) } } - public var tint:Float = 0.0 { didSet { uniformSettings["tint"] = tint / 100.0 } } + public var temperature: Float = 5000.0 { didSet { uniformSettings["temperature"] = temperature < 5000.0 ? 
0.0004 * (temperature - 5000.0) : 0.00006 * (temperature - 5000.0) } } + public var tint: Float = 0.0 { didSet { uniformSettings["tint"] = tint / 100.0 } } public init() { - super.init(fragmentShader:WhiteBalanceFragmentShader, numberOfInputs:1) + super.init(fragmentShader: WhiteBalanceFragmentShader, numberOfInputs: 1) - ({temperature = 5000.0})() - ({tint = 0.0})() + ({ temperature = 5000.0 })() + ({ tint = 0.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ZoomBlur.swift b/framework/Source/Operations/ZoomBlur.swift index 627fe060..d77789fc 100644 --- a/framework/Source/Operations/ZoomBlur.swift +++ b/framework/Source/Operations/ZoomBlur.swift @@ -1,11 +1,11 @@ public class ZoomBlur: BasicOperation { - public var blurSize:Float = 1.0 { didSet { uniformSettings["blurSize"] = blurSize } } - public var blurCenter:Position = Position.center { didSet { uniformSettings["blurCenter"] = blurCenter } } + public var blurSize: Float = 1.0 { didSet { uniformSettings["blurSize"] = blurSize } } + public var blurCenter = Position.center { didSet { uniformSettings["blurCenter"] = blurCenter } } public init() { - super.init(fragmentShader:ZoomBlurFragmentShader, numberOfInputs:1) + super.init(fragmentShader: ZoomBlurFragmentShader, numberOfInputs: 1) - ({blurSize = 1.0})() - ({blurCenter = Position.center})() + ({ blurSize = 1.0 })() + ({ blurCenter = Position.center })() } } diff --git a/framework/Source/Operations/iOSBlur.swift b/framework/Source/Operations/iOSBlur.swift index a5478078..707a2b52 100644 --- a/framework/Source/Operations/iOSBlur.swift +++ b/framework/Source/Operations/iOSBlur.swift @@ -1,7 +1,7 @@ public class iOSBlur: OperationGroup { - public var blurRadiusInPixels:Float = 48.0 { didSet { gaussianBlur.blurRadiusInPixels = blurRadiusInPixels } } - public var saturation:Float = 0.8 { didSet { saturationFilter.saturation = saturation } } - public var rangeReductionFactor:Float = 0.6 { didSet { luminanceRange.rangeReductionFactor 
= rangeReductionFactor } } + public var blurRadiusInPixels: Float = 48.0 { didSet { gaussianBlur.blurRadiusInPixels = blurRadiusInPixels } } + public var saturation: Float = 0.8 { didSet { saturationFilter.saturation = saturation } } + public var rangeReductionFactor: Float = 0.6 { didSet { luminanceRange.rangeReductionFactor = rangeReductionFactor } } let saturationFilter = SaturationAdjustment() let gaussianBlur = GaussianBlur() @@ -10,12 +10,12 @@ public class iOSBlur: OperationGroup { public override init() { super.init() - ({blurRadiusInPixels = 48.0})() - ({saturation = 0.8})() - ({rangeReductionFactor = 0.6})() + ({ blurRadiusInPixels = 48.0 })() + ({ saturation = 0.8 })() + ({ rangeReductionFactor = 0.6 })() - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.saturationFilter --> self.gaussianBlur --> self.luminanceRange --> output } } -} \ No newline at end of file +} diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index 65611ea7..b422dec3 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -2,27 +2,38 @@ // MARK: Basic types import Foundation -public protocol ImageSource { - var targets:TargetContainer { get } - func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) +public var _needCheckFilterContainerThread: Bool? 
+ +public protocol ImageSource: AnyObject { + var _needCheckSourceThread: Bool { get } + #if DEBUG + var debugRenderInfo: String { get } + func debugGetOnePassRenderInfos() -> String + #endif + var targets: TargetContainer { get } + func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) } -public protocol ImageConsumer:AnyObject { - var maximumInputs:UInt { get } - var sources:SourceContainer { get } +public protocol ImageConsumer: AnyObject { + var _needCheckConsumerThread: Bool { get } + #if DEBUG + var debugRenderInfo: String { get } + #endif + var maximumInputs: UInt { get } + var sources: SourceContainer { get } - func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) + func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) } public protocol ImageProcessingOperation: ImageConsumer, ImageSource { } infix operator --> : AdditionPrecedence -//precedencegroup ProcessingOperationPrecedence { +// precedencegroup ProcessingOperationPrecedence { // associativity: left //// higherThan: Multiplicative -//} -@discardableResult public func -->(source:ImageSource, destination:T) -> T { +// } +@discardableResult public func -->(source: ImageSource, destination: T) -> T { source.addTarget(destination) return destination } @@ -31,82 +42,164 @@ infix operator --> : AdditionPrecedence // MARK: Extensions and supporting types public extension ImageSource { - public func addTarget(_ target:ImageConsumer, atTargetIndex:UInt? = nil) { + var _needCheckSourceThread: Bool { + return _needCheckFilterContainerThread ?? true + } + + func addTarget(_ target: ImageConsumer, atTargetIndex: UInt? 
= nil) { + if _needCheckSourceThread { + __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) + } if let targetIndex = atTargetIndex { - target.setSource(self, atIndex:targetIndex) - targets.append(target, indexAtTarget:targetIndex) - transmitPreviousImage(to:target, atIndex:targetIndex) + target.setSource(self, atIndex: targetIndex) + targets.append(target, indexAtTarget: targetIndex) + sharedImageProcessingContext.runOperationAsynchronously { + self.transmitPreviousImage(to: target, atIndex: targetIndex) + } } else if let indexAtTarget = target.addSource(self) { - targets.append(target, indexAtTarget:indexAtTarget) - transmitPreviousImage(to:target, atIndex:indexAtTarget) + targets.append(target, indexAtTarget: indexAtTarget) + sharedImageProcessingContext.runOperationAsynchronously { + self.transmitPreviousImage(to: target, atIndex: indexAtTarget) + } } else { debugPrint("Warning: tried to add target beyond target's input capacity") } } - public func removeAllTargets() { + func removeAllTargets() { + if _needCheckSourceThread { + __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) + } for (target, index) in targets { target.removeSourceAtIndex(index) } targets.removeAll() } - public func updateTargetsWithFramebuffer(_ framebuffer:Framebuffer) { - if targets.count == 0 { // Deal with the case where no targets are attached by immediately returning framebuffer to cache + func remove(_ target: ImageConsumer) { + if _needCheckSourceThread { + __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) + } + for (testTarget, index) in targets { + if target === testTarget { + target.removeSourceAtIndex(index) + targets.remove(target) + } + } + } + + func updateTargetsWithFramebuffer(_ framebuffer: Framebuffer) { + var foundTargets = [(ImageConsumer, UInt)]() + for target in targets { + foundTargets.append(target) + } + + if foundTargets.count == 0 { // Deal with the case where no targets are attached by 
immediately returning framebuffer to cache framebuffer.lock() framebuffer.unlock() } else { // Lock first for each output, to guarantee proper ordering on multi-output operations - for _ in targets { + for _ in foundTargets { framebuffer.lock() } } - for (target, index) in targets { - target.newFramebufferAvailable(framebuffer, fromSourceIndex:index) + for (target, index) in foundTargets { + target.newFramebufferAvailable(framebuffer, fromSourceIndex: index) + } + } + + #if DEBUG + func debugGetOnePassRenderInfos() -> String { + var renderInfos = "" + renderInfos.append(debugRenderInfo) + for target in targets { + if let source = target.0 as? ImageSource { + renderInfos.append(source.debugGetOnePassRenderInfos()) + } else { + renderInfos.append(target.0.debugRenderInfo) + } } + return renderInfos } + #endif } public extension ImageConsumer { - public func addSource(_ source:ImageSource) -> UInt? { - return sources.append(source, maximumInputs:maximumInputs) + var _needCheckConsumerThread: Bool { + return _needCheckFilterContainerThread ?? true } - public func setSource(_ source:ImageSource, atIndex:UInt) { - _ = sources.insert(source, atIndex:atIndex, maximumInputs:maximumInputs) + func addSource(_ source: ImageSource) -> UInt? 
{ + if _needCheckConsumerThread { + __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) + } + return sources.append(source, maximumInputs: maximumInputs) + } + + func setSource(_ source: ImageSource, atIndex: UInt) { + if _needCheckConsumerThread { + __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) + } + _ = sources.insert(source, atIndex: atIndex, maximumInputs: maximumInputs) } - public func removeSourceAtIndex(_ index:UInt) { + func removeSourceAtIndex(_ index: UInt) { + if _needCheckConsumerThread { + __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) + } sources.removeAtIndex(index) } + + func removeAllSources() { + if _needCheckConsumerThread { + __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) + } + sources.sources.removeAll() + } + + func flushWithTinyBuffer(in context: OpenGLContext = sharedImageProcessingContext) { + context.runOperationSynchronously { + do { + for index in 0.. (ImageConsumer, UInt)? 
in #if os(Linux) - if (index >= self.targets.count) { + if index >= self.targets.count { return nil } - while (self.targets[index].value == nil) { - self.targets.remove(at:index) - if (index >= self.targets.count) { + // NOTE: strong retain value, in case the value is released on another thread + var retainedValue = self.targets[index].value + while retainedValue == nil { + self.targets.remove(at: index) + if index >= self.targets.count { return nil } + retainedValue = self.targets[index].value } index += 1 - return (self.targets[index - 1].value!, self.targets[index - 1].indexAtTarget) + return (retainedValue!, self.targets[index - 1].indexAtTarget) #else - return self.dispatchQueue.sync{ - if (index >= self.targets.count) { + return self.dispatchQueue.sync { + if index >= self.targets.count { return nil } - while (self.targets[index].value == nil) { - self.targets.remove(at:index) - if (index >= self.targets.count) { + // NOTE: strong retain value, in case the value is released on another thread + var retainedValue = self.targets[index].value + while retainedValue == nil { + self.targets.remove(at: index) + if index >= self.targets.count { return nil } + retainedValue = self.targets[index].value } index += 1 - return (self.targets[index - 1].value!, self.targets[index - 1].indexAtTarget) - } + return (retainedValue!, self.targets[index - 1].indexAtTarget) + } #endif } } @@ -153,23 +252,33 @@ public class TargetContainer:Sequence { #if os(Linux) self.targets.removeAll() #else - dispatchQueue.async{ + dispatchQueue.async { self.targets.removeAll() } #endif } + + public func remove(_ target: ImageConsumer) { + #if os(Linux) + self.targets = self.targets.filter { $0.value !== target } + #else + dispatchQueue.async { + self.targets = self.targets.filter { $0.value !== target } + } + #endif + } } public class SourceContainer { - var sources:[UInt:ImageSource] = [:] + public var sources: [UInt: ImageSource] = [:] public init() { } - public func append(_ 
source:ImageSource, maximumInputs:UInt) -> UInt? { - var currentIndex:UInt = 0 + public func append(_ source: ImageSource, maximumInputs: UInt) -> UInt? { + var currentIndex: UInt = 0 while currentIndex < maximumInputs { - if (sources[currentIndex] == nil) { + if sources[currentIndex] == nil { sources[currentIndex] = source return currentIndex } @@ -179,49 +288,128 @@ public class SourceContainer { return nil } - public func insert(_ source:ImageSource, atIndex:UInt, maximumInputs:UInt) -> UInt { - guard (atIndex < maximumInputs) else { fatalError("ERROR: Attempted to set a source beyond the maximum number of inputs on this operation") } + public func insert(_ source: ImageSource, atIndex: UInt, maximumInputs: UInt) -> UInt { + guard atIndex < maximumInputs else { fatalError("ERROR: Attempted to set a source beyond the maximum number of inputs on this operation") } sources[atIndex] = source return atIndex } - public func removeAtIndex(_ index:UInt) { + public func removeAtIndex(_ index: UInt) { sources[index] = nil } } public class ImageRelay: ImageProcessingOperation { - public var newImageCallback:((Framebuffer) -> ())? + public var newImageCallback: ((Framebuffer) -> Void)? 
public let sources = SourceContainer() public let targets = TargetContainer() - public let maximumInputs:UInt = 1 - public var preventRelay:Bool = false + public let maximumInputs: UInt = 1 + public var preventRelay = false public init() { } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { - sources.sources[0]?.transmitPreviousImage(to:self, atIndex:0) + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { + guard sources.sources.count > 0 else { return } + sources.sources[0]?.transmitPreviousImage(to: self, atIndex: 0) } - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { if let newImageCallback = newImageCallback { newImageCallback(framebuffer) } - if (!preventRelay) { + if !preventRelay { relayFramebufferOnward(framebuffer) } } - public func relayFramebufferOnward(_ framebuffer:Framebuffer) { + public func relayFramebufferOnward(_ framebuffer: Framebuffer) { // Need to override to guarantee a removal of the previously applied lock for _ in targets { framebuffer.lock() } framebuffer.unlock() for (target, index) in targets { - target.newFramebufferAvailable(framebuffer, fromSourceIndex:index) + target.newFramebufferAvailable(framebuffer, fromSourceIndex: index) } } + + #if DEBUG + public var debugRenderInfo: String = "" + #endif +} + +public protocol DebugPipelineNameable { + var debugNameForPipeline: String { get } +} + +private func simpleName(_ obj: T) -> String { + if let obj = obj as? DebugPipelineNameable { + return obj.debugNameForPipeline + } + + let origin = String(describing: obj) + return origin.split(separator: ".").last.map { String($0) } ?? origin } + +extension OperationGroup { + public var debugPipelineDescription: String { + // if group have custom name, do not use relay.description + if let obj = self as? 
DebugPipelineNameable { + return obj.debugNameForPipeline + } + + return "[\(simpleName(self))(\(inputImageRelay.debugPipelineDescription))]" + } +} + +public extension ImageSource { + var debugPipelineDescription: String { + let nextInfos: [String] = targets.map { consumer, _ in + if let c = consumer as? OperationGroup { + return c.debugPipelineDescription + } + + if let c = consumer as? ImageRelay { + return c.debugPipelineDescription + } + + if let c = consumer as? ImageSource { + return c.debugPipelineDescription + } + + return simpleName(consumer) + } + let nextInfosText = nextInfos.joined(separator: " -> ") + + if self is ImageRelay { + return nextInfosText + } + + return "\(simpleName(self)) -> \(nextInfosText)" + } +} + +#if DEBUG +public extension ImageSource { + var debugPipelineNext: String { + let nextInfos: [String] = targets.map { + if let operationGroup = $0.0 as? OperationGroup { + return operationGroup.inputImageRelay.debugPipelineNext + } else if let operation = $0.0 as? ImageProcessingOperation { + return operation.debugPipelineNext + } else { + return $0.0.debugPipelineEnd + } + } + return "{'\(self)':[\(nextInfos.joined(separator: ","))]}" + } +} + +public extension ImageConsumer { + var debugPipelineEnd: String { + return "'\(self)'" + } +} +#endif diff --git a/framework/Source/Position.swift b/framework/Source/Position.swift index 1b640783..778354e3 100644 --- a/framework/Source/Position.swift +++ b/framework/Source/Position.swift @@ -5,18 +5,18 @@ import UIKit #endif public struct Position { - public let x:Float - public let y:Float - public let z:Float? + public let x: Float + public let y: Float + public let z: Float? - public init (_ x:Float, _ y:Float, _ z:Float? = nil) { + public init (_ x: Float, _ y: Float, _ z: Float? 
= nil) { self.x = x self.y = y self.z = z } #if !os(Linux) - public init(point:CGPoint) { + public init(point: CGPoint) { self.x = Float(point.x) self.y = Float(point.y) self.z = nil diff --git a/framework/Source/RawDataInput.swift b/framework/Source/RawDataInput.swift index c918045f..5e0e5560 100644 --- a/framework/Source/RawDataInput.swift +++ b/framework/Source/RawDataInput.swift @@ -32,12 +32,15 @@ public enum PixelFormat { public class RawDataInput: ImageSource { public let targets = TargetContainer() + #if DEBUG + public var debugRenderInfo: String = "" + #endif + public init() { - } - public func uploadBytes(_ bytes:[UInt8], size:Size, pixelFormat:PixelFormat, orientation:ImageOrientation = .portrait) { - let dataFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:orientation, size:GLSize(size), textureOnly:true, internalFormat:pixelFormat.toGL(), format:pixelFormat.toGL()) + public func uploadBytes(_ bytes: [UInt8], size: Size, pixelFormat: PixelFormat, orientation: ImageOrientation = .portrait) { + let dataFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: orientation, size: GLSize(size), textureOnly: true, internalFormat: pixelFormat.toGL(), format: pixelFormat.toGL()) glActiveTexture(GLenum(GL_TEXTURE1)) glBindTexture(GLenum(GL_TEXTURE_2D), dataFramebuffer.texture) @@ -46,7 +49,7 @@ public class RawDataInput: ImageSource { updateTargetsWithFramebuffer(dataFramebuffer) } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { // TODO: Determine if this is necessary for the raw data uploads } } diff --git a/framework/Source/RawDataOutput.swift b/framework/Source/RawDataOutput.swift index dac09d7b..cd5b5069 100644 --- a/framework/Source/RawDataOutput.swift +++ b/framework/Source/RawDataOutput.swift @@ -13,25 +13,43 @@ #endif public class RawDataOutput: 
ImageConsumer { - public var dataAvailableCallback:(([UInt8]) -> ())? + public var dataAvailableCallback: (([UInt8]) -> Void)? public let sources = SourceContainer() - public let maximumInputs:UInt = 1 + public let maximumInputs: UInt = 1 + + #if DEBUG + public var debugRenderInfo: String = "" + #endif public init() { } // TODO: Replace with texture caches - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { - let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:framebuffer.size) + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { + #if DEBUG + let startTime = CACurrentMediaTime() + defer { + debugRenderInfo = """ +{ + RawDataOutput: { + input: \(framebuffer.debugRenderInfo), + output: { size: \(framebuffer.size.width * framebuffer.size.height * 4), type: RGBData }, + time: \((CACurrentMediaTime() - startTime) * 1000.0)ms + } +}, +""" + } + #endif + let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: framebuffer.orientation, size: framebuffer.size) renderFramebuffer.lock() renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.black) - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings: ShaderUniformSettings(), vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [framebuffer.texturePropertiesForOutputRotation(.noRotation)]) framebuffer.unlock() - var data = [UInt8](repeating:0, count:Int(framebuffer.size.width * framebuffer.size.height * 4)) + var data = [UInt8](repeating: 0, count: 
Int(framebuffer.size.width * framebuffer.size.height * 4)) glReadPixels(0, 0, framebuffer.size.width, framebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), &data) renderFramebuffer.unlock() diff --git a/framework/Source/SerialDispatch.swift b/framework/Source/SerialDispatch.swift index bdf32f1d..f6819264 100755 --- a/framework/Source/SerialDispatch.swift +++ b/framework/Source/SerialDispatch.swift @@ -1,4 +1,5 @@ import Foundation +import AVFoundation #if os(Linux) // For now, disable GCD on Linux and run everything on the main thread @@ -7,7 +8,7 @@ protocol SerialDispatch { } extension SerialDispatch { - func runOperationAsynchronously(operation:() -> ()) { + func runOperationAsynchronously(operation:() -> Void) { operation() } @@ -18,36 +19,35 @@ extension SerialDispatch { #else -public let standardProcessingQueuePriority:DispatchQueue.GlobalQueuePriority = { - // DispatchQueue.QoSClass.default +public var standardProcessingQueue: DispatchQueue { if #available(iOS 10, OSX 10.10, *) { - return DispatchQueue.GlobalQueuePriority.default + return DispatchQueue.global(qos: .default) } else { - return DispatchQueue.GlobalQueuePriority.default + return DispatchQueue.global(priority: .default) } -}() - -public let lowProcessingQueuePriority:DispatchQueue.GlobalQueuePriority = { +} + +public var lowProcessingQueue: DispatchQueue { if #available(iOS 10, OSX 10.10, *) { - return DispatchQueue.GlobalQueuePriority.low + return DispatchQueue.global(qos: .background) } else { - return DispatchQueue.GlobalQueuePriority.low + return DispatchQueue.global(priority: .low) } -}() +} -func runAsynchronouslyOnMainQueue(_ mainThreadOperation:@escaping () -> ()) { - if (Thread.isMainThread) { +func runAsynchronouslyOnMainQueue(_ mainThreadOperation:@escaping () -> Void) { + if Thread.isMainThread { mainThreadOperation() } else { - DispatchQueue.main.async(execute:mainThreadOperation) + DispatchQueue.main.async(execute: mainThreadOperation) } } -func runOnMainQueue(_ 
mainThreadOperation:() -> ()) { - if (Thread.isMainThread) { +func runOnMainQueue(_ mainThreadOperation:() -> Void) { + if Thread.isMainThread { mainThreadOperation() } else { - DispatchQueue.main.sync(execute:mainThreadOperation) + DispatchQueue.main.sync(execute: mainThreadOperation) } } @@ -62,34 +62,56 @@ func runOnMainQueue(_ mainThreadOperation:() -> T) -> T { // MARK: - // MARK: SerialDispatch extension -public protocol SerialDispatch { - var serialDispatchQueue:DispatchQueue { get } - var dispatchQueueKey:DispatchSpecificKey { get } +public protocol SerialDispatch: class { + var executeStartTime: TimeInterval? { get set } + var serialDispatchQueue: DispatchQueue { get } + var dispatchQueueKey: DispatchSpecificKey { get } + var dispatchQueueKeyValue: Int { get } func makeCurrentContext() } public extension SerialDispatch { - public func runOperationAsynchronously(_ operation:@escaping () -> ()) { + var alreadyExecuteTime: TimeInterval { + if let executeStartTime = executeStartTime { + return CACurrentMediaTime() - executeStartTime + } else { + return 0.0 + } + } + + func runOperation(sync: Bool, _ operation:@escaping () -> Void) { + if sync { + runOperationSynchronously(operation) + } else { + runOperationAsynchronously(operation) + } + } + + func runOperationAsynchronously(_ operation:@escaping () -> Void) { self.serialDispatchQueue.async { + self.executeStartTime = CACurrentMediaTime() self.makeCurrentContext() operation() + self.executeStartTime = nil } } - public func runOperationSynchronously(_ operation:() -> ()) { + func runOperationSynchronously(_ operation:() -> Void) { // TODO: Verify this works as intended - if (DispatchQueue.getSpecific(key:self.dispatchQueueKey) == 81) { + if DispatchQueue.getSpecific(key: self.dispatchQueueKey) == self.dispatchQueueKeyValue { operation() } else { self.serialDispatchQueue.sync { + self.executeStartTime = CACurrentMediaTime() self.makeCurrentContext() operation() + self.executeStartTime = nil } } } - public func 
runOperationSynchronously(_ operation:() throws -> ()) throws { - var caughtError:Error? = nil + func runOperationSynchronously(_ operation:() throws -> Void) throws { + var caughtError: Error? runOperationSynchronously { do { try operation() @@ -97,10 +119,10 @@ public extension SerialDispatch { caughtError = error } } - if (caughtError != nil) {throw caughtError!} + if caughtError != nil { throw caughtError! } } - public func runOperationSynchronously(_ operation:() throws -> T) throws -> T { + func runOperationSynchronously(_ operation:() throws -> T) throws -> T { var returnedValue: T! try runOperationSynchronously { returnedValue = try operation() @@ -108,7 +130,7 @@ public extension SerialDispatch { return returnedValue } - public func runOperationSynchronously(_ operation:() -> T) -> T { + func runOperationSynchronously(_ operation:() -> T) -> T { var returnedValue: T! runOperationSynchronously { returnedValue = operation() diff --git a/framework/Source/ShaderProgram.swift b/framework/Source/ShaderProgram.swift index abf50b63..c65cb75f 100755 --- a/framework/Source/ShaderProgram.swift +++ b/framework/Source/ShaderProgram.swift @@ -14,9 +14,8 @@ import Foundation - -struct ShaderCompileError:Error { - let compileLog:String +struct ShaderCompileError: Error { + let compileLog: String } enum ShaderType { @@ -26,45 +25,54 @@ enum ShaderType { public class ShaderProgram { public var colorUniformsUseFourComponents = false - let program:GLuint - var vertexShader:GLuint! // At some point, the Swift compiler will be able to deal with the early throw and we can convert these to lets - var fragmentShader:GLuint! 
- private var attributeAddresses = [String:GLuint]() - private var uniformAddresses = [String:GLint]() - private var currentUniformIntValues = [String:GLint]() - private var currentUniformFloatValues = [String:GLfloat]() - private var currentUniformFloatArrayValues = [String:[GLfloat]]() + public static var disableAttributeCache = false + let program: GLuint + let initTime: CFTimeInterval? + var vertexShader: GLuint! // At some point, the Swift compiler will be able to deal with the early throw and we can convert these to lets + var fragmentShader: GLuint! + private var attributeAddresses = [String: GLuint]() + private var uniformAddresses = [String: GLint]() + private var currentUniformIntValues = [String: GLint]() + private var currentUniformFloatValues = [String: GLfloat]() + private var currentUniformFloatArrayValues = [String: [GLfloat]]() // MARK: - // MARK: Initialization and teardown - public init(vertexShader:String, fragmentShader:String) throws { + public init(vertexShader: String, fragmentShader: String) throws { program = glCreateProgram() - self.vertexShader = try compileShader(vertexShader, type:.vertex) - self.fragmentShader = try compileShader(fragmentShader, type:.fragment) - + self.vertexShader = try compileShader(vertexShader, type: .vertex) + self.fragmentShader = try compileShader(fragmentShader, type: .fragment) + + // tricky way to control if needs set inputTime + if fragmentShader.contains("uniform float inputTime") { + self.initTime = CACurrentMediaTime() + } else { + self.initTime = nil + } + glAttachShader(program, self.vertexShader) glAttachShader(program, self.fragmentShader) - + try link() } - public convenience init(vertexShader:String, fragmentShaderFile:URL) throws { - try self.init(vertexShader:vertexShader, fragmentShader:try shaderFromFile(fragmentShaderFile)) + public convenience init(vertexShader: String, fragmentShaderFile: URL) throws { + try self.init(vertexShader: vertexShader, fragmentShader: try 
shaderFromFile(fragmentShaderFile)) } - public convenience init(vertexShaderFile:URL, fragmentShaderFile:URL) throws { - try self.init(vertexShader:try shaderFromFile(vertexShaderFile), fragmentShader:try shaderFromFile(fragmentShaderFile)) + public convenience init(vertexShaderFile: URL, fragmentShaderFile: URL) throws { + try self.init(vertexShader: try shaderFromFile(vertexShaderFile), fragmentShader: try shaderFromFile(fragmentShaderFile)) } deinit { - debugPrint("Shader deallocated") + // debugPrint("Shader deallocated") - if (vertexShader != nil) { + if vertexShader != nil { glDeleteShader(vertexShader) } - if (fragmentShader != nil) { + if fragmentShader != nil { glDeleteShader(fragmentShader) } glDeleteProgram(program) @@ -73,35 +81,37 @@ public class ShaderProgram { // MARK: - // MARK: Attributes and uniforms - public func attributeIndex(_ attribute:String) -> GLuint? { - if let attributeAddress = attributeAddresses[attribute] { + public func attributeIndex(_ attribute: String) -> GLuint? { + if let attributeAddress = attributeAddresses[attribute], !ShaderProgram.disableAttributeCache { return attributeAddress } else { - var attributeAddress:GLint = -1 - attribute.withGLChar{glString in + var attributeAddress: GLint = -1 + attribute.withGLChar {glString in attributeAddress = glGetAttribLocation(self.program, glString) } - if (attributeAddress < 0) { + if attributeAddress < 0 { return nil } else { glEnableVertexAttribArray(GLuint(attributeAddress)) - attributeAddresses[attribute] = GLuint(attributeAddress) + if !ShaderProgram.disableAttributeCache { + attributeAddresses[attribute] = GLuint(attributeAddress) + } return GLuint(attributeAddress) } } } - public func uniformIndex(_ uniform:String) -> GLint? { + public func uniformIndex(_ uniform: String) -> GLint? 
{ if let uniformAddress = uniformAddresses[uniform] { return uniformAddress } else { - var uniformAddress:GLint = -1 - uniform.withGLChar{glString in + var uniformAddress: GLint = -1 + uniform.withGLChar {glString in uniformAddress = glGetUniformLocation(self.program, glString) } - if (uniformAddress < 0) { + if uniformAddress < 0 { return nil } else { uniformAddresses[uniform] = uniformAddress @@ -113,48 +123,48 @@ public class ShaderProgram { // MARK: - // MARK: Uniform accessors - public func setValue(_ value:GLfloat, forUniform:String) { + public func setValue(_ value: GLfloat, forUniform: String) { guard let uniformAddress = uniformIndex(forUniform) else { debugPrint("Warning: Tried to set a uniform (\(forUniform)) that was missing or optimized out by the compiler") return } - if (currentUniformFloatValues[forUniform] != value) { + if currentUniformFloatValues[forUniform] != value { glUniform1f(GLint(uniformAddress), value) currentUniformFloatValues[forUniform] = value } } - public func setValue(_ value:GLint, forUniform:String) { + public func setValue(_ value: GLint, forUniform: String) { guard let uniformAddress = uniformIndex(forUniform) else { debugPrint("Warning: Tried to set a uniform (\(forUniform)) that was missing or optimized out by the compiler") return } - if (currentUniformIntValues[forUniform] != value) { + if currentUniformIntValues[forUniform] != value { glUniform1i(GLint(uniformAddress), value) currentUniformIntValues[forUniform] = value } } - public func setValue(_ value:Color, forUniform:String) { + public func setValue(_ value: Color, forUniform: String) { if colorUniformsUseFourComponents { - self.setValue(value.toGLArrayWithAlpha(), forUniform:forUniform) + self.setValue(value.toGLArrayWithAlpha(), forUniform: forUniform) } else { - self.setValue(value.toGLArray(), forUniform:forUniform) + self.setValue(value.toGLArray(), forUniform: forUniform) } } - public func setValue(_ value:[GLfloat], forUniform:String) { + public func setValue(_ 
value: [GLfloat], forUniform: String) { guard let uniformAddress = uniformIndex(forUniform) else { debugPrint("Warning: Tried to set a uniform (\(forUniform)) that was missing or optimized out by the compiler") return } - if let previousValue = currentUniformFloatArrayValues[forUniform], previousValue == value{ + if let previousValue = currentUniformFloatArrayValues[forUniform], previousValue == value { } else { - if (value.count == 2) { + if value.count == 2 { glUniform2fv(uniformAddress, 1, value) - } else if (value.count == 3) { + } else if value.count == 3 { glUniform3fv(uniformAddress, 1, value) - } else if (value.count == 4) { + } else if value.count == 4 { glUniform4fv(uniformAddress, 1, value) } else { fatalError("Tried to set a float array uniform outside of the range of values") @@ -163,16 +173,16 @@ public class ShaderProgram { } } - public func setMatrix(_ value:[GLfloat], forUniform:String) { + public func setMatrix(_ value: [GLfloat], forUniform: String) { guard let uniformAddress = uniformIndex(forUniform) else { debugPrint("Warning: Tried to set a uniform (\(forUniform)) that was missing or optimized out by the compiler") return } - if let previousValue = currentUniformFloatArrayValues[forUniform], previousValue == value{ + if let previousValue = currentUniformFloatArrayValues[forUniform], previousValue == value { } else { - if (value.count == 9) { + if value.count == 9 { glUniformMatrix3fv(uniformAddress, 1, GLboolean(GL_FALSE), value) - } else if (value.count == 16) { + } else if value.count == 16 { glUniformMatrix4fv(uniformAddress, 1, GLboolean(GL_FALSE), value) } else { fatalError("Tried to set a matrix uniform outside of the range of supported sizes (3x3, 4x4)") @@ -187,19 +197,19 @@ public class ShaderProgram { func link() throws { glLinkProgram(program) - var linkStatus:GLint = 0 + var linkStatus: GLint = 0 glGetProgramiv(program, GLenum(GL_LINK_STATUS), &linkStatus) - if (linkStatus == 0) { - var logLength:GLint = 0 + if linkStatus == 0 { + 
var logLength: GLint = 0 glGetProgramiv(program, GLenum(GL_INFO_LOG_LENGTH), &logLength) - if (logLength > 0) { - var compileLog = [CChar](repeating:0, count:Int(logLength)) + if logLength > 0 { + var compileLog = [CChar](repeating: 0, count: Int(logLength)) glGetProgramInfoLog(program, logLength, &logLength, &compileLog) - print("Link log: \(String(cString:compileLog))") + print("Link log: \(String(cString: compileLog))") } - throw ShaderCompileError(compileLog:"Link error") + throw ShaderCompileError(compileLog: "Link error") } } @@ -208,34 +218,34 @@ public class ShaderProgram { } } -func compileShader(_ shaderString:String, type:ShaderType) throws -> GLuint { - let shaderHandle:GLuint +func compileShader(_ shaderString: String, type: ShaderType) throws -> GLuint { + let shaderHandle: GLuint switch type { case .vertex: shaderHandle = glCreateShader(GLenum(GL_VERTEX_SHADER)) case .fragment: shaderHandle = glCreateShader(GLenum(GL_FRAGMENT_SHADER)) } - shaderString.withGLChar{glString in - var tempString:UnsafePointer? = glString + shaderString.withGLChar {glString in + var tempString: UnsafePointer? 
= glString glShaderSource(shaderHandle, 1, &tempString, nil) glCompileShader(shaderHandle) } - var compileStatus:GLint = 1 + var compileStatus: GLint = 1 glGetShaderiv(shaderHandle, GLenum(GL_COMPILE_STATUS), &compileStatus) - if (compileStatus != 1) { - var logLength:GLint = 0 + if compileStatus != 1 { + var logLength: GLint = 0 glGetShaderiv(shaderHandle, GLenum(GL_INFO_LOG_LENGTH), &logLength) - if (logLength > 0) { - var compileLog = [CChar](repeating:0, count:Int(logLength)) + if logLength > 0 { + var compileLog = [CChar](repeating: 0, count: Int(logLength)) glGetShaderInfoLog(shaderHandle, logLength, &logLength, &compileLog) - print("Compile log: \(String(cString:compileLog))") + print("Compile log: \(String(cString: compileLog))") // let compileLogString = String(bytes:compileLog.map{UInt8($0)}, encoding:NSASCIIStringEncoding) switch type { - case .vertex: throw ShaderCompileError(compileLog:"Vertex shader compile error:") - case .fragment: throw ShaderCompileError(compileLog:"Fragment shader compile error:") + case .vertex: throw ShaderCompileError(compileLog: "Vertex shader compile error:") + case .fragment: throw ShaderCompileError(compileLog: "Fragment shader compile error:") } } } @@ -243,7 +253,7 @@ func compileShader(_ shaderString:String, type:ShaderType) throws -> GLuint { return shaderHandle } -public func crashOnShaderCompileFailure(_ shaderName:String, _ operation:() throws -> T) -> T { +public func crashOnShaderCompileFailure(_ shaderName: String, _ operation:() throws -> T) -> T { do { return try operation() } catch { @@ -252,12 +262,12 @@ public func crashOnShaderCompileFailure(_ shaderName:String, _ operation:() t } } -public func shaderFromFile(_ file:URL) throws -> String { +public func shaderFromFile(_ file: URL) throws -> String { // Note: this is a hack until Foundation's String initializers are fully functional // let fragmentShaderString = String(contentsOfURL:fragmentShaderFile, encoding:NSASCIIStringEncoding) - guard 
(FileManager.default.fileExists(atPath: file.path)) else { throw ShaderCompileError(compileLog:"Shader file \(file) missing")} + guard FileManager.default.fileExists(atPath: file.path) else { throw ShaderCompileError(compileLog: "Shader file \(file) missing") } - let fragmentShaderString = try NSString(contentsOfFile:file.path, encoding:String.Encoding.ascii.rawValue) + let fragmentShaderString = try NSString(contentsOfFile: file.path, encoding: String.Encoding.ascii.rawValue) - return String(describing:fragmentShaderString) + return String(describing: fragmentShaderString) } diff --git a/framework/Source/ShaderUniformSettings.swift b/framework/Source/ShaderUniformSettings.swift index f67aed30..91b34b6b 100644 --- a/framework/Source/ShaderUniformSettings.swift +++ b/framework/Source/ShaderUniformSettings.swift @@ -13,56 +13,71 @@ #endif public struct ShaderUniformSettings { - private var uniformValues = [String:Any]() + private static var lock = os_unfair_lock_s() + private var _uniformValues = [String: Any]() + private var uniformValues: [String: Any] { + get { + os_unfair_lock_lock(&Self.lock) + let temp = _uniformValues + os_unfair_lock_unlock(&Self.lock) + return temp + } + set { + os_unfair_lock_lock(&Self.lock) + _uniformValues = newValue + os_unfair_lock_unlock(&Self.lock) + } + } public init() { } - public subscript(index:String) -> Float? { - get { return uniformValues[index] as? Float} + public subscript(index: String) -> Float? { + get { return uniformValues[index] as? Float } set(newValue) { uniformValues[index] = newValue } } - public subscript(index:String) -> Int? { + public subscript(index: String) -> Int? { get { return uniformValues[index] as? Int } set(newValue) { uniformValues[index] = newValue } } - public subscript(index:String) -> Color? { + public subscript(index: String) -> Color? { get { return uniformValues[index] as? Color } set(newValue) { uniformValues[index] = newValue } } - public subscript(index:String) -> Position? 
{ + public subscript(index: String) -> Position? { get { return uniformValues[index] as? Position } set(newValue) { uniformValues[index] = newValue } } - public subscript(index:String) -> Size? { - get { return uniformValues[index] as? Size} + public subscript(index: String) -> Size? { + get { return uniformValues[index] as? Size } set(newValue) { uniformValues[index] = newValue } } - public subscript(index:String) -> Matrix4x4? { + public subscript(index: String) -> Matrix4x4? { get { return uniformValues[index] as? Matrix4x4 } set(newValue) { uniformValues[index] = newValue } } - public subscript(index:String) -> Matrix3x3? { - get { return uniformValues[index] as? Matrix3x3} + public subscript(index: String) -> Matrix3x3? { + get { return uniformValues[index] as? Matrix3x3 } set(newValue) { uniformValues[index] = newValue } } - public func restoreShaderSettings(_ shader:ShaderProgram) { - for (uniform, value) in uniformValues { + public func restoreShaderSettings(_ shader: ShaderProgram) { + let finalUniformValues = uniformValues + for (uniform, value) in finalUniformValues { switch value { - case let value as Float: shader.setValue(GLfloat(value), forUniform:uniform) - case let value as Int: shader.setValue(GLint(value), forUniform:uniform) - case let value as Color: shader.setValue(value, forUniform:uniform) - case let value as Position: shader.setValue(value.toGLArray(), forUniform:uniform) - case let value as Size: shader.setValue(value.toGLArray(), forUniform:uniform) - case let value as Matrix4x4: shader.setMatrix(value.toRowMajorGLArray(), forUniform:uniform) - case let value as Matrix3x3: shader.setMatrix(value.toRowMajorGLArray(), forUniform:uniform) + case let value as Float: shader.setValue(GLfloat(value), forUniform: uniform) + case let value as Int: shader.setValue(GLint(value), forUniform: uniform) + case let value as Color: shader.setValue(value, forUniform: uniform) + case let value as Position: shader.setValue(value.toGLArray(), forUniform: 
uniform) + case let value as Size: shader.setValue(value.toGLArray(), forUniform: uniform) + case let value as Matrix4x4: shader.setMatrix(value.toRowMajorGLArray(), forUniform: uniform) + case let value as Matrix3x3: shader.setMatrix(value.toRowMajorGLArray(), forUniform: uniform) default: fatalError("Somehow tried to restore a shader uniform value of an unsupported type: \(value)") } } diff --git a/framework/Source/Size.swift b/framework/Source/Size.swift index 07604c39..1e22cefa 100644 --- a/framework/Source/Size.swift +++ b/framework/Source/Size.swift @@ -1,9 +1,13 @@ public struct Size { - public let width:Float - public let height:Float + public let width: Float + public let height: Float - public init(width:Float, height:Float) { + public init(width: Float, height: Float) { self.width = width self.height = height } -} \ No newline at end of file + + #if DEBUG + public var debugRenderInfo: String { "\(width)x\(height)" } + #endif +} diff --git a/framework/Source/TPCircularBuffer.h b/framework/Source/TPCircularBuffer.h new file mode 100755 index 00000000..88129560 --- /dev/null +++ b/framework/Source/TPCircularBuffer.h @@ -0,0 +1,243 @@ +// +// TPCircularBuffer.h +// Circular/Ring buffer implementation +// +// https://github.com/michaeltyson/TPCircularBuffer +// +// Created by Michael Tyson on 10/12/2011. +// +// +// This implementation makes use of a virtual memory mapping technique that inserts a virtual copy +// of the buffer memory directly after the buffer's end, negating the need for any buffer wrap-around +// logic. Clients can simply use the returned memory address as if it were contiguous space. +// +// The implementation is thread-safe in the case of a single producer and single consumer. 
+// +// Virtual memory technique originally proposed by Philip Howard (http://vrb.slashusr.org/), and +// adapted to Darwin by Kurt Revis (http://www.snoize.com, +// http://www.snoize.com/Code/PlayBufferedSoundFile.tar.gz) +// +// +// Copyright (C) 2012-2013 A Tasty Pixel +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// +// 3. This notice may not be removed or altered from any source distribution. +// + +#ifndef TPCircularBuffer_h +#define TPCircularBuffer_h + +#include +#include +#include + +#ifdef __cplusplus + extern "C++" { + #include + typedef std::atomic_int atomicInt; + #define atomicFetchAdd(a,b) std::atomic_fetch_add(a,b) + } +#else + #include + typedef atomic_int atomicInt; + #define atomicFetchAdd(a,b) atomic_fetch_add(a,b) +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct { + void *buffer; + uint32_t length; + uint32_t tail; + uint32_t head; + volatile atomicInt fillCount; + bool atomic; +} TPCircularBuffer; + +/*! + * Initialise buffer + * + * Note that the length is advisory only: Because of the way the + * memory mirroring technique works, the true buffer length will + * be multiples of the device page size (e.g. 
4096 bytes) + * + * If you intend to use the AudioBufferList utilities, you should + * always allocate a bit more space than you need for pure audio + * data, so there's room for the metadata. How much extra is required + * depends on how many AudioBufferList structures are used, which is + * a function of how many audio frames each buffer holds. A good rule + * of thumb is to add 15%, or at least another 2048 bytes or so. + * + * @param buffer Circular buffer + * @param length Length of buffer + */ +#define TPCircularBufferInit(buffer, length) \ + _TPCircularBufferInit(buffer, length, sizeof(*buffer)) +bool _TPCircularBufferInit(TPCircularBuffer *buffer, uint32_t length, size_t structSize); + +/*! + * Cleanup buffer + * + * Releases buffer resources. + */ +void TPCircularBufferCleanup(TPCircularBuffer *buffer); + +/*! + * Clear buffer + * + * Resets buffer to original, empty state. + * + * This is safe for use by consumer while producer is accessing + * buffer. + */ +void TPCircularBufferClear(TPCircularBuffer *buffer); + +/*! + * Set the atomicity + * + * If you set the atomiticy to false using this method, the buffer will + * not use atomic operations. This can be used to give the compiler a little + * more optimisation opportunities when the buffer is only used on one thread. + * + * Important note: Only set this to false if you know what you're doing! + * + * The default value is true (the buffer will use atomic operations) + * + * @param buffer Circular buffer + * @param atomic Whether the buffer is atomic (default true) + */ +void TPCircularBufferSetAtomic(TPCircularBuffer *buffer, bool atomic); + +// Reading (consuming) + +/*! + * Access end of buffer + * + * This gives you a pointer to the end of the buffer, ready + * for reading, and the number of available bytes to read. 
+ * + * @param buffer Circular buffer + * @param availableBytes On output, the number of bytes ready for reading + * @return Pointer to the first bytes ready for reading, or NULL if buffer is empty + */ +static __inline__ __attribute__((always_inline)) void* TPCircularBufferTail(TPCircularBuffer *buffer, uint32_t* availableBytes) { + *availableBytes = buffer->fillCount; + if ( *availableBytes == 0 ) return NULL; + return (void*)((char*)buffer->buffer + buffer->tail); +} + +/*! + * Consume bytes in buffer + * + * This frees up the just-read bytes, ready for writing again. + * + * @param buffer Circular buffer + * @param amount Number of bytes to consume + */ +static __inline__ __attribute__((always_inline)) void TPCircularBufferConsume(TPCircularBuffer *buffer, uint32_t amount) { + buffer->tail = (buffer->tail + amount) % buffer->length; + if ( buffer->atomic ) { + atomicFetchAdd(&buffer->fillCount, -amount); + } else { + buffer->fillCount -= amount; + } + assert(buffer->fillCount >= 0); +} + +/*! + * Access front of buffer + * + * This gives you a pointer to the front of the buffer, ready + * for writing, and the number of available bytes to write. + * + * @param buffer Circular buffer + * @param availableBytes On output, the number of bytes ready for writing + * @return Pointer to the first bytes ready for writing, or NULL if buffer is full + */ +static __inline__ __attribute__((always_inline)) void* TPCircularBufferHead(TPCircularBuffer *buffer, uint32_t* availableBytes) { + *availableBytes = (buffer->length - buffer->fillCount); + if ( *availableBytes == 0 ) return NULL; + return (void*)((char*)buffer->buffer + buffer->head); +} + +// Writing (producing) + +/*! + * Produce bytes in buffer + * + * This marks the given section of the buffer ready for reading. 
+ * + * @param buffer Circular buffer + * @param amount Number of bytes to produce + */ +static __inline__ __attribute__((always_inline)) void TPCircularBufferProduce(TPCircularBuffer *buffer, uint32_t amount) { + buffer->head = (buffer->head + amount) % buffer->length; + if ( buffer->atomic ) { + atomicFetchAdd(&buffer->fillCount, amount); + } else { + buffer->fillCount += amount; + } + assert(buffer->fillCount <= buffer->length); +} + +/*! + * Helper routine to copy bytes to buffer + * + * This copies the given bytes to the buffer, and marks them ready for reading. + * + * @param buffer Circular buffer + * @param src Source buffer + * @param len Number of bytes in source buffer + * @return true if bytes copied, false if there was insufficient space + */ +static __inline__ __attribute__((always_inline)) bool TPCircularBufferProduceBytes(TPCircularBuffer *buffer, const void* src, uint32_t len) { + uint32_t space; + void *ptr = TPCircularBufferHead(buffer, &space); + if ( space < len ) return false; + memcpy(ptr, src, len); + TPCircularBufferProduce(buffer, len); + return true; +} + +/*! + * Deprecated method + */ +static __inline__ __attribute__((always_inline)) __deprecated_msg("use TPCircularBufferSetAtomic(false) and TPCircularBufferConsume instead") +void TPCircularBufferConsumeNoBarrier(TPCircularBuffer *buffer, uint32_t amount) { + buffer->tail = (buffer->tail + amount) % buffer->length; + buffer->fillCount -= amount; + assert(buffer->fillCount >= 0); +} + +/*! 
+ * Deprecated method + */ +static __inline__ __attribute__((always_inline)) __deprecated_msg("use TPCircularBufferSetAtomic(false) and TPCircularBufferProduce instead") +void TPCircularBufferProduceNoBarrier(TPCircularBuffer *buffer, uint32_t amount) { + buffer->head = (buffer->head + amount) % buffer->length; + buffer->fillCount += amount; + assert(buffer->fillCount <= buffer->length); +} + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/framework/Source/TPCircularBuffer.m b/framework/Source/TPCircularBuffer.m new file mode 100755 index 00000000..a3e6b3c5 --- /dev/null +++ b/framework/Source/TPCircularBuffer.m @@ -0,0 +1,149 @@ +// +// TPCircularBuffer.c +// Circular/Ring buffer implementation +// +// https://github.com/michaeltyson/TPCircularBuffer +// +// Created by Michael Tyson on 10/12/2011. +// +// Copyright (C) 2012-2013 A Tasty Pixel +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// +// 3. This notice may not be removed or altered from any source distribution. 
+// + +#include "TPCircularBuffer.h" +#include +#include +#include + +#define reportResult(result,operation) (_reportResult((result),(operation),strrchr(__FILE__, '/')+1,__LINE__)) +static inline bool _reportResult(kern_return_t result, const char *operation, const char* file, int line) { + if ( result != ERR_SUCCESS ) { + printf("%s:%d: %s: %s\n", file, line, operation, mach_error_string(result)); + return false; + } + return true; +} + +bool _TPCircularBufferInit(TPCircularBuffer *buffer, uint32_t length, size_t structSize) { + + assert(length > 0); + + if ( structSize != sizeof(TPCircularBuffer) ) { + fprintf(stderr, "TPCircularBuffer: Header version mismatch. Check for old versions of TPCircularBuffer in your project\n"); + abort(); + } + + // Keep trying until we get our buffer, needed to handle race conditions + int retries = 3; + while ( true ) { + + buffer->length = (uint32_t)round_page(length); // We need whole page sizes + + // Temporarily allocate twice the length, so we have the contiguous address space to + // support a second instance of the buffer directly after + vm_address_t bufferAddress; + kern_return_t result = vm_allocate(mach_task_self(), + &bufferAddress, + buffer->length * 2, + VM_FLAGS_ANYWHERE); // allocate anywhere it'll fit + if ( result != ERR_SUCCESS ) { + if ( retries-- == 0 ) { + reportResult(result, "Buffer allocation"); + return false; + } + // Try again if we fail + continue; + } + + // Now replace the second half of the allocation with a virtual copy of the first half. Deallocate the second half... 
+ result = vm_deallocate(mach_task_self(), + bufferAddress + buffer->length, + buffer->length); + if ( result != ERR_SUCCESS ) { + if ( retries-- == 0 ) { + reportResult(result, "Buffer deallocation"); + return false; + } + // If this fails somehow, deallocate the whole region and try again + vm_deallocate(mach_task_self(), bufferAddress, buffer->length); + continue; + } + + // Re-map the buffer to the address space immediately after the buffer + vm_address_t virtualAddress = bufferAddress + buffer->length; + vm_prot_t cur_prot, max_prot; + result = vm_remap(mach_task_self(), + &virtualAddress, // mirror target + buffer->length, // size of mirror + 0, // auto alignment + 0, // force remapping to virtualAddress + mach_task_self(), // same task + bufferAddress, // mirror source + 0, // MAP READ-WRITE, NOT COPY + &cur_prot, // unused protection struct + &max_prot, // unused protection struct + VM_INHERIT_DEFAULT); + if ( result != ERR_SUCCESS ) { + if ( retries-- == 0 ) { + reportResult(result, "Remap buffer memory"); + return false; + } + // If this remap failed, we hit a race condition, so deallocate and try again + vm_deallocate(mach_task_self(), bufferAddress, buffer->length); + continue; + } + + if ( virtualAddress != bufferAddress+buffer->length ) { + // If the memory is not contiguous, clean up both allocated buffers and try again + if ( retries-- == 0 ) { + printf("Couldn't map buffer memory to end of buffer\n"); + return false; + } + + vm_deallocate(mach_task_self(), virtualAddress, buffer->length); + vm_deallocate(mach_task_self(), bufferAddress, buffer->length); + continue; + } + + buffer->buffer = (void*)bufferAddress; + buffer->fillCount = 0; + buffer->head = buffer->tail = 0; + buffer->atomic = true; + + return true; + } + return false; +} + +void TPCircularBufferCleanup(TPCircularBuffer *buffer) { + vm_deallocate(mach_task_self(), (vm_address_t)buffer->buffer, buffer->length * 2); + memset(buffer, 0, sizeof(TPCircularBuffer)); +} + +void 
TPCircularBufferClear(TPCircularBuffer *buffer) { + uint32_t fillCount; + if ( TPCircularBufferTail(buffer, &fillCount) ) { + TPCircularBufferConsume(buffer, fillCount); + } +} + +void TPCircularBufferSetAtomic(TPCircularBuffer *buffer, bool atomic) { + buffer->atomic = atomic; +} diff --git a/framework/Source/TextureInput.swift b/framework/Source/TextureInput.swift index b2a782bf..855c0deb 100644 --- a/framework/Source/TextureInput.swift +++ b/framework/Source/TextureInput.swift @@ -15,11 +15,15 @@ public class TextureInput: ImageSource { public let targets = TargetContainer() - let textureFramebuffer:Framebuffer + #if DEBUG + public var debugRenderInfo: String = "" + #endif + + let textureFramebuffer: Framebuffer - public init(texture:GLuint, size:Size, orientation:ImageOrientation = .portrait) { + public init(texture: GLuint, size: Size, orientation: ImageOrientation = .portrait) { do { - textureFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:orientation, size:GLSize(size), textureOnly:true, overriddenTexture:texture) + textureFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: orientation, size: GLSize(size), textureOnly: true, overriddenTexture: texture) } catch { fatalError("Could not create framebuffer for custom input texture.") } @@ -29,8 +33,8 @@ public class TextureInput: ImageSource { updateTargetsWithFramebuffer(textureFramebuffer) } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { textureFramebuffer.lock() - target.newFramebufferAvailable(textureFramebuffer, fromSourceIndex:atIndex) + target.newFramebufferAvailable(textureFramebuffer, fromSourceIndex: atIndex) } } diff --git a/framework/Source/TextureOutput.swift b/framework/Source/TextureOutput.swift index 072e07c6..48881767 100644 --- a/framework/Source/TextureOutput.swift +++ b/framework/Source/TextureOutput.swift @@ -13,12 +13,30 
@@ #endif public class TextureOutput: ImageConsumer { - public var newTextureAvailableCallback:((GLuint) -> ())? + public var newTextureAvailableCallback: ((GLuint) -> Void)? public let sources = SourceContainer() - public let maximumInputs:UInt = 1 + public let maximumInputs: UInt = 1 - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + #if DEBUG + public var debugRenderInfo: String = "" + #endif + + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { + #if DEBUG + let startTime = CACurrentMediaTime() + defer { + debugRenderInfo = """ +{ + TextureOutput: { + input: \(framebuffer.debugRenderInfo), + output: { size: \(framebuffer.size.width * framebuffer.size.height * 4) type: TextureCallback }, + time: \((CACurrentMediaTime() - startTime) * 1000.0)ms + } +}, +""" + } + #endif newTextureAvailableCallback?(framebuffer.texture) // TODO: Maybe extend the lifetime of the texture past this if needed framebuffer.unlock() diff --git a/framework/Source/TextureSamplingOperation.swift b/framework/Source/TextureSamplingOperation.swift index 19026fd0..fca0b0ae 100644 --- a/framework/Source/TextureSamplingOperation.swift +++ b/framework/Source/TextureSamplingOperation.swift @@ -1,13 +1,13 @@ open class TextureSamplingOperation: BasicOperation { - public var overriddenTexelSize:Size? + public var overriddenTexelSize: Size? 
- public init(vertexShader:String = NearbyTexelSamplingVertexShader, fragmentShader:String, numberOfInputs:UInt = 1) { - super.init(vertexShader:vertexShader, fragmentShader:fragmentShader, numberOfInputs:numberOfInputs) + public init(vertexShader: String = NearbyTexelSamplingVertexShader, fragmentShader: String, numberOfInputs: UInt = 1) { + super.init(vertexShader: vertexShader, fragmentShader: fragmentShader, numberOfInputs: numberOfInputs) } - override func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { + override open func configureFramebufferSpecificUniforms(_ inputFramebuffer: Framebuffer) { let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) - let texelSize = overriddenTexelSize ?? inputFramebuffer.texelSize(for:outputRotation) + let texelSize = overriddenTexelSize ?? inputFramebuffer.texelSize(for: outputRotation) uniformSettings["texelWidth"] = texelSize.width uniformSettings["texelHeight"] = texelSize.height } diff --git a/framework/Source/Timestamp.swift b/framework/Source/Timestamp.swift index 4455a898..81b84669 100644 --- a/framework/Source/Timestamp.swift +++ b/framework/Source/Timestamp.swift @@ -2,8 +2,8 @@ import Foundation // This reimplements CMTime such that it can reach across to Linux public struct TimestampFlags: OptionSet { - public let rawValue:UInt32 - public init(rawValue:UInt32) { self.rawValue = rawValue } + public let rawValue: UInt32 + public init(rawValue: UInt32) { self.rawValue = rawValue } public static let valid = TimestampFlags(rawValue: 1 << 0) public static let hasBeenRounded = TimestampFlags(rawValue: 1 << 1) @@ -13,12 +13,12 @@ public struct TimestampFlags: OptionSet { } public struct Timestamp: Comparable { - let value:Int64 - let timescale:Int32 - let flags:TimestampFlags - let epoch:Int64 + let value: Int64 + let timescale: Int32 + let flags: TimestampFlags + let epoch: Int64 - public init(value:Int64, timescale:Int32, 
flags:TimestampFlags, epoch:Int64) { + public init(value: Int64, timescale: Int32, flags: TimestampFlags, epoch: Int64) { self.value = value self.timescale = timescale self.flags = flags @@ -30,7 +30,7 @@ public struct Timestamp: Comparable { } } -public func ==(x:Timestamp, y:Timestamp) -> Bool { +public func ==(x: Timestamp, y: Timestamp) -> Bool { // TODO: Fix this // if (x.flags.contains(TimestampFlags.PositiveInfinity) && y.flags.contains(TimestampFlags.PositiveInfinity)) { // return true @@ -40,8 +40,8 @@ public func ==(x:Timestamp, y:Timestamp) -> Bool { // return false // } - let correctedYValue:Int64 - if (x.timescale != y.timescale) { + let correctedYValue: Int64 + if x.timescale != y.timescale { correctedYValue = Int64(round(Double(y.value) * Double(x.timescale) / Double(y.timescale))) } else { correctedYValue = y.value @@ -50,7 +50,7 @@ public func ==(x:Timestamp, y:Timestamp) -> Bool { return ((x.value == correctedYValue) && (x.epoch == y.epoch)) } -public func <(x:Timestamp, y:Timestamp) -> Bool { +public func <(x: Timestamp, y: Timestamp) -> Bool { // TODO: Fix this // if (x.flags.contains(TimestampFlags.PositiveInfinity) || y.flags.contains(TimestampFlags.NegativeInfinity)) { // return false @@ -58,14 +58,14 @@ public func <(x:Timestamp, y:Timestamp) -> Bool { // return true // } - if (x.epoch < y.epoch) { + if x.epoch < y.epoch { return true - } else if (x.epoch > y.epoch) { + } else if x.epoch > y.epoch { return false } - let correctedYValue:Int64 - if (x.timescale != y.timescale) { + let correctedYValue: Int64 + if x.timescale != y.timescale { correctedYValue = Int64(round(Double(y.value) * Double(x.timescale) / Double(y.timescale))) } else { correctedYValue = y.value diff --git a/framework/Source/TwoStageOperation.swift b/framework/Source/TwoStageOperation.swift index 1afd416c..0c220660 100644 --- a/framework/Source/TwoStageOperation.swift +++ b/framework/Source/TwoStageOperation.swift @@ -1,24 +1,24 @@ open class TwoStageOperation: 
BasicOperation { - public var overrideDownsamplingOptimization:Bool = false + public var overrideDownsamplingOptimization = false // override var outputFramebuffer:Framebuffer { get { return Framebuffer } } - var downsamplingFactor:Float? + var downsamplingFactor: Float? - override func internalRenderFunction(_ inputFramebuffer:Framebuffer, textureProperties:[InputTextureProperties]) { + override func internalRenderFunction(_ inputFramebuffer: Framebuffer, textureProperties: [InputTextureProperties]) { let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) // Downsample - let internalStageSize:GLSize - let firstStageTextureProperties:[InputTextureProperties] - let downsamplingFramebuffer:Framebuffer? + let internalStageSize: GLSize + let firstStageTextureProperties: [InputTextureProperties] + let downsamplingFramebuffer: Framebuffer? if let downsamplingFactor = downsamplingFactor { - internalStageSize = GLSize(Size(width:max(5.0, Float(renderFramebuffer.size.width) / downsamplingFactor), height:max(5.0, Float(renderFramebuffer.size.height) / downsamplingFactor))) - downsamplingFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:internalStageSize, stencil:false) + internalStageSize = GLSize(Size(width: max(5.0, Float(renderFramebuffer.size.width) / downsamplingFactor), height: max(5.0, Float(renderFramebuffer.size.height) / downsamplingFactor))) + downsamplingFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: internalStageSize, stencil: false) downsamplingFramebuffer!.lock() downsamplingFramebuffer!.activateFramebufferForRendering() clearFramebufferWithColor(backgroundColor) - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:nil, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:textureProperties) + 
renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings: nil, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: textureProperties) releaseIncomingFramebuffers() firstStageTextureProperties = [downsamplingFramebuffer!.texturePropertiesForOutputRotation(.noRotation)] @@ -29,42 +29,42 @@ open class TwoStageOperation: BasicOperation { } // Render first stage - let firstStageFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:internalStageSize, stencil:false) + let firstStageFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: internalStageSize, stencil: false) firstStageFramebuffer.lock() firstStageFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(backgroundColor) - let texelSize = inputFramebuffer.initialStageTexelSize(for:outputRotation) + let texelSize = inputFramebuffer.initialStageTexelSize(for: outputRotation) uniformSettings["texelWidth"] = texelSize.width * (downsamplingFactor ?? 1.0) uniformSettings["texelHeight"] = texelSize.height * (downsamplingFactor ?? 1.0) - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:firstStageTextureProperties) + renderQuadWithShader(shader, uniformSettings: uniformSettings, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: firstStageTextureProperties) if let downsamplingFramebuffer = downsamplingFramebuffer { downsamplingFramebuffer.unlock() } else { releaseIncomingFramebuffers() } - let secondStageTexelSize = renderFramebuffer.texelSize(for:.noRotation) + let secondStageTexelSize = renderFramebuffer.texelSize(for: .noRotation) uniformSettings["texelWidth"] = secondStageTexelSize.width * (downsamplingFactor ?? 
1.0) uniformSettings["texelHeight"] = 0.0 // Render second stage and upsample - if (downsamplingFactor != nil) { - let beforeUpsamplingFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:internalStageSize, stencil:false) + if downsamplingFactor != nil { + let beforeUpsamplingFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: internalStageSize, stencil: false) beforeUpsamplingFramebuffer.activateFramebufferForRendering() beforeUpsamplingFramebuffer.lock() clearFramebufferWithColor(backgroundColor) - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[firstStageFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) + renderQuadWithShader(shader, uniformSettings: uniformSettings, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [firstStageFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) firstStageFramebuffer.unlock() renderFramebuffer.activateFramebufferForRendering() - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:nil, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[beforeUpsamplingFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings: nil, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [beforeUpsamplingFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) beforeUpsamplingFramebuffer.unlock() } else { renderFramebuffer.activateFramebufferForRendering() - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[firstStageFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) + 
renderQuadWithShader(shader, uniformSettings: uniformSettings, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [firstStageFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) firstStageFramebuffer.unlock() } } diff --git a/framework/Source/iOS/CILookupFilter.swift b/framework/Source/iOS/CILookupFilter.swift new file mode 100644 index 00000000..a8a45b86 --- /dev/null +++ b/framework/Source/iOS/CILookupFilter.swift @@ -0,0 +1,147 @@ +// +// CILookupFilter.swift +// DayCam +// +// Created by 陈品霖 on 2020/2/23. +// Copyright © 2020 rocry. All rights reserved. +// + +import Foundation +import UIKit +import CoreImage + +public class CILookupFilter { + private var lutFilter: CIFilter? + public private(set) var intensity: Double? + public private(set) var brightnessFactor: Double? + // Use "ColorMatrix(Alpha) + Composite" filters for color LUT + private var alphaFilter: CIFilter? + private var compositeFilter: CIFilter? + private lazy var alphaColorMatrix = [CGFloat]() + // Use "ColorControl(Brightness)" filter for black and white LUT + private var brightnessFilter: CIFilter? + + init(lutImage: UIImage, intensity: Double? = nil, brightnessFactor: Double? 
= nil) { + self.intensity = intensity + self.brightnessFactor = brightnessFactor + lutFilter = CIFilter.filter(with: lutImage) + if let intensity = intensity { + if let factor = brightnessFactor { + brightnessFilter = CIFilter(name: "CIColorControls") + brightnessFilter?.setDefaults() + let adjustedBrightness = -factor + factor * intensity + brightnessFilter?.setValue(NSNumber(value: adjustedBrightness), forKey: kCIInputBrightnessKey) + } else { + alphaColorMatrix = [0, 0, 0, CGFloat(intensity)] + alphaFilter = CIFilter(name: "CIColorMatrix") + alphaFilter?.setDefaults() + alphaFilter?.setValue(CIVector(values: &alphaColorMatrix, count: 4), forKey: "inputAVector") + + compositeFilter = CIFilter(name: "CISourceOverCompositing") + compositeFilter?.setDefaults() + } + } + } + + func applyFilter(on image: CIImage) -> CIImage? { + lutFilter?.setValue(image, forKey: kCIInputImageKey) + if intensity == nil { + return lutFilter?.outputImage + } else { + if brightnessFactor != nil { + brightnessFilter?.setValue(lutFilter?.outputImage, forKey: kCIInputImageKey) + return brightnessFilter?.outputImage + } else { + alphaFilter?.setValue(lutFilter?.outputImage, forKey: kCIInputImageKey) + compositeFilter?.setValue(alphaFilter?.outputImage, forKey: kCIInputImageKey) + compositeFilter?.setValue(image, forKey: kCIInputBackgroundImageKey) + return compositeFilter?.outputImage + } + } + } +} + +public extension CIFilter { + static func filter(with lutUIImage: UIImage) -> CIFilter? 
{ + guard let lutCGImage = lutUIImage.cgImage else { + print("ERROR: Invalid colorLUT") + return nil + } + let size = 64 + let lutWidth = lutCGImage.width + let lutHeight = lutCGImage.height + let rowCount = lutHeight / size + let columnCount = lutWidth / size + + guard lutWidth % size == 0 && lutHeight % size == 0 && rowCount * columnCount == size else { + print("ERROR: Invalid colorLUT image size, width:\(lutWidth) height:\(lutHeight)") + return nil + } + + guard let bitmap = getBytesFromImage(image: lutUIImage) else { + print("ERROR: Cannot get byte from image") + return nil + } + + let floatSize = MemoryLayout.size + let cubeData = UnsafeMutablePointer.allocate(capacity: size * size * size * 4 * floatSize) + var z = 0 + var bitmapOffset = 0 + + for _ in 0 ..< rowCount { + for y in 0 ..< size { + let tmp = z + for _ in 0 ..< columnCount { + for x in 0 ..< size { + let alpha = Float(bitmap[bitmapOffset]) / 255.0 + let red = Float(bitmap[bitmapOffset + 1]) / 255.0 + let green = Float(bitmap[bitmapOffset + 2]) / 255.0 + let blue = Float(bitmap[bitmapOffset + 3]) / 255.0 + + let dataOffset = (z * size * size + y * size + x) * 4 + + cubeData[dataOffset + 3] = alpha + cubeData[dataOffset + 2] = red + cubeData[dataOffset + 1] = green + cubeData[dataOffset + 0] = blue + bitmapOffset += 4 + } + z += 1 + } + z = tmp + } + z += columnCount + } + + // create CIColorCube Filter + let colorCubeData = NSData(bytesNoCopy: cubeData, length: size * size * size * 4 * floatSize, freeWhenDone: true) + guard let filter = CIFilter(name: "CIColorCube") else { + print("ERROR: Cannot get CIColorCube filter") + return nil + } + filter.setValue(colorCubeData, forKey: "inputCubeData") + filter.setValue(size, forKey: "inputCubeDimension") + return filter + } + + static func getBytesFromImage(image: UIImage?) -> [UInt8]? { + var pixelValues: [UInt8]? 
+ if let imageRef = image?.cgImage { + let width = Int(imageRef.width) + let height = Int(imageRef.height) + let bitsPerComponent = 8 + let bytesPerRow = width * 4 + let totalBytes = height * bytesPerRow + + let bitmapInfo = CGImageAlphaInfo.premultipliedLast.rawValue | CGBitmapInfo.byteOrder32Little.rawValue + let colorSpace = CGColorSpaceCreateDeviceRGB() + var intensities = [UInt8](repeating: 0, count: totalBytes) + + let contextRef = CGContext(data: &intensities, width: width, height: height, bitsPerComponent: bitsPerComponent, bytesPerRow: bytesPerRow, space: colorSpace, bitmapInfo: bitmapInfo) + contextRef?.draw(imageRef, in: CGRect(x: 0.0, y: 0.0, width: CGFloat(width), height: CGFloat(height))) + + pixelValues = intensities + } + return pixelValues + } +} diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 0c7f4950..7a784823 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -1,37 +1,56 @@ import Foundation import AVFoundation -public protocol CameraDelegate { +public protocol CameraDelegate: class { + /// Output original unprocessed sample buffer on AVCaptureDataOutput queue WITHOUT frame drops. + /// + /// - Parameters: + /// - sampleBuffer: original sample buffer + /// It should be very lightweight and delay less than 1/FPS secons. + func didCaptureBufferOnOutputQueue(_ sampleBuffer: CMSampleBuffer) + + /// Output original unprocessed sample buffer on sharedImageProcessing queue WITH frame drops if needed. + /// + /// - Parameter sampleBuffer: original sample buffer func didCaptureBuffer(_ sampleBuffer: CMSampleBuffer) } public enum PhysicalCameraLocation { case backFacing case frontFacing + case frontFacingMirrored // Documentation: "The front-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeLeft and the back-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeRight." 
func imageOrientation() -> ImageOrientation { switch self { - case .backFacing: return .landscapeRight - case .frontFacing: return .landscapeLeft + case .backFacing: return .portrait + case .frontFacing: return .portrait + case .frontFacingMirrored: return .portrait } } - func captureDevicePosition() -> AVCaptureDevicePosition { + public func captureDevicePosition() -> AVCaptureDevice.Position { switch self { case .backFacing: return .back case .frontFacing: return .front + case .frontFacingMirrored: return .front } } - func device() -> AVCaptureDevice? { - let devices = AVCaptureDevice.devices(withMediaType:AVMediaTypeVideo) - for case let device as AVCaptureDevice in devices! { - if (device.position == self.captureDevicePosition()) { - return device - } + public func device(_ type: AVCaptureDevice.DeviceType) -> AVCaptureDevice? { + if let matchedDevice = AVCaptureDevice.DiscoverySession( + deviceTypes: [type], + mediaType: .video, + position: captureDevicePosition()).devices.first { + return matchedDevice } - - return AVCaptureDevice.defaultDevice(withMediaType:AVMediaTypeVideo) + + // Or use default wideAngleCamera + if let device = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: captureDevicePosition()).devices.first { + return device + } + + // or fallback to old logic + return AVCaptureDevice.default(for: .video) } } @@ -41,63 +60,92 @@ struct CameraError: Error { let initialBenchmarkFramesToIgnore = 5 public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate { - public var location:PhysicalCameraLocation { + public var location: PhysicalCameraLocation { didSet { - // TODO: Swap the camera locations, framebuffers as needed + if oldValue == location { return } + configureDeviceInput(location: location, deviceType: deviceType) } } - public var runBenchmark:Bool = false - public var logFPS:Bool = false - public var 
audioEncodingTarget:AudioEncodingTarget? { + public var runBenchmark = false + public var logFPS = false + public var audioEncodingTarget: AudioEncodingTarget? { didSet { guard let audioEncodingTarget = audioEncodingTarget else { - self.removeAudioInputsAndOutputs() return } do { try self.addAudioInputsAndOutputs() - audioEncodingTarget.activateAudioTrack() + try audioEncodingTarget.activateAudioTrack() } catch { - fatalError("ERROR: Could not connect audio target with error: \(error)") + print("ERROR: Could not connect audio target with error: \(error)") } } } + public private(set) var photoOutput: AVCapturePhotoOutput? + public let targets = TargetContainer() - public var delegate: CameraDelegate? - public let captureSession:AVCaptureSession - let inputCamera:AVCaptureDevice! - let videoInput:AVCaptureDeviceInput! - let videoOutput:AVCaptureVideoDataOutput! - var microphone:AVCaptureDevice? - var audioInput:AVCaptureDeviceInput? - var audioOutput:AVCaptureAudioDataOutput? + public weak var delegate: CameraDelegate? + public let captureSession: AVCaptureSession + public var outputBufferSize: GLSize? + public var inputCamera: AVCaptureDevice! + public private(set) var videoInput: AVCaptureDeviceInput! + public let videoOutput: AVCaptureVideoDataOutput! + public var microphone: AVCaptureDevice? + public var audioInput: AVCaptureDeviceInput? + public var audioOutput: AVCaptureAudioDataOutput? + public var dontDropFrames = false + public var deviceType: AVCaptureDevice.DeviceType { + return inputCamera.deviceType + } + public var backCameraStableMode: AVCaptureVideoStabilizationMode? { + didSet { + if location == .backFacing { + configureStabilization() + } + } + } + public var frontCameraStableMode: AVCaptureVideoStabilizationMode? { + didSet { + if location != .backFacing { + configureStabilization() + } + } + } - var supportsFullYUVRange:Bool = false - let captureAsYUV:Bool - let yuvConversionShader:ShaderProgram? 
- let frameRenderingSemaphore = DispatchSemaphore(value:1) - let cameraProcessingQueue = DispatchQueue.global(priority:DispatchQueue.GlobalQueuePriority.default) - let audioProcessingQueue = DispatchQueue.global(priority:DispatchQueue.GlobalQueuePriority.default) + var supportsFullYUVRange = false + let captureAsYUV: Bool + let yuvConversionShader: ShaderProgram? + let frameRenderingSemaphore = DispatchSemaphore(value: 1) + let cameraProcessingQueue = DispatchQueue(label: "com.sunsetlakesoftware.GPUImage.cameraProcessingQueue", qos: .default) + let audioProcessingQueue = DispatchQueue(label: "com.sunsetlakesoftware.GPUImage.audioProcessingQueue", qos: .default) let framesToIgnore = 5 var numberOfFramesCaptured = 0 - var totalFrameTimeDuringCapture:Double = 0.0 + var totalFrameTimeDuringCapture: Double = 0.0 var framesSinceLastCheck = 0 - var lastCheckTime = CFAbsoluteTimeGetCurrent() + var lastCheckTime = CACurrentMediaTime() + + var captureSessionRestartAttempts = 0 + + #if DEBUG + public var debugRenderInfo: String = "" + #endif - public init(sessionPreset:String, cameraDevice:AVCaptureDevice? = nil, location:PhysicalCameraLocation = .backFacing, captureAsYUV:Bool = true) throws { + public init(sessionPreset: AVCaptureSession.Preset, cameraDevice: AVCaptureDevice? = nil, location: PhysicalCameraLocation = .backFacing, captureAsYUV: Bool = true, photoOutput: AVCapturePhotoOutput? = nil, metadataDelegate: AVCaptureMetadataOutputObjectsDelegate? = nil, metadataObjectTypes: [AVMetadataObject.ObjectType]? 
= nil, deviceType: AVCaptureDevice.DeviceType = .builtInWideAngleCamera) throws { + debugPrint("camera init") self.location = location self.captureAsYUV = captureAsYUV self.captureSession = AVCaptureSession() self.captureSession.beginConfiguration() + captureSession.sessionPreset = sessionPreset if let cameraDevice = cameraDevice { self.inputCamera = cameraDevice } else { - if let device = location.device() { + if let device = location.device(deviceType) { self.inputCamera = device } else { self.videoInput = nil @@ -109,8 +157,9 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } } + captureSession.automaticallyConfiguresCaptureDeviceForWideColor = false do { - self.videoInput = try AVCaptureDeviceInput(device:inputCamera) + self.videoInput = try AVCaptureDeviceInput(device: inputCamera) } catch { self.videoInput = nil self.videoOutput = nil @@ -118,7 +167,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer super.init() throw error } - if (captureSession.canAddInput(videoInput)) { + if captureSession.canAddInput(videoInput) { captureSession.addInput(videoInput) } @@ -128,123 +177,252 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if captureAsYUV { supportsFullYUVRange = false - let supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes - for currentPixelFormat in supportedPixelFormats! { - if ((currentPixelFormat as! 
NSNumber).int32Value == Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)) { + #if !targetEnvironment(simulator) + let supportedPixelFormats = videoOutput.availableVideoPixelFormatTypes + for currentPixelFormat in supportedPixelFormats { + if currentPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange { supportsFullYUVRange = true } } + #endif - if (supportsFullYUVRange) { - yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] + if supportsFullYUVRange { + yuvConversionShader = crashOnShaderCompileFailure("Camera") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionFullRangeFragmentShader) } + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] } else { - yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionVideoRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange))] + yuvConversionShader = crashOnShaderCompileFailure("Camera") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionVideoRangeFragmentShader) } + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] } } else { yuvConversionShader = nil - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_32BGRA))] + videoOutput.videoSettings = 
[kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA] } - if (captureSession.canAddOutput(videoOutput)) { + if captureSession.canAddOutput(videoOutput) { captureSession.addOutput(videoOutput) } + + if let photoOutput = photoOutput { + self.photoOutput = photoOutput + if captureSession.canAddOutput(photoOutput) { + captureSession.addOutput(photoOutput) + } + } + + if let metadataDelegate = metadataDelegate, let metadataObjectTypes = metadataObjectTypes, !metadataObjectTypes.isEmpty { + let captureMetadataOutput = AVCaptureMetadataOutput() + if captureSession.canAddOutput(captureMetadataOutput) { + captureSession.addOutput(captureMetadataOutput) + + captureMetadataOutput.setMetadataObjectsDelegate(metadataDelegate, queue: cameraProcessingQueue) + captureMetadataOutput.metadataObjectTypes = metadataObjectTypes + } + } + captureSession.sessionPreset = sessionPreset + + Camera.updateVideoOutput(location: location, videoOutput: videoOutput) + captureSession.commitConfiguration() super.init() - videoOutput.setSampleBufferDelegate(self, queue:cameraProcessingQueue) + videoOutput.setSampleBufferDelegate(self, queue: cameraProcessingQueue) + + NotificationCenter.default.addObserver(self, selector: #selector(Camera.captureSessionRuntimeError(note:)), name: NSNotification.Name.AVCaptureSessionRuntimeError, object: nil) + NotificationCenter.default.addObserver(self, selector: #selector(Camera.captureSessionDidStartRunning(note:)), name: NSNotification.Name.AVCaptureSessionDidStartRunning, object: nil) + } + + public func captureStillImage(delegate: AVCapturePhotoCaptureDelegate, settings: AVCapturePhotoSettings? = nil) { + guard let photoOutput = photoOutput else { + fatalError("didn't setup photo output") + } + + let photoSettings = settings ?? 
AVCapturePhotoSettings() + +// photoSettings.isAutoStillImageStabilizationEnabled = photoOutput.isStillImageStabilizationSupported + + print("isStillImageStabilizationSupported: \(photoOutput.isStillImageStabilizationSupported), isStillImageStabilizationScene: \(photoOutput.isStillImageStabilizationScene)") + photoOutput.capturePhoto(with: photoSettings, delegate: delegate) + } + + func configureStabilization() { + let stableMode = (location == .backFacing ? backCameraStableMode : frontCameraStableMode) + Camera.updateVideoOutput(location: location, videoOutput: videoOutput, stableMode: stableMode) + } + + public func configureDeviceInput(location: PhysicalCameraLocation, deviceType: AVCaptureDevice.DeviceType, skipConfiguration: Bool = false) { + guard let device = location.device(deviceType) else { + fatalError("ERROR: Can't find video devices for \(location)") + } + + do { + let newVideoInput = try AVCaptureDeviceInput(device: device) + if !skipConfiguration { + captureSession.beginConfiguration() + } + + captureSession.removeInput(videoInput) + if captureSession.canAddInput(newVideoInput) { + inputCamera = device + captureSession.addInput(newVideoInput) + videoInput = newVideoInput + self.location = location + configureStabilization() + } else { + print("Can't add video input") + captureSession.addInput(videoInput) + } + + if !skipConfiguration { + captureSession.commitConfiguration() + } + } catch let error { + fatalError("ERROR: Could not init device: \(error)") + } } deinit { - sharedImageProcessingContext.runOperationSynchronously{ - self.stopCapture() - self.videoOutput.setSampleBufferDelegate(nil, queue:nil) - self.audioOutput?.setSampleBufferDelegate(nil, queue:nil) + debugPrint("camera deinit") + + let captureSession = self.captureSession + DispatchQueue.global().async { + if captureSession.isRunning { + // Don't call this on the sharedImageProcessingContext otherwise you may get a deadlock + // since this waits for the captureOutput() delegate call to 
finish. + captureSession.stopRunning() + } + } + + sharedImageProcessingContext.runOperationSynchronously { + self.videoOutput?.setSampleBufferDelegate(nil, queue: nil) + self.audioOutput?.setSampleBufferDelegate(nil, queue: nil) + } + } + + @objc func captureSessionRuntimeError(note: NSNotification) { + print("ERROR: Capture session runtime error: \(String(describing: note.userInfo))") + if self.captureSessionRestartAttempts < 1 { + DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { + self.startCapture() + } + self.captureSessionRestartAttempts += 1 } } - public func captureOutput(_ captureOutput:AVCaptureOutput!, didOutputSampleBuffer sampleBuffer:CMSampleBuffer!, from connection:AVCaptureConnection!) { - guard (captureOutput != audioOutput) else { + @objc func captureSessionDidStartRunning(note: NSNotification) { + self.captureSessionRestartAttempts = 0 + } + + public func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { + guard captureOutput != audioOutput else { self.processAudioSampleBuffer(sampleBuffer) return } + + delegate?.didCaptureBufferOnOutputQueue(sampleBuffer) - guard (frameRenderingSemaphore.wait(timeout:DispatchTime.now()) == DispatchTimeoutResult.success) else { return } - - let startTime = CFAbsoluteTimeGetCurrent() + let notFrameDrop = dontDropFrames - let cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer)! 
- let bufferWidth = CVPixelBufferGetWidth(cameraFrame) - let bufferHeight = CVPixelBufferGetHeight(cameraFrame) - let currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + guard notFrameDrop || (frameRenderingSemaphore.wait(timeout: DispatchTime.now()) == DispatchTimeoutResult.success) else { return } - CVPixelBufferLockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - sharedImageProcessingContext.runOperationAsynchronously{ - let cameraFramebuffer:Framebuffer + sharedImageProcessingContext.runOperationAsynchronously { + defer { + if !notFrameDrop { + self.frameRenderingSemaphore.signal() + } + } + let startTime = CACurrentMediaTime() + guard let cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer) else { + print("Warning: cannot get imageBuffer") + return + } + let bufferWidth = CVPixelBufferGetWidth(cameraFrame) + let bufferHeight = CVPixelBufferGetHeight(cameraFrame) + let currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + CVPixelBufferLockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + let cameraFramebuffer: Framebuffer self.delegate?.didCaptureBuffer(sampleBuffer) if self.captureAsYUV { - let luminanceFramebuffer:Framebuffer - let chrominanceFramebuffer:Framebuffer + let luminanceFramebuffer: Framebuffer + let chrominanceFramebuffer: Framebuffer if sharedImageProcessingContext.supportsTextureCaches() { - var luminanceTextureRef:CVOpenGLESTexture? = nil - let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, cameraFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceTextureRef) + var luminanceTextureRef: CVOpenGLESTexture? 
+ _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, cameraFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceTextureRef) let luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef!) glActiveTexture(GLenum(GL_TEXTURE4)) glBindTexture(GLenum(GL_TEXTURE_2D), luminanceTexture) glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GL_CLAMP_TO_EDGE) glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GL_CLAMP_TO_EDGE) - luminanceFramebuffer = try! Framebuffer(context:sharedImageProcessingContext, orientation:self.location.imageOrientation(), size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true, overriddenTexture:luminanceTexture) + luminanceFramebuffer = try! Framebuffer(context: sharedImageProcessingContext, orientation: self.location.imageOrientation(), size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true, overriddenTexture: luminanceTexture) - var chrominanceTextureRef:CVOpenGLESTexture? = nil - let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, cameraFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceTextureRef) + var chrominanceTextureRef: CVOpenGLESTexture? + _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, cameraFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceTextureRef) let chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef!) 
glActiveTexture(GLenum(GL_TEXTURE5)) glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceTexture) glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GL_CLAMP_TO_EDGE) glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GL_CLAMP_TO_EDGE) - chrominanceFramebuffer = try! Framebuffer(context:sharedImageProcessingContext, orientation:self.location.imageOrientation(), size:GLSize(width:GLint(bufferWidth / 2), height:GLint(bufferHeight / 2)), textureOnly:true, overriddenTexture:chrominanceTexture) + chrominanceFramebuffer = try! Framebuffer(context: sharedImageProcessingContext, orientation: self.location.imageOrientation(), size: GLSize(width: GLint(bufferWidth / 2), height: GLint(bufferHeight / 2)), textureOnly: true, overriddenTexture: chrominanceTexture) } else { glActiveTexture(GLenum(GL_TEXTURE4)) - luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:self.location.imageOrientation(), size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) + luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: self.location.imageOrientation(), size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true) luminanceFramebuffer.lock() glBindTexture(GLenum(GL_TEXTURE_2D), luminanceFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), 0, GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(cameraFrame, 0)) glActiveTexture(GLenum(GL_TEXTURE5)) - chrominanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:self.location.imageOrientation(), size:GLSize(width:GLint(bufferWidth / 2), height:GLint(bufferHeight / 2)), textureOnly:true) + chrominanceFramebuffer = 
sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: self.location.imageOrientation(), size: GLSize(width: GLint(bufferWidth / 2), height: GLint(bufferHeight / 2)), textureOnly: true) chrominanceFramebuffer.lock() glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), 0, GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(cameraFrame, 1)) } - cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:luminanceFramebuffer.sizeForTargetOrientation(.portrait), textureOnly:false) + let inputSize = luminanceFramebuffer.sizeForTargetOrientation(.portrait).gpuSize + let outputSize = self.outputBufferSize ?? luminanceFramebuffer.sizeForTargetOrientation(.portrait) + let resizeOutput = limitedSizeAndRatio(of: inputSize, to: outputSize.gpuSize) + cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(resizeOutput.finalCropSize), textureOnly: false) - let conversionMatrix:Matrix3x3 - if (self.supportsFullYUVRange) { + let conversionMatrix: Matrix3x3 + if self.supportsFullYUVRange { conversionMatrix = colorConversionMatrix601FullRangeDefault } else { conversionMatrix = colorConversionMatrix601Default } - convertYUVToRGB(shader:self.yuvConversionShader!, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer, resultFramebuffer:cameraFramebuffer, colorConversionMatrix:conversionMatrix) + convertYUVToRGB(shader: self.yuvConversionShader!, luminanceFramebuffer: luminanceFramebuffer, chrominanceFramebuffer: chrominanceFramebuffer, resizeOutput: resizeOutput, resultFramebuffer: cameraFramebuffer, colorConversionMatrix: conversionMatrix) } else { - cameraFramebuffer = 
sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:self.location.imageOrientation(), size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) + cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: self.location.imageOrientation(), size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true) glBindTexture(GLenum(GL_TEXTURE_2D), cameraFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_RGBA, GLsizei(bufferWidth), GLsizei(bufferHeight), 0, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(cameraFrame)) } - CVPixelBufferUnlockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) + CVPixelBufferUnlockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + + #if DEBUG + self.debugRenderInfo = """ +{ + Camera: { + input: \(bufferWidth)x\(bufferHeight), input_type: CMSampleBuffer, + output: { size: \(cameraFramebuffer.debugRenderInfo) }, + time: \((CACurrentMediaTime() - startTime) * 1000.0)ms + } +}, +""" + #endif - cameraFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(currentTime)) + cameraFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(currentTime)) self.updateTargetsWithFramebuffer(cameraFramebuffer) + // Clean up after all done + if self.captureAsYUV && sharedImageProcessingContext.supportsTextureCaches() { + CVOpenGLESTextureCacheFlush(sharedImageProcessingContext.coreVideoTextureCache, 0) + } + if self.runBenchmark { self.numberOfFramesCaptured += 1 - if (self.numberOfFramesCaptured > initialBenchmarkFramesToIgnore) { - let currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime) + if self.numberOfFramesCaptured > initialBenchmarkFramesToIgnore { + let currentFrameTime = (CACurrentMediaTime() - startTime) self.totalFrameTimeDuringCapture += currentFrameTime print("Average frame time : \(1000.0 * 
self.totalFrameTimeDuringCapture / Double(self.numberOfFramesCaptured - initialBenchmarkFramesToIgnore)) ms") print("Current frame time : \(1000.0 * currentFrameTime) ms") @@ -252,62 +430,68 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } if self.logFPS { - if ((CFAbsoluteTimeGetCurrent() - self.lastCheckTime) > 1.0) { - self.lastCheckTime = CFAbsoluteTimeGetCurrent() + if (CACurrentMediaTime() - self.lastCheckTime) > 1.0 { + self.lastCheckTime = CACurrentMediaTime() print("FPS: \(self.framesSinceLastCheck)") self.framesSinceLastCheck = 0 } self.framesSinceLastCheck += 1 } - - self.frameRenderingSemaphore.signal() } } + + public func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { + debugPrint("dropped a video frame from camera") + } public func startCapture() { self.numberOfFramesCaptured = 0 self.totalFrameTimeDuringCapture = 0 - if (!captureSession.isRunning) { + if !captureSession.isRunning { captureSession.startRunning() } } - public func stopCapture() { - if (captureSession.isRunning) { + public func stopCapture(force: Bool = false) { + // NOTE: Sometimes the camera is actually running, but isRunning is false. When it happens, set force to true. 
+ if captureSession.isRunning || force { captureSession.stopRunning() } } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { // Not needed for camera inputs } // MARK: - // MARK: Audio processing - func addAudioInputsAndOutputs() throws { - guard (audioOutput == nil) else { return } + public func addAudioInputsAndOutputs() throws { + guard audioOutput == nil else { return } captureSession.beginConfiguration() defer { captureSession.commitConfiguration() } - microphone = AVCaptureDevice.defaultDevice(withMediaType:AVMediaTypeAudio) - audioInput = try AVCaptureDeviceInput(device:microphone) + microphone = AVCaptureDevice.default(for: .audio) + guard let microphone = microphone else { return } + audioInput = try AVCaptureDeviceInput(device: microphone) + guard let audioInput = audioInput else { return } if captureSession.canAddInput(audioInput) { captureSession.addInput(audioInput) } - audioOutput = AVCaptureAudioDataOutput() - if captureSession.canAddOutput(audioOutput) { - captureSession.addOutput(audioOutput) + let output = AVCaptureAudioDataOutput() + if captureSession.canAddOutput(output) { + captureSession.addOutput(output) } - audioOutput?.setSampleBufferDelegate(self, queue:audioProcessingQueue) + output.setSampleBufferDelegate(self, queue: audioProcessingQueue) + audioOutput = output } - func removeAudioInputsAndOutputs() { - guard (audioOutput != nil) else { return } + public func removeAudioInputsAndOutputs() { + guard audioOutput != nil else { return } captureSession.beginConfiguration() captureSession.removeInput(audioInput!) 
@@ -318,7 +502,27 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer captureSession.commitConfiguration() } - func processAudioSampleBuffer(_ sampleBuffer:CMSampleBuffer) { - self.audioEncodingTarget?.processAudioBuffer(sampleBuffer) + func processAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer) { + self.audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: false) + } +} + +private extension Camera { + static func updateVideoOutput(location: PhysicalCameraLocation, videoOutput: AVCaptureOutput, stableMode: AVCaptureVideoStabilizationMode? = nil) { + for connection in videoOutput.connections { + if connection.isVideoMirroringSupported { + connection.isVideoMirrored = (location == .frontFacingMirrored) + } + + if connection.isVideoOrientationSupported { + connection.videoOrientation = .portrait + } + + if let stableMode = stableMode, connection.isVideoStabilizationSupported { + connection.preferredVideoStabilizationMode = stableMode + } + + print("isVideoStabilizationSupported: \(connection.isVideoStabilizationSupported), activeVideoStabilizationMode: \(connection.activeVideoStabilizationMode.rawValue)") + } } } diff --git a/framework/Source/iOS/FramebufferGenerator.swift b/framework/Source/iOS/FramebufferGenerator.swift new file mode 100644 index 00000000..03a630bb --- /dev/null +++ b/framework/Source/iOS/FramebufferGenerator.swift @@ -0,0 +1,257 @@ +// +// FramebufferGenerator.swift +// GPUImage2 +// +// Created by 陈品霖 on 2019/8/22. +// + +import CoreMedia + +public class FramebufferGenerator { + lazy var yuvConversionShader = _setupShader() + private(set) var outputSize: GLSize? + private(set) var pixelBufferPool: CVPixelBufferPool? + private var renderFramebuffer: Framebuffer? + + public init() { + } + + public func generateFromYUVBuffer(_ yuvPixelBuffer: CVPixelBuffer, frameTime: CMTime, videoOrientation: ImageOrientation) -> Framebuffer? { + var framebuffer: Framebuffer? 
+ sharedImageProcessingContext.runOperationSynchronously { + framebuffer = _generateFromYUVBuffer(yuvPixelBuffer, frameTime: frameTime, videoOrientation: videoOrientation) + } + return framebuffer + } + + public func convertToPixelBuffer(_ framebuffer: Framebuffer) -> CVPixelBuffer? { + var pixelBuffer: CVPixelBuffer? + sharedImageProcessingContext.runOperationSynchronously { + pixelBuffer = _convertToPixelBuffer(framebuffer) + } + return pixelBuffer + } + + public func processAndGenerateFromBuffer(_ pixelBuffer: CVPixelBuffer, frameTime: CMTime, processSteps: [PictureInputProcessStep], videoOrientation: ImageOrientation) -> Framebuffer? { + var framebuffer: Framebuffer? + sharedImageProcessingContext.runOperationSynchronously { + framebuffer = _processAndGenerateFromBuffer(pixelBuffer, frameTime: frameTime, processSteps: processSteps, videoOrientation: videoOrientation) + } + return framebuffer + } +} + +private extension FramebufferGenerator { + func _setupShader() -> ShaderProgram? { + var yuvConversionShader: ShaderProgram? + sharedImageProcessingContext.runOperationSynchronously { + yuvConversionShader = crashOnShaderCompileFailure("MoviePlayer") { + try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), + fragmentShader: YUVConversionFullRangeFragmentShader) + } + } + return yuvConversionShader + } + + func _generateFromYUVBuffer(_ yuvPixelBuffer: CVPixelBuffer, frameTime: CMTime, videoOrientation: ImageOrientation) -> Framebuffer? { +// let startTime = CACurrentMediaTime() + guard let yuvConversionShader = yuvConversionShader else { + debugPrint("ERROR! 
yuvConversionShader hasn't been setup before starting") + return nil + } + let originalOrientation = videoOrientation.originalOrientation + let bufferHeight = CVPixelBufferGetHeight(yuvPixelBuffer) + let bufferWidth = CVPixelBufferGetWidth(yuvPixelBuffer) + let conversionMatrix = colorConversionMatrix601FullRangeDefault + CVPixelBufferLockBaseAddress(yuvPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + defer { + CVPixelBufferUnlockBaseAddress(yuvPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + CVOpenGLESTextureCacheFlush(sharedImageProcessingContext.coreVideoTextureCache, 0) + } + + glActiveTexture(GLenum(GL_TEXTURE0)) + var luminanceGLTexture: CVOpenGLESTexture? + let luminanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, yuvPixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceGLTexture) + if luminanceGLTextureResult != kCVReturnSuccess || luminanceGLTexture == nil { + print("Could not create LuminanceGLTexture") + return nil + } + + let luminanceTexture = CVOpenGLESTextureGetName(luminanceGLTexture!) 
+ + glBindTexture(GLenum(GL_TEXTURE_2D), luminanceTexture) + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)) + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)) + + let luminanceFramebuffer: Framebuffer + do { + luminanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, + orientation: originalOrientation, + size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), + textureOnly: true, + overriddenTexture: luminanceTexture) + } catch { + print("Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)") + return nil + } + + glActiveTexture(GLenum(GL_TEXTURE1)) + var chrominanceGLTexture: CVOpenGLESTexture? + let chrominanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, yuvPixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceGLTexture) + + if chrominanceGLTextureResult != kCVReturnSuccess || chrominanceGLTexture == nil { + print("Could not create ChrominanceGLTexture") + return nil + } + + let chrominanceTexture = CVOpenGLESTextureGetName(chrominanceGLTexture!) 
+ + glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceTexture) + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)) + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)) + + let chrominanceFramebuffer: Framebuffer + do { + chrominanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, + orientation: originalOrientation, + size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), + textureOnly: true, + overriddenTexture: chrominanceTexture) + } catch { + print("Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)") + return nil + } + + let portraitSize: GLSize + switch videoOrientation.rotationNeededForOrientation(.portrait) { + case .noRotation, .rotate180, .flipHorizontally, .flipVertically: + portraitSize = GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)) + case .rotateCounterclockwise, .rotateClockwise, .rotateClockwiseAndFlipVertically, .rotateClockwiseAndFlipHorizontally: + portraitSize = GLSize(width: GLint(bufferHeight), height: GLint(bufferWidth)) + } + + let framebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: portraitSize, textureOnly: false) + + convertYUVToRGB(shader: yuvConversionShader, + luminanceFramebuffer: luminanceFramebuffer, + chrominanceFramebuffer: chrominanceFramebuffer, + resultFramebuffer: framebuffer, + colorConversionMatrix: conversionMatrix) + framebuffer.timingStyle = .videoFrame(timestamp: Timestamp(frameTime)) + +// debugPrint("Generated framebuffer from CVPixelBuffer. time: \(CACurrentMediaTime() - startTime)") + + return framebuffer + } + + func _convertToPixelBuffer(_ framebuffer: Framebuffer) -> CVPixelBuffer? 
{ + if pixelBufferPool == nil || outputSize?.width != framebuffer.size.width || outputSize?.height != framebuffer.size.height { + outputSize = framebuffer.size + pixelBufferPool = _createPixelBufferPool(framebuffer.size.width, framebuffer.size.height, FourCharCode(kCVPixelFormatType_32BGRA), 3) + } + guard let pixelBufferPool = pixelBufferPool else { return nil } + var outPixelBuffer: CVPixelBuffer? + let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, pixelBufferPool, &outPixelBuffer) + guard let pixelBuffer = outPixelBuffer, pixelBufferStatus == kCVReturnSuccess else { + print("WARNING: Unable to create pixel buffer, dropping frame") + return nil + } + + do { + if renderFramebuffer == nil { + CVBufferSetAttachment(pixelBuffer, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, .shouldPropagate) + CVBufferSetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, .shouldPropagate) + CVBufferSetAttachment(pixelBuffer, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, .shouldPropagate) + } + + let bufferSize = framebuffer.size + var cachedTextureRef: CVOpenGLESTexture? + _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) + let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) 
+ + renderFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: bufferSize, textureOnly: false, overriddenTexture: cachedTexture) + + renderFramebuffer?.activateFramebufferForRendering() + clearFramebufferWithColor(Color.black) + CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings: ShaderUniformSettings(), vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [framebuffer.texturePropertiesForOutputRotation(.noRotation)], context: sharedImageProcessingContext) + + glFinish() + } catch { + print("WARNING: Trouble appending pixel buffer at time: \(framebuffer.timingStyle.timestamp?.seconds() ?? 0) \(error)") + } + + CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + return pixelBuffer + } + + func _processAndGenerateFromBuffer(_ yuvPixelBuffer: CVPixelBuffer, frameTime: CMTime, processSteps: [PictureInputProcessStep], videoOrientation: ImageOrientation) -> Framebuffer? 
{ +// let startTime = CACurrentMediaTime() + CVPixelBufferLockBaseAddress(yuvPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + defer { + CVPixelBufferUnlockBaseAddress(yuvPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + CVOpenGLESTextureCacheFlush(sharedImageProcessingContext.coreVideoTextureCache, 0) + } + + let ciImage = CIImage(cvPixelBuffer: yuvPixelBuffer, + options: [.applyOrientationProperty: true, + .properties: [ kCGImagePropertyOrientation: videoOrientation.cgImageOrientation.rawValue ]]) + var processStepsWithCoordinateCorrection = processSteps + // NOTE: CIImage coordinate is mirrored compared with OpenGLES when calling draw(_:in:size:from:), so it needs to be mirrored before render to OpenGL + processStepsWithCoordinateCorrection.append(.scale(x: 1, y: -1, anchorPoint: .extentCenter)) + let processedImage = ciImage.processed(with: processStepsWithCoordinateCorrection) + +// debugPrint("Process CIImage. time: \(CACurrentMediaTime() - startTime)") + + let bufferHeight = Int32(processedImage.extent.height) + let bufferWidth = Int32(processedImage.extent.width) + + let framebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: false) + framebuffer.timingStyle = .videoFrame(timestamp: Timestamp(frameTime)) + + // Bind texture + framebuffer.activateFramebufferForRendering() + clearFramebufferWithColor(Color.black) + glBindTexture(GLenum(GL_TEXTURE_2D), framebuffer.texture) + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)) + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)) + + // TODO: this API performance is slower than Crop filter, improve this later + CIImage.ciGPUContext.draw(processedImage, in: CGRect(origin: .zero, size: processedImage.accurateExtent.rounded(.towardZero).size), from: 
processedImage.accurateExtent.rounded(.towardZero)) + +// debugPrint("Render CIImage to OpenGL texture. time: \(CACurrentMediaTime() - startTime)") + + return framebuffer + } + + func _createPixelBufferPool(_ width: Int32, _ height: Int32, _ pixelFormat: FourCharCode, _ maxBufferCount: Int32) -> CVPixelBufferPool? { + var outputPool: CVPixelBufferPool? + + let sourcePixelBufferOptions: NSDictionary = [kCVPixelBufferPixelFormatTypeKey: pixelFormat, + kCVPixelBufferWidthKey: width, + kCVPixelBufferHeightKey: height, + kCVPixelFormatOpenGLESCompatibility: true, + kCVPixelBufferIOSurfaceCoreAnimationCompatibilityKey: true, + kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey: true, + kCVPixelBufferIOSurfacePropertiesKey: NSDictionary()] + + let pixelBufferPoolOptions: NSDictionary = [kCVPixelBufferPoolMinimumBufferCountKey: maxBufferCount] + + CVPixelBufferPoolCreate(kCFAllocatorDefault, pixelBufferPoolOptions, sourcePixelBufferOptions, &outputPool) + + return outputPool + } +} + +public extension ImageOrientation { + var originalOrientation: ImageOrientation { + switch self { + case .portrait, .portraitUpsideDown: + return self + case .landscapeLeft: + return .landscapeRight + case .landscapeRight: + return .landscapeLeft + } + } +} diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift new file mode 100644 index 00000000..5a49de8d --- /dev/null +++ b/framework/Source/iOS/MovieCache.swift @@ -0,0 +1,418 @@ +// +// MovieCache.swift +// GPUImage2 +// +// Created by 陈品霖 on 2020/3/27. 
+// + +import Foundation +import AVFoundation + +public enum MovieCacheError: Error, Equatable, CustomStringConvertible { + case invalidState + case sameState + case emptyMovieOutput + case unmatchedVideoID + case movieOutputError(Error) + + public var description: String { + switch self { + case .invalidState: return "invalidState" + case .sameState: return "sameState" + case .emptyMovieOutput: return "emptyMovieOutput" + case .movieOutputError: return "movieOutputError" + case .unmatchedVideoID: return "unmatchedVideoID" + } + } + + public static func == (lhs: MovieCacheError, rhs: MovieCacheError) -> Bool { + return lhs.description == rhs.description + } +} + +public class MovieCache: ImageConsumer, AudioEncodingTarget { + public typealias Completion = (Result) -> Void + public let sources = SourceContainer() + public let maximumInputs: UInt = 1 + public private(set) var movieOutput: MovieOutput? + public private(set) lazy var framebufferCache = [Framebuffer]() + public private(set) lazy var videoSampleBufferCache = NSMutableArray() + public private(set) lazy var audioSampleBufferCache = [CMSampleBuffer]() + public private(set) var cacheBuffersDuration: TimeInterval = 0 + public enum State: String { + case unknown + case idle + case caching + case writing + case stopped + case canceled + } + public private(set) var state = State.unknown + private var writingCallback: Completion? + public var isReadyToWrite: Bool { + guard let movieOutput = movieOutput else { return false } + return movieOutput.writerStatus == .unknown + } + private var startingVideoID: String? 
+ #if DEBUG + public var debugRenderInfo: String = "{ MovieCache: passthrough }," + #endif + + public init() { + print("MovieCache init") + } + + deinit { + if movieOutput?.writerStatus == .writing { + print("[WARNING] movieOutput is still writing, cancel it now") + movieOutput?.cancelRecording() + } + } + + public func startCaching(duration: TimeInterval) { + MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in + self?._startCaching(duration: duration) + } + } + + public func setMovieOutputIfNotReady(url: URL, + fps: Double, + size: Size, + needAlignAV: Bool, + fileType: AVFileType = .mov, + liveVideo: Bool = false, + videoSettings: [String: Any]? = nil, + videoNaturalTimeScale: CMTimeScale? = nil, + optimizeForNetworkUse: Bool = false, + disablePixelBufferAttachments: Bool = true, + audioSettings: [String: Any]? = nil, + audioSourceFormatHint: CMFormatDescription? = nil, + _ configure: ((MovieOutput) -> Void)? = nil) { + MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in + self?._setMovieOutputIfNotReady(url: url, + fps: fps, + size: size, + needAlignAV: needAlignAV, + fileType: fileType, + liveVideo: liveVideo, + videoSettings: videoSettings, + videoNaturalTimeScale: videoNaturalTimeScale, + optimizeForNetworkUse: optimizeForNetworkUse, + disablePixelBufferAttachments: disablePixelBufferAttachments, + audioSettings: audioSettings, + audioSourceFormatHint: audioSourceFormatHint, + configure) + } + } + + public func startWriting(videoID: String?, _ completionCallback: Completion? = nil) { + MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in + self?._startWriting(videoID: videoID, completionCallback) + } + } + + public func stopWriting(videoID: String?, _ completionCallback: ((MovieOutput?, MovieCacheError?) -> Void)? 
= nil) { + MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in + self?._stopWriting(videoID: videoID, completionCallback) + } + } + + public func cancelWriting(videoID: String?, _ completionCallback: Completion? = nil) { + MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in + self?._cancelWriting(videoID: videoID, completionCallback) + } + } + + public func stopCaching(needsCancel: Bool = false, _ completionCallback: Completion? = nil) { + MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in + self?._stopCaching(needsCancel: needsCancel, completionCallback) + } + } +} + +extension MovieCache { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { +// debugPrint("get new framebuffer time:\(framebuffer.timingStyle.timestamp?.asCMTime.seconds ?? .zero)") + guard shouldProcessBuffer else { return } + glFinish() + _cacheFramebuffer(framebuffer) + _writeFramebuffers() + } + + public func activateAudioTrack() throws { + try movieOutput?.activateAudioTrack() + } + + public func processAudioBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) { + guard shouldProcessBuffer else { return } + _cacheAudioSampleBuffer(sampleBuffer) + _writeAudioSampleBuffers(shouldInvalidateSampleWhenDone) + } + + public func processVideoBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) { + guard shouldProcessBuffer else { return } + _cacheVideoSampleBuffer(sampleBuffer) + _writeVideoSampleBuffers(shouldInvalidateSampleWhenDone) + } + + public func readyForNextAudioBuffer() -> Bool { + guard shouldProcessBuffer else { return false } + return true + } +} + +private extension MovieCache { + var shouldProcessBuffer: Bool { + return state != .unknown && state != .idle + } + + func _tryTransitingState(to newState: State, _ errorCallback: Completion? = nil) -> MovieCacheError? 
{ + if state == newState { + // NOTE: for same state, just do nothing and callback + print("WARNING: Same state transition for:\(state)") + errorCallback?(.success(movieOutput)) + return .sameState + } + switch (state, newState) { + case (.unknown, .idle), (.unknown, .caching), (.unknown, .writing), (.unknown, .stopped), + (.idle, .caching), (.idle, .writing), + (.caching, .writing), (.caching, .stopped), (.caching, .idle), + (.writing, .stopped), + (.stopped, .idle), (.stopped, .caching), (.stopped, .writing), + (.canceled, .idle), (.canceled, .caching), (.canceled, .writing), (.canceled, .stopped), + (_, .canceled): // any state can transition to canceled + debugPrint("state transite from:\(state) to:\(newState)") + state = newState + return nil + default: + assertionFailure() + print("ERROR: invalid state transition from:\(state) to:\(newState)") + errorCallback?(.failure(.invalidState)) + return .invalidState + } + } + + func _startCaching(duration: TimeInterval) { + let error = _tryTransitingState(to: .caching) + guard error == nil else { return } + print("start caching") + cacheBuffersDuration = duration + } + + func _setMovieOutputIfNotReady(url: URL, + fps: Double, + size: Size, + needAlignAV: Bool, + fileType: AVFileType = .mov, + liveVideo: Bool = false, + videoSettings: [String: Any]? = nil, + videoNaturalTimeScale: CMTimeScale? = nil, + optimizeForNetworkUse: Bool = false, + disablePixelBufferAttachments: Bool = true, + audioSettings: [String: Any]? = nil, + audioSourceFormatHint: CMFormatDescription? = nil, + _ configure: ((MovieOutput) -> Void)? = nil) { + guard !isReadyToWrite else { + print("No need to create MovieOutput") + return + } + if let currentMovieOutput = movieOutput, movieOutput?.writerStatus == .writing { + print("MovieOutput is still writing, skip set MovieOutput.
state:\(state) currentURL:\(currentMovieOutput.url) newURL:\(url)") + return + } + do { + let newMovieOutput = try MovieOutput(URL: url, + fps: fps, + size: size, + needAlignAV: needAlignAV, + fileType: fileType, + liveVideo: liveVideo, + videoSettings: videoSettings, + videoNaturalTimeScale: videoNaturalTimeScale, + optimizeForNetworkUse: optimizeForNetworkUse, + disablePixelBufferAttachments: disablePixelBufferAttachments, + audioSettings: audioSettings, + audioSourceFormatHint: audioSourceFormatHint) + self.movieOutput = newMovieOutput + print("set movie output") + configure?(newMovieOutput) + if state == .writing { + print("it is already writing, start MovieOutput recording immediately, videoID:\(String(describing: startingVideoID))") + _startMovieOutput(videoID: startingVideoID, writingCallback) + startingVideoID = nil + } + } catch { + print("[ERROR] can't create movie output") + } + } + + func _startWriting(videoID: String?, _ completionCallback: Completion? = nil) { + guard _tryTransitingState(to: .writing) == nil else { return } + guard movieOutput != nil else { + print("movie output is not ready yet, waiting...") + writingCallback = completionCallback + startingVideoID = videoID + return + } + print("start writing, videoID:\(String(describing: videoID))") + _startMovieOutput(videoID: videoID, completionCallback) + } + + func _startMovieOutput(videoID: String?, _ completionCallback: Completion? = nil) { + guard let movieOutput = movieOutput else { + completionCallback?(.failure(.emptyMovieOutput)) + return + } + movieOutput.videoID = videoID + movieOutput.startRecording(sync: true) { _, error in + if let error = error { + completionCallback?(.failure(.movieOutputError(error))) + } else { + completionCallback?(.success(movieOutput)) + } + } + } + + func _stopWriting(videoID: String?, _ completionCallback: ((MovieOutput?, MovieCacheError?) -> Void)? = nil) { + guard videoID == movieOutput?.videoID else { + print("stopWriting failed. 
Unmatched videoID:\(String(describing: videoID)) movieOutput?.videoID:\(String(describing: movieOutput?.videoID))") + completionCallback?(movieOutput, .unmatchedVideoID) + return + } + guard _tryTransitingState(to: .stopped) == nil else { + completionCallback?(movieOutput, .invalidState) + return + } + guard let movieOutput = movieOutput else { + completionCallback?(nil, .emptyMovieOutput) + return + } + print("stop writing. videoID:\(String(describing: videoID)) videoFramebuffers:\(framebufferCache.count) audioSampleBuffers:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") + movieOutput.finishRecording(sync: true) { + if let error = movieOutput.writerError { + completionCallback?(movieOutput, .movieOutputError(error)) + } else { + completionCallback?(movieOutput, nil) + } + } + self.movieOutput = nil + writingCallback = nil + } + + func _cancelWriting(videoID: String?, _ completionCallback: Completion? = nil) { + guard videoID == movieOutput?.videoID else { + print("cancelWriting failed. Unmatched videoID:\(String(describing: videoID)) movieOutput?.videoID:\(String(describing: movieOutput?.videoID))") + completionCallback?(.failure(.unmatchedVideoID)) + return + } + defer { + movieOutput = nil + writingCallback = nil + } + guard _tryTransitingState(to: .canceled) == nil else { return } + guard let movieOutput = movieOutput else { + completionCallback?(.success(self.movieOutput)) + return + } + print("cancel writing, videoID:\(String(describing: videoID))") + movieOutput.cancelRecording(sync: true) { + completionCallback?(.success(movieOutput)) + } + } + + func _stopCaching(needsCancel: Bool, _ completionCallback: Completion?) 
{ + let movieOutput = self.movieOutput + if needsCancel && state == .writing { + _cancelWriting(videoID: nil) + startingVideoID = nil + } + defer { + completionCallback?(.success(movieOutput)) + } + guard _tryTransitingState(to: .idle) == nil else { return } + print("stop caching") + _cleanBufferCaches() + } + + func _cleanBufferCaches() { + print("Clean all buffers framebufferCache:\(framebufferCache.count) audioSampleBuffer:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") + sharedImageProcessingContext.runOperationSynchronously { + self.framebufferCache.forEach { $0.unlock() } + self.framebufferCache.removeAll() + self.videoSampleBufferCache.removeAllObjects() + self.audioSampleBufferCache.removeAll() + } + } + + func _cacheFramebuffer(_ framebuffer: Framebuffer) { + guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { + print("Cannot get timestamp from framebuffer, dropping frame") + return + } + framebufferCache.append(framebuffer) + while let firstBufferTime = framebufferCache.first?.timingStyle.timestamp?.asCMTime, CMTimeSubtract(frameTime, firstBufferTime).seconds > cacheBuffersDuration { +// debugPrint("dropping oldest video framebuffer time:\(firstBufferTime.seconds)") + _ = framebufferCache.removeFirst() + } + } + + func _writeFramebuffers() { + guard state == .writing else { return } + var appendedBufferCount = 0 + for framebuffer in framebufferCache { + guard movieOutput?._processFramebuffer(framebuffer) == true else { break } + appendedBufferCount += 1 + framebuffer.unlock() + // NOTE: don't occupy too much GPU time if it has already accumulated lots of framebuffers, + // so that it can reduce frame drops and video frame brightness flashing.
+ guard sharedImageProcessingContext.alreadyExecuteTime < 1.0 / 40.0 else { break } + } + framebufferCache.removeFirst(appendedBufferCount) + } + + func _cacheAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer) { + let frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + audioSampleBufferCache.append(sampleBuffer) + while let firstBuffer = audioSampleBufferCache.first, CMTimeSubtract(frameTime, CMSampleBufferGetPresentationTimeStamp(firstBuffer)).seconds > cacheBuffersDuration { +// debugPrint("dropping oldest audio buffer time:\(CMSampleBufferGetPresentationTimeStamp(firstBuffer)).seconds))") + _ = audioSampleBufferCache.removeFirst() + } + } + + func _writeAudioSampleBuffers(_ shouldInvalidateSampleWhenDone: Bool) { + guard state == .writing else { return } + var appendedBufferCount = 0 + for audioBuffer in audioSampleBufferCache { + // debugPrint("[Caching] appending audio buffer \(i+1)/\(self.audioSampleBufferCache.count) at:\(CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer).seconds)") + guard movieOutput?._processAudioSampleBuffer(audioBuffer, shouldInvalidateSampleWhenDone: shouldInvalidateSampleWhenDone) == true else { break } + appendedBufferCount += 1 + } + audioSampleBufferCache.removeFirst(appendedBufferCount) + } + + func _cacheVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer) { + let frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + videoSampleBufferCache.add(sampleBuffer) + // debugPrint("[Caching] cache new video sample buffer at:\(frameTime.seconds)") + if videoSampleBufferCache.count >= 13 { + // Be careful of caching too much sample buffers from camera captureOutput. iOS has a hard limit of camera buffer count: 15. + // debugPrint("WARNING: almost reach system buffer limit: \(self.videoSampleBufferCache.count)/15") + } + while let firstBuffer = videoSampleBufferCache.firstObject, CMTimeSubtract(frameTime, CMSampleBufferGetPresentationTimeStamp(firstBuffer as! 
CMSampleBuffer)).seconds > cacheBuffersDuration { +// debugPrint("dropping oldest video buffer time:\(CMSampleBufferGetPresentationTimeStamp(firstBuffer as! CMSampleBuffer).seconds)") + videoSampleBufferCache.removeObject(at: 0) + } + } + + private func _writeVideoSampleBuffers(_ shouldInvalidateSampleWhenDone: Bool) { + guard state == .writing else { return } + var appendedBufferCount = 0 + // Drain all cached buffers at first + for sampleBufferObject in videoSampleBufferCache { + let sampleBuffer = sampleBufferObject as! CMSampleBuffer + guard movieOutput?._processVideoSampleBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: shouldInvalidateSampleWhenDone) == true else { break } + appendedBufferCount += 1 + } + videoSampleBufferCache.removeObjects(in: NSRange(0.. Void)? + public var startProcessingCallback: (() -> Void)? + // Progress block of the video with a paramater value of 0-1. + // Can be used to check video encoding progress. Not called from main thread. + public var progress: ((Double) -> Void)? + + public var synchronizedMovieOutput: MovieOutput? { + didSet { + self.enableSynchronizedEncoding() + } + } + public var synchronizedEncodingDebug = false { + didSet { + self.synchronizedMovieOutput?.synchronizedEncodingDebug = self.synchronizedEncodingDebug + } + } + let conditionLock = NSCondition() + var readingShouldWait = false + var videoInputStatusObserver: NSKeyValueObservation? + var audioInputStatusObserver: NSKeyValueObservation? + let maxFPS: Float? + lazy var framebufferGenerator = FramebufferGenerator() + + public var useRealtimeThreads = false + public var transcodingOnly = false { + didSet { + if transcodingOnly, let movieOutput = synchronizedMovieOutput, let transform = asset.tracks(withMediaType: .video).first?.preferredTransform { + movieOutput.preferredTransform = transform + } + } + } + var timebaseInfo = mach_timebase_info_data_t() + var currentThread: Thread? 
+ + var totalFramesSent = 0 + var totalFrameTimeDuringCapture: Double = 0.0 + + var audioSettings: [String: Any]? + + var movieFramebuffer: Framebuffer? + public var framebufferUserInfo: [AnyHashable: Any]? + public var processSteps: [PictureInputProcessStep]? + + #if DEBUG + public var debugRenderInfo: String = "" + #endif + // TODO: Someone will have to add back in the AVPlayerItem logic, because I don't know how that works - public init(asset:AVAsset, playAtActualSpeed:Bool = false, loop:Bool = false) throws { + public init(asset: AVAsset, videoComposition: AVVideoComposition?, playAtActualSpeed: Bool = false, loop: Bool = false, playrate: Double = 1.0, audioSettings: [String: Any]? = nil, maxFPS: Float? = nil) throws { + debugPrint("movie input init \(asset)") + self.asset = asset + self.videoComposition = videoComposition self.playAtActualSpeed = playAtActualSpeed self.loop = loop - self.yuvConversionShader = crashOnShaderCompileFailure("MovieInput"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} + self.playrate = playrate + self.yuvConversionShader = crashOnShaderCompileFailure("MovieInput") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionFullRangeFragmentShader) } + self.audioSettings = audioSettings + self.maxFPS = maxFPS + } + + public convenience init(url: URL, playAtActualSpeed: Bool = false, loop: Bool = false, playrate: Double = 1.0, audioSettings: [String: Any]? 
= nil) throws { + let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true)] + let inputAsset = AVURLAsset(url: url, options: inputOptions) + try self.init(asset: inputAsset, videoComposition: nil, playAtActualSpeed: playAtActualSpeed, loop: loop, playrate: playrate, audioSettings: audioSettings) + } + + deinit { + debugPrint("movie input deinit \(asset)") - assetReader = try AVAssetReader(asset:self.asset) + self.movieFramebuffer?.unlock() + self.cancel() - let outputSettings:[String:AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String):NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] - let readerVideoTrackOutput = AVAssetReaderTrackOutput(track:self.asset.tracks(withMediaType: AVMediaTypeVideo)[0], outputSettings:outputSettings) - readerVideoTrackOutput.alwaysCopiesSampleData = false - assetReader.add(readerVideoTrackOutput) - // TODO: Audio here + self.videoInputStatusObserver?.invalidate() + self.audioInputStatusObserver?.invalidate() } - - public convenience init(url:URL, playAtActualSpeed:Bool = false, loop:Bool = false) throws { - let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)] - let inputAsset = AVURLAsset(url:url, options:inputOptions) - try self.init(asset:inputAsset, playAtActualSpeed:playAtActualSpeed, loop:loop) + + public var videoOrientation: ImageOrientation { + return asset.imageOrientation ?? .portrait } // MARK: - // MARK: Playback control - - public func start() { - asset.loadValuesAsynchronously(forKeys:["tracks"], completionHandler:{ - DispatchQueue.global(priority:DispatchQueue.GlobalQueuePriority.default).async(execute: { - guard (self.asset.statusOfValue(forKey: "tracks", error:nil) == .loaded) else { return } - - guard self.assetReader.startReading() else { - print("Couldn't start reading") - return - } - - var readerVideoTrackOutput:AVAssetReaderOutput? 
= nil; - - for output in self.assetReader.outputs { - if(output.mediaType == AVMediaTypeVideo) { - readerVideoTrackOutput = output; - } + + public func start(atTime: CMTime, duration: CMTime? = nil, isTrimming: Bool = false) { + if !isTrimming { + requestedStartTime = atTime + } else { + trimmedStartTime = atTime + trimmedDuration = duration + } + + self.start() + } + + @objc public func start() { + if let currentThread = self.currentThread, + currentThread.isExecuting, + !currentThread.isCancelled { + // If the current thread is running and has not been cancelled, bail. + return + } + // Cancel the thread just to be safe in the event we somehow get here with the thread still running. + self.currentThread?.cancel() + + self.currentThread = Thread(target: self, selector: #selector(beginReading), object: nil) + self.currentThread?.start() + } + + public func cancel() { + self.currentThread?.cancel() + self.currentThread = nil + (self.audioEncodingTarget as? MovieOutput)?.cancelRecodingImmediately() + synchronizedEncodingDebugPrint("MovieInput cancel") + } + + public func pause() { + self.cancel() + self.requestedStartTime = self.currentTime + synchronizedEncodingDebugPrint("MovieInput pause") + } + + public func pauseWithoutCancel() { + requestedStartTime = currentTime + conditionLock.lock() + readingShouldWait = true + conditionLock.unlock() + synchronizedEncodingDebugPrint("MovieInput pauseWithoutCancel") + } + + public func resume() { + conditionLock.lock() + readingShouldWait = false + conditionLock.signal() + conditionLock.unlock() + synchronizedEncodingDebugPrint("MovieInput resume") + } + + // MARK: - + // MARK: Internal processing functions + + func createReader() -> AVAssetReader? 
{ + do { + let outputSettings: [String: AnyObject] = + [(kCVPixelBufferPixelFormatTypeKey as String): NSNumber(value: Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] + + let assetReader = try AVAssetReader(asset: self.asset) + + try NSObject.catchException { + if self.videoComposition == nil { + let readerVideoTrackOutput = AVAssetReaderTrackOutput(track: self.asset.tracks(withMediaType: .video).first!, outputSettings: outputSettings) + readerVideoTrackOutput.alwaysCopiesSampleData = false + assetReader.add(readerVideoTrackOutput) + } else { + let readerVideoTrackOutput = AVAssetReaderVideoCompositionOutput(videoTracks: self.asset.tracks(withMediaType: .video), videoSettings: outputSettings) + readerVideoTrackOutput.videoComposition = self.videoComposition + readerVideoTrackOutput.alwaysCopiesSampleData = false + assetReader.add(readerVideoTrackOutput) } - while (self.assetReader.status == .reading) { - self.readNextVideoFrame(from:readerVideoTrackOutput!) + if let audioTrack = self.asset.tracks(withMediaType: .audio).first, + let _ = self.audioEncodingTarget { + let readerAudioTrackOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: self.audioSettings) + readerAudioTrackOutput.alwaysCopiesSampleData = false + assetReader.add(readerAudioTrackOutput) } - if (self.assetReader.status == .completed) { - self.assetReader.cancelReading() - - if (self.loop) { - // TODO: Restart movie processing + self.startTime = self.requestedStartTime + if let startTime = self.requestedStartTime ?? 
self.trimmedStartTime { + if let trimmedDuration = self.trimmedDuration, trimmedDuration.seconds > 0, CMTimeAdd(startTime, trimmedDuration) <= self.asset.duration { + assetReader.timeRange = CMTimeRange(start: startTime, duration: trimmedDuration) } else { - self.endProcessing() + assetReader.timeRange = CMTimeRange(start: startTime, duration: .positiveInfinity) } } - }) - }) + } + + self.requestedStartTime = nil + self.currentTime = nil + self.actualStartTime = nil + + return assetReader + } catch { + print("ERROR: Unable to create asset reader: \(error)") + } + return nil } - public func cancel() { + @objc func beginReading() { + if let startProcessingCallback = startProcessingCallback { + DispatchQueue.main.sync { + startProcessingCallback() + } + } + + let thread = Thread.current + + mach_timebase_info(&timebaseInfo) + + if useRealtimeThreads { + self.configureThread() + } else if playAtActualSpeed { + thread.qualityOfService = .userInitiated + } else { + // This includes synchronized encoding since the above vars will be disabled for it. + thread.qualityOfService = .default + } + + guard let assetReader = self.createReader() else { + completion?(MovieInputError.cannotCreateAssetReader) + return // A return statement in this frame will end thread execution. + } + + do { + try NSObject.catchException { + guard assetReader.startReading() else { + print("ERROR: Unable to start reading: \(String(describing: assetReader.error))") + self.completion?(assetReader.error) + return + } + } + } catch { + print("ERROR: Unable to start reading: \(error)") + completion?(error) + return + } + + var readerVideoTrackOutput: AVAssetReaderOutput? + var readerAudioTrackOutput: AVAssetReaderOutput? 
+ + for output in assetReader.outputs { + if output.mediaType == .video { + readerVideoTrackOutput = output + } + if output.mediaType == .audio { + readerAudioTrackOutput = output + } + } + + while assetReader.status == .reading { + if thread.isCancelled { break } + autoreleasepool { + if let movieOutput = self.synchronizedMovieOutput { + self.conditionLock.lock() + if self.readingShouldWait { + self.synchronizedEncodingDebugPrint("Disable reading") + self.conditionLock.wait() + self.synchronizedEncodingDebugPrint("Enable reading") + } + self.conditionLock.unlock() + + if movieOutput.assetWriterVideoInput.isReadyForMoreMediaData { + self.readNextVideoFrame(with: assetReader, from: readerVideoTrackOutput!) + } + if movieOutput.assetWriterAudioInput?.isReadyForMoreMediaData ?? false { + if let readerAudioTrackOutput = readerAudioTrackOutput { + self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) + } + } + } else { + self.readNextVideoFrame(with: assetReader, from: readerVideoTrackOutput!) + if let readerAudioTrackOutput = readerAudioTrackOutput, + self.audioEncodingTarget?.readyForNextAudioBuffer() ?? true { + self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) + } + } + } + } + assetReader.cancelReading() - self.endProcessing() + + let readerPostAction = { + // Since only the main thread will cancel and create threads jump onto it to prevent + // the current thread from being cancelled in between the below if statement and creating the new thread. + DispatchQueue.main.async { + assetReader.cancelReading() + + // Start the video over so long as it wasn't cancelled. 
+ if self.loop && !thread.isCancelled { + self.currentThread = Thread(target: self, selector: #selector(self.beginReading), object: nil) + self.currentThread?.start() + } else { + self.synchronizedEncodingDebugPrint("MovieInput finished reading") + self.synchronizedEncodingDebugPrint("MovieInput total frames sent: \(self.totalFramesSent)") + self.delegate?.didFinishMovie() + self.completion?(nil) + if thread.isCancelled && self.synchronizedMovieOutput != nil { + self.synchronizedMovieOutput?.cancelRecording() + } + } + } + } + + if synchronizedMovieOutput != nil { + // Make sure all image processing tasks are finished when encoding + sharedImageProcessingContext.runOperationAsynchronously(readerPostAction) + } else { + readerPostAction() + } } - func endProcessing() { + func readNextVideoFrame(with assetReader: AVAssetReader, from videoTrackOutput: AVAssetReaderOutput) { + guard let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() else { + if let movieOutput = self.synchronizedMovieOutput { + MovieOutput.movieProcessingContext.runOperationAsynchronously { + // Documentation: "Clients that are monitoring each input's readyForMoreMediaData value must call markAsFinished on an input when they are done + // appending buffers to it. This is necessary to prevent other inputs from stalling, as they may otherwise wait forever + // for that input's media data, attempting to complete the ideal interleaving pattern." + movieOutput.markIsFinishedAfterProcessing = true + } + } + return + } + + if delegate != nil { + sharedImageProcessingContext.runOperationSynchronously { [weak self] in + self?.delegate?.didReadVideoFrame(sampleBuffer) + } + } + + var currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) + currentTime = currentSampleTime + + var duration = asset.duration // Only used for the progress block so its accuracy is not critical + if let startTime = startTime { + // Make sure our samples start at kCMTimeZero if the video was started midway.
+ currentSampleTime = CMTimeSubtract(currentSampleTime, startTime) + if let trimmedDuration = trimmedDuration, startTime.seconds > 0, CMTimeAdd(startTime, trimmedDuration) <= duration { + duration = trimmedDuration + } else { + duration = CMTimeSubtract(duration, startTime) + } + } + progress?(currentSampleTime.seconds / duration.seconds) + + if transcodingOnly, let movieOutput = synchronizedMovieOutput { + movieOutput.processVideoBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: false) + return + } + + // NOTE: When calculating frames per second, floating point may be rounded, so we have to add tolerance manually + if let fps = maxFPS, let currentTime = currentTime, (currentSampleTime.seconds - currentTime.seconds) < 1 / Double(fps) - 0.0000001 { + return + } + + if synchronizedMovieOutput != nil { + // For synchronized transcoding, separate AVAssetReader thread and OpenGL thread to improve performance + sharedImageProcessingContext.runOperationAsynchronously { [weak self] in + self?.processNextVideoSampleOnGLThread(sampleBuffer, currentSampleTime: currentSampleTime) + CMSampleBufferInvalidate(sampleBuffer) + } + } else { + processNextVideoSampleOnGLThread(sampleBuffer, currentSampleTime: currentSampleTime) + CMSampleBufferInvalidate(sampleBuffer) + } } - - // MARK: - - // MARK: Internal processing functions - - func readNextVideoFrame(from videoTrackOutput:AVAssetReaderOutput) { - if ((assetReader.status == .reading) && !videoEncodingIsFinished) { - if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() { - if (playAtActualSpeed) { - // Do this outside of the video processing queue to not slow that down while waiting - let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) - let differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime) - let currentActualTime = CFAbsoluteTimeGetCurrent() - - let frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame) - let actualTimeDifference = currentActualTime -
previousActualFrameTime - - if (frameTimeDifference > actualTimeDifference) { - usleep(UInt32(round(1000000.0 * (frameTimeDifference - actualTimeDifference)))) - } - - previousFrameTime = currentSampleTime - previousActualFrameTime = CFAbsoluteTimeGetCurrent() - } - - sharedImageProcessingContext.runOperationSynchronously{ - self.process(movieFrame:sampleBuffer) - CMSampleBufferInvalidate(sampleBuffer) - } + func processNextVideoSampleOnGLThread(_ sampleBuffer: CMSampleBuffer, currentSampleTime: CMTime) { + synchronizedEncodingDebugPrint("Process video frame input. Time:\(CMTimeGetSeconds(currentSampleTime))") + + if playAtActualSpeed { + let currentSampleTimeNanoseconds = Int64(currentSampleTime.seconds * 1_000_000_000 / playrate) + let currentActualTime = DispatchTime.now() + + if actualStartTime == nil { + actualStartTime = currentActualTime + } + + // Determine how much time we need to wait in order to display the frame at the right currentActualTime such that it will match the currentSampleTime. + // The reason we subtract the actualStartTime from the currentActualTime is so the actual time starts at zero relative to the video start. + let delay = currentSampleTimeNanoseconds - Int64(currentActualTime.uptimeNanoseconds - actualStartTime!.uptimeNanoseconds) + + // print("currentSampleTime: \(currentSampleTimeNanoseconds) currentTime: \((currentActualTime.uptimeNanoseconds-self.actualStartTime!.uptimeNanoseconds)) delay: \(delay)") + + if delay > 0 { + mach_wait_until(mach_absolute_time() + nanosToAbs(UInt64(delay))) } else { - if (!loop) { - videoEncodingIsFinished = true - if (videoEncodingIsFinished) { - self.endProcessing() - } + // This only happens if we aren't given enough processing time for playback + // but is necessary otherwise the playback will never catch up to its timeline. + // If we weren't adhering to the sample timeline and used the old timing method + // the video would still lag during an event like this.
+ // print("Dropping frame in order to catch up") + return + } + } + + sharedImageProcessingContext.runOperationSynchronously { + self.process(movieFrame: sampleBuffer) + } + } + + func readNextAudioSample(with assetReader: AVAssetReader, from audioTrackOutput: AVAssetReaderOutput) { + let shouldInvalidate = !transcodingOnly + guard let sampleBuffer = audioTrackOutput.copyNextSampleBuffer() else { + if let movieOutput = self.synchronizedMovieOutput { + MovieOutput.movieProcessingContext.runOperationAsynchronously { + movieOutput.flushPendingAudioBuffers(shouldInvalidateSampleWhenDone: shouldInvalidate) + movieOutput.audioEncodingIsFinished = true + movieOutput.assetWriterAudioInput?.markAsFinished() } } + return + } + + self.synchronizedEncodingDebugPrint("Process audio sample input. Time:\(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)))") + + if synchronizedMovieOutput != nil { + MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in + guard let self = self else { return } + self.audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: shouldInvalidate) + } + } else { + audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: shouldInvalidate) } -// else if (synchronizedMovieWriter != nil) { -// if (assetReader.status == .Completed) { -// self.endProcessing() -// } -// } - } - func process(movieFrame frame:CMSampleBuffer) { + func process(movieFrame frame: CMSampleBuffer) { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(frame) let movieFrame = CMSampleBufferGetImageBuffer(frame)! 
- -// processingFrameTime = currentSampleTime - self.process(movieFrame:movieFrame, withSampleTime:currentSampleTime) + + self.process(movieFrame: movieFrame, withSampleTime: currentSampleTime) } - func process(movieFrame:CVPixelBuffer, withSampleTime:CMTime) { - let bufferHeight = CVPixelBufferGetHeight(movieFrame) - let bufferWidth = CVPixelBufferGetWidth(movieFrame) - CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - - let conversionMatrix = colorConversionMatrix601FullRangeDefault - // TODO: Get this color query working -// if let colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, nil) { -// if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == .EqualTo) { -// _preferredConversion = kColorConversion601FullRange -// } else { -// _preferredConversion = kColorConversion709 -// } -// } else { -// _preferredConversion = kColorConversion601FullRange -// } - - let startTime = CFAbsoluteTimeGetCurrent() - - let luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) - luminanceFramebuffer.lock() - glActiveTexture(GLenum(GL_TEXTURE0)) - glBindTexture(GLenum(GL_TEXTURE_2D), luminanceFramebuffer.texture) - glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), 0, GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(movieFrame, 0)) - - let chrominanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) - chrominanceFramebuffer.lock() - glActiveTexture(GLenum(GL_TEXTURE1)) - glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceFramebuffer.texture) - glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE_ALPHA, 
GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), 0, GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(movieFrame, 1)) - - let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) - - convertYUVToRGB(shader:self.yuvConversionShader, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer, resultFramebuffer:movieFramebuffer, colorConversionMatrix:conversionMatrix) - CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - - movieFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(withSampleTime)) - self.updateTargetsWithFramebuffer(movieFramebuffer) + func process(movieFrame: CVPixelBuffer, withSampleTime: CMTime) { + let startTime = CACurrentMediaTime() + var outputFramebuffer: Framebuffer? + if let processSteps = processSteps, !processSteps.isEmpty { + outputFramebuffer = framebufferGenerator.processAndGenerateFromBuffer(movieFrame, frameTime: withSampleTime, processSteps: processSteps, videoOrientation: asset.originalOrientation ?? 
.portrait) + } else { + outputFramebuffer = framebufferGenerator.generateFromYUVBuffer(movieFrame, frameTime: withSampleTime, videoOrientation: videoOrientation) + } + guard let framebuffer = outputFramebuffer else { + print("Cannot generate framebuffer from YUVBuffer") + return + } + #if DEBUG + debugRenderInfo = """ +{ + MovieInput: { + input: \(CVPixelBufferGetWidth(movieFrame))x\(CVPixelBufferGetHeight(movieFrame)), input_type: CVPixelBuffer, + output: { size: \(framebuffer.debugRenderInfo), time: \((CACurrentMediaTime() - startTime) * 1000.0)ms } + } +}, +""" + #endif + framebuffer.userInfo = framebufferUserInfo + self.movieFramebuffer = framebuffer + self.updateTargetsWithFramebuffer(framebuffer) + + if self.runBenchmark || self.synchronizedEncodingDebug { + self.totalFramesSent += 1 + } if self.runBenchmark { - let currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime) - self.numberOfFramesCaptured += 1 + let currentFrameTime = (CACurrentMediaTime() - startTime) self.totalFrameTimeDuringCapture += currentFrameTime - print("Average frame time : \(1000.0 * self.totalFrameTimeDuringCapture / Double(self.numberOfFramesCaptured)) ms") + print("Average frame time : \(1000.0 * self.totalFrameTimeDuringCapture / Double(self.totalFramesSent)) ms") print("Current frame time : \(1000.0 * currentFrameTime) ms") } } - - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { // Not needed for movie inputs } + + public func transmitPreviousFrame() { + sharedImageProcessingContext.runOperationAsynchronously { + if let movieFramebuffer = self.movieFramebuffer { + self.updateTargetsWithFramebuffer(movieFramebuffer) + } + } + } + + public func setAudioEncodingTarget(_ target: AudioEncodingTarget?) 
throws { + audioEncodingTarget = target + + guard let audioEncodingTarget = audioEncodingTarget else { + return + } + + try audioEncodingTarget.activateAudioTrack() + + // Call enableSynchronizedEncoding() again if they didn't set the audioEncodingTarget before setting synchronizedMovieOutput. + if synchronizedMovieOutput != nil { self.enableSynchronizedEncoding() } + } + + // MARK: - + // MARK: Synchronized encoding + + func enableSynchronizedEncoding() { + self.synchronizedMovieOutput?.encodingLiveVideo = false + self.synchronizedMovieOutput?.synchronizedEncodingDebug = self.synchronizedEncodingDebug + self.playAtActualSpeed = false + self.loop = false + + // Subscribe to isReadyForMoreMediaData changes + self.setupObservers() + // Set the initial state of the lock + self.updateLock() + } + + func setupObservers() { + self.videoInputStatusObserver?.invalidate() + self.audioInputStatusObserver?.invalidate() + + guard let movieOutput = self.synchronizedMovieOutput else { return } + + self.videoInputStatusObserver = movieOutput.assetWriterVideoInput.observe(\.isReadyForMoreMediaData, options: [.new, .old]) { [weak self] _, _ in + guard let weakSelf = self else { return } + weakSelf.updateLock() + } + self.audioInputStatusObserver = movieOutput.assetWriterAudioInput?.observe(\.isReadyForMoreMediaData, options: [.new, .old]) { [weak self] _, _ in + guard let weakSelf = self else { return } + weakSelf.updateLock() + } + } + + func updateLock() { + guard let movieOutput = self.synchronizedMovieOutput else { return } + + self.conditionLock.lock() + // Allow reading if either input is able to accept data, prevent reading if both inputs are unable to accept data. + if movieOutput.assetWriterVideoInput.isReadyForMoreMediaData || movieOutput.assetWriterAudioInput?.isReadyForMoreMediaData ??
false { + self.readingShouldWait = false + self.conditionLock.signal() + } else { + self.readingShouldWait = true + } + self.conditionLock.unlock() + } + + // MARK: - + // MARK: Thread configuration + + func configureThread() { + let clock2abs = Double(timebaseInfo.denom) / Double(timebaseInfo.numer) * Double(NSEC_PER_MSEC) + + // http://docs.huihoo.com/darwin/kernel-programming-guide/scheduler/chapter_8_section_4.html + // + // To see the impact of adjusting these values, uncomment the print statement above mach_wait_until() in self.readNextVideoFrame() + // + // Setup for 5 ms of work. + // The anticipated frame render duration is in the 1-3 ms range on an iPhone 6 for 1080p without filters and 1-7 ms range with filters + // If the render duration is allowed to exceed 16ms (the duration of a frame in 60fps video) + // the 60fps video will no longer be playing in real time. + let computation = UInt32(5 * clock2abs) + // Tell the scheduler the next 20 ms of work needs to be done as soon as possible. + let period = UInt32(0 * clock2abs) + // According to the above scheduling chapter this constraint only appears relevant + // if preemptible is set to true and the period is not 0. If this is wrong, please let me know.
+ let constraint = UInt32(5 * clock2abs) + + // print("period: \(period) computation: \(computation) constraint: \(constraint)") + + let THREAD_TIME_CONSTRAINT_POLICY_COUNT = mach_msg_type_number_t(MemoryLayout.size / MemoryLayout.size) + + var policy = thread_time_constraint_policy() + var ret: Int32 + let thread: thread_port_t = pthread_mach_thread_np(pthread_self()) + + policy.period = period + policy.computation = computation + policy.constraint = constraint + policy.preemptible = 0 + + ret = withUnsafeMutablePointer(to: &policy) { + $0.withMemoryRebound(to: integer_t.self, capacity: Int(THREAD_TIME_CONSTRAINT_POLICY_COUNT)) { + thread_policy_set(thread, UInt32(THREAD_TIME_CONSTRAINT_POLICY), $0, THREAD_TIME_CONSTRAINT_POLICY_COUNT) + } + } + + if ret != KERN_SUCCESS { + mach_error("thread_policy_set:", ret) + print("Unable to configure thread") + } + } + + func nanosToAbs(_ nanos: UInt64) -> UInt64 { + return nanos * UInt64(timebaseInfo.denom) / UInt64(timebaseInfo.numer) + } + + func synchronizedEncodingDebugPrint(_ string: String) { + if synchronizedMovieOutput != nil && synchronizedEncodingDebug { print(string) } + } } diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 03bc9593..947dc563 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -1,229 +1,816 @@ import AVFoundation +import CoreImage +import UIKit public protocol AudioEncodingTarget { - func activateAudioTrack() - func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) + func activateAudioTrack() throws + func processAudioBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) + // Note: This is not used for synchronized encoding. 
+ func readyForNextAudioBuffer() -> Bool +} + +public protocol MovieOutputDelegate: AnyObject { + func movieOutputDidStartWriting(_ movieOutput: MovieOutput, at time: CMTime) + func movieOutputWriterError(_ movieOutput: MovieOutput, error: Error) +} + +public extension MovieOutputDelegate { + func movieOutputDidStartWriting(_ movieOutput: MovieOutput, at time: CMTime) {} + func movieOutputWriterError(_ movieOutput: MovieOutput, error: Error) {} +} + +public enum MovieOutputError: Error, CustomStringConvertible { + case startWritingError(assetWriterError: Error?) + case pixelBufferPoolNilError + case activeAudioTrackError + + public var errorDescription: String { + switch self { + case .startWritingError(let assetWriterError): + return "Could not start asset writer: \(String(describing: assetWriterError))" + case .pixelBufferPoolNilError: + return "Asset writer pixel buffer pool was nil. Make sure that your output file doesn't already exist." + case .activeAudioTrackError: + return "cannot active audio track when assetWriter status is not 0" + } + } + + public var description: String { + return "<\(type(of: self)): errorDescription = \(self.errorDescription)>" + } +} + +public enum MovieOutputState: String { + case unknown + case idle + case writing + case finished + case canceled } public class MovieOutput: ImageConsumer, AudioEncodingTarget { + private static let assetWriterQueue = DispatchQueue(label: "com.GPUImage2.MovieOutput.assetWriterQueue", qos: .userInitiated) public let sources = SourceContainer() - public let maximumInputs:UInt = 1 + public let maximumInputs: UInt = 1 - let assetWriter:AVAssetWriter - let assetWriterVideoInput:AVAssetWriterInput - var assetWriterAudioInput:AVAssetWriterInput? 
- - let assetWriterPixelBufferInput:AVAssetWriterInputPixelBufferAdaptor - let size:Size - let colorSwizzlingShader:ShaderProgram - private var isRecording = false - private var videoEncodingIsFinished = false - private var audioEncodingIsFinished = false - private var startTime:CMTime? - private var previousFrameTime = kCMTimeNegativeInfinity - private var previousAudioTime = kCMTimeNegativeInfinity - private var encodingLiveVideo:Bool - var pixelBuffer:CVPixelBuffer? = nil - var renderFramebuffer:Framebuffer! - - public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, settings:[String:AnyObject]? = nil) throws { + public weak var delegate: MovieOutputDelegate? + + public let url: URL + public let fps: Double + public var videoID: String? + public var writerStatus: AVAssetWriter.Status { assetWriter.status } + public var writerError: Error? { assetWriter.error } + private let assetWriter: AVAssetWriter + let assetWriterVideoInput: AVAssetWriterInput + var assetWriterAudioInput: AVAssetWriterInput? + private let assetWriterPixelBufferInput: AVAssetWriterInputPixelBufferAdaptor + public let size: Size + private let colorSwizzlingShader: ShaderProgram + public let needAlignAV: Bool + var videoEncodingIsFinished = false + var audioEncodingIsFinished = false + var markIsFinishedAfterProcessing = false + private var hasVideoBuffer = false + private var hasAuidoBuffer = false + public private(set) var startFrameTime: CMTime? + public private(set) var recordedDuration: CMTime? + private var previousVideoStartTime: CMTime? + private var previousAudioStartTime: CMTime? + private var previousVideoEndTime: CMTime? + private var previousAudioEndTime: CMTime? + + var encodingLiveVideo: Bool { + didSet { + assetWriterVideoInput.expectsMediaDataInRealTime = encodingLiveVideo + assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo + } + } + private var ciFilter: CILookupFilter? 
+ private var cpuCIContext: CIContext? + public private(set) var pixelBuffer: CVPixelBuffer? + public var waitUtilDataIsReadyForLiveVideo = false + public private(set) var renderFramebuffer: Framebuffer! + + public private(set) var audioSettings: [String: Any]? + public private(set) var audioSourceFormatHint: CMFormatDescription? + + public static let movieProcessingContext: OpenGLContext = { + var context: OpenGLContext? + imageProcessingShareGroup = sharedImageProcessingContext.context.sharegroup + sharedImageProcessingContext.runOperationSynchronously { + context = OpenGLContext(queueLabel: "com.GPUImage2.MovieOutput.imageProcess") + } + imageProcessingShareGroup = nil + return context! + }() + public private(set) var audioSampleBufferCache = [CMSampleBuffer]() + public private(set) var videoSampleBufferCache = [CMSampleBuffer]() + public private(set) var frameBufferCache = [Framebuffer]() + public private(set) var cacheBuffersDuration: TimeInterval = 0 + public let disablePixelBufferAttachments: Bool + private var pixelBufferPoolSemaphore = DispatchSemaphore(value: 1) + private var writtenSampleTimes = Set() + + var synchronizedEncodingDebug = false + public private(set) var totalVideoFramesAppended = 0 + public private(set) var totalAudioFramesAppended = 0 + private var observations = [NSKeyValueObservation]() + + deinit { + observations.forEach { $0.invalidate() } + print("movie output deinit \(assetWriter.outputURL)") + } + var shouldWaitForEncoding: Bool { + return !encodingLiveVideo || waitUtilDataIsReadyForLiveVideo + } + var preferredTransform: CGAffineTransform? + private var isProcessing = false + #if DEBUG + public var debugRenderInfo: String = "" + #endif + + public init(URL: Foundation.URL, fps: Double, size: Size, needAlignAV: Bool = true, fileType: AVFileType = .mov, liveVideo: Bool = false, videoSettings: [String: Any]? = nil, videoNaturalTimeScale: CMTimeScale? 
= nil, optimizeForNetworkUse: Bool = false, disablePixelBufferAttachments: Bool = true, audioSettings: [String: Any]? = nil, audioSourceFormatHint: CMFormatDescription? = nil) throws { + print("movie output init \(URL)") + self.url = URL + self.fps = fps + self.needAlignAV = needAlignAV && (audioSettings != nil || audioSourceFormatHint != nil) + if sharedImageProcessingContext.supportsTextureCaches() { self.colorSwizzlingShader = sharedImageProcessingContext.passthroughShader } else { - self.colorSwizzlingShader = crashOnShaderCompileFailure("MovieOutput"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(1), fragmentShader:ColorSwizzlingFragmentShader)} + self.colorSwizzlingShader = crashOnShaderCompileFailure("MovieOutput") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(1), fragmentShader: ColorSwizzlingFragmentShader) } } self.size = size - assetWriter = try AVAssetWriter(url:URL, fileType:fileType) - // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case. - assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000) - var localSettings:[String:AnyObject] - if let settings = settings { - localSettings = settings + assetWriter = try AVAssetWriter(url: URL, fileType: fileType) + if optimizeForNetworkUse { + // NOTE: this is neccessary for streaming play support, but it will slow down finish writing speed + assetWriter.shouldOptimizeForNetworkUse = true + } + + var localSettings: [String: Any] + if let videoSettings = videoSettings { + localSettings = videoSettings } else { - localSettings = [String:AnyObject]() + localSettings = [String: Any]() } - localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? NSNumber(value:size.width) - localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? 
NSNumber(value:size.height) - localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecH264 as NSString + localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? size.width + localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? size.height + localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecType.h264.rawValue - assetWriterVideoInput = AVAssetWriterInput(mediaType:AVMediaTypeVideo, outputSettings:localSettings) + assetWriterVideoInput = AVAssetWriterInput(mediaType: .video, outputSettings: localSettings) assetWriterVideoInput.expectsMediaDataInRealTime = liveVideo + + // You should provide a naturalTimeScale if you have one for the current media. + // Otherwise the asset writer will choose one for you and it may result in misaligned frames. + if let naturalTimeScale = videoNaturalTimeScale { + assetWriter.movieTimeScale = naturalTimeScale + assetWriterVideoInput.mediaTimeScale = naturalTimeScale + // This is set to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case. + assetWriter.movieFragmentInterval = CMTime(seconds: 1, preferredTimescale: naturalTimeScale) + } else { + assetWriter.movieFragmentInterval = CMTime(seconds: 1, preferredTimescale: 1000) + } + encodingLiveVideo = liveVideo // You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA. 
- let sourcePixelBufferAttributesDictionary:[String:AnyObject] = [kCVPixelBufferPixelFormatTypeKey as String:NSNumber(value:Int32(kCVPixelFormatType_32BGRA)), - kCVPixelBufferWidthKey as String:NSNumber(value:size.width), - kCVPixelBufferHeightKey as String:NSNumber(value:size.height)] + let sourcePixelBufferAttributesDictionary: [String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: Int32(kCVPixelFormatType_32BGRA), + kCVPixelBufferWidthKey as String: self.size.width, + kCVPixelBufferHeightKey as String: self.size.height] - assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput:assetWriterVideoInput, sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary) + assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary) assetWriter.add(assetWriterVideoInput) + + self.disablePixelBufferAttachments = disablePixelBufferAttachments + + self.audioSettings = audioSettings + self.audioSourceFormatHint = audioSourceFormatHint } - public func startRecording() { - startTime = nil - sharedImageProcessingContext.runOperationSynchronously{ - self.isRecording = self.assetWriter.startWriting() - - CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput.pixelBufferPool!, &self.pixelBuffer) - - /* AVAssetWriter will use BT.601 conversion matrix for RGB to YCbCr conversion - * regardless of the kCVImageBufferYCbCrMatrixKey value. - * Tagging the resulting video file as BT.601, is the best option right now. - * Creating a proper BT.709 video is not possible at the moment. 
- */ - CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, .shouldPropagate) - CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, .shouldPropagate) - CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, .shouldPropagate) - - let bufferSize = GLSize(self.size) - var cachedTextureRef:CVOpenGLESTexture? = nil - let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, self.pixelBuffer!, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) - let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) - - self.renderFramebuffer = try! Framebuffer(context:sharedImageProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) + public func setupSoftwareLUTFilter(lutImage: UIImage, intensity: Double? = nil, brightnessFactor: Double? = nil, sync: Bool = true) { + let block: () -> Void = { [weak self] in + if self?.cpuCIContext == nil { + let colorSpace = CGColorSpaceCreateDeviceRGB() + let options: [CIContextOption: AnyObject] = [ + .workingColorSpace: colorSpace, + .outputColorSpace: colorSpace, + .useSoftwareRenderer: NSNumber(value: true) + ] + self?.cpuCIContext = CIContext(options: options) + } + self?.ciFilter = CILookupFilter(lutImage: lutImage, intensity: intensity, brightnessFactor: brightnessFactor) + } + if sync { + Self.movieProcessingContext.runOperationSynchronously(block) + } else { + Self.movieProcessingContext.runOperationAsynchronously(block) } } - public func finishRecording(_ completionCallback:(() -> Void)? 
= nil) { - sharedImageProcessingContext.runOperationSynchronously{ - self.isRecording = false - - if (self.assetWriter.status == .completed || self.assetWriter.status == .cancelled || self.assetWriter.status == .unknown) { - sharedImageProcessingContext.runOperationAsynchronously{ - completionCallback?() + public func cleanSoftwareFilter(sync: Bool = true) { + let block: () -> Void = { [weak self] in + self?.ciFilter = nil + } + if sync { + Self.movieProcessingContext.runOperationSynchronously(block) + } else { + Self.movieProcessingContext.runOperationAsynchronously(block) + } + } + + public func startRecording(sync: Bool = false, _ completionCallback:((_ started: Bool, _ error: Error?) -> Void)? = nil) { + // Don't do this work on the movieProcessingContext queue so we don't block it. + // If it does get blocked framebuffers will pile up from live video and after it is no longer blocked (this work has finished) + // we will be able to accept framebuffers but the ones that piled up will come in too quickly resulting in most being dropped. 
+ let block = { [weak self] () -> Void in + do { + guard let self = self else { return } + if self.assetWriter.status == .writing { + completionCallback?(true, nil) + return + } else if self.assetWriter.status == .cancelled { + throw MovieOutputError.startWritingError(assetWriterError: nil) + } + + let observation = self.assetWriter.observe(\.error) { [weak self] writer, _ in + guard let self = self, let error = writer.error else { return } + self.delegate?.movieOutputWriterError(self, error: error) + } + self.observations.append(observation) + + if let preferredTransform = self.preferredTransform { + self.assetWriterVideoInput.transform = preferredTransform + } + print("MovieOutput starting writing...") + var success = false + let assetWriter = self.assetWriter + try NSObject.catchException { + success = assetWriter.startWriting() + } + + if !success { + throw MovieOutputError.startWritingError(assetWriterError: self.assetWriter.error) + } + + // NOTE: pixelBufferPool is not multi-thread safe, and it will be accessed in another thread in order to improve the performance + self.pixelBufferPoolSemaphore.wait() + defer { + self.pixelBufferPoolSemaphore.signal() + } + guard self.assetWriterPixelBufferInput.pixelBufferPool != nil else { + /* + When the pixelBufferPool returns nil, check the following: + 1. the the output file of the AVAssetsWriter doesn't exist. + 2. use the pixelbuffer after calling startSessionAtTime: on the AVAssetsWriter. + 3. the settings of AVAssetWriterInput and AVAssetWriterInputPixelBufferAdaptor are correct. + 4. the present times of appendPixelBuffer uses are not the same. + https://stackoverflow.com/a/20110179/1275014 + */ + throw MovieOutputError.pixelBufferPoolNilError } + + print("MovieOutput started writing") + + completionCallback?(true, nil) + } catch { + self?.assetWriter.cancelWriting() + + print("MovieOutput failed to start writing. 
error:\(error)") + + completionCallback?(false, error) + } + } + + if sync { + Self.movieProcessingContext.runOperationSynchronously(block) + } else { + Self.movieProcessingContext.runOperationAsynchronously(block) + } + } + + public func finishRecording(sync: Bool = false, _ completionCallback:(() -> Void)? = nil) { + print("MovieOutput start finishing writing, optimizeForNetworkUse:\(assetWriter.shouldOptimizeForNetworkUse)") + let block = { + guard self.assetWriter.status == .writing else { + completionCallback?() return } - if ((self.assetWriter.status == .writing) && (!self.videoEncodingIsFinished)) { - self.videoEncodingIsFinished = true - self.assetWriterVideoInput.markAsFinished() + + self.audioEncodingIsFinished = true + self.videoEncodingIsFinished = true + + self.assetWriterAudioInput?.markAsFinished() + self.assetWriterVideoInput.markAsFinished() + + var lastFrameTime: CMTime? + if let lastVideoFrame = self.previousVideoStartTime { + if !self.needAlignAV { + print("MovieOutput start endSession") + lastFrameTime = lastVideoFrame + self.assetWriter.endSession(atSourceTime: lastVideoFrame) + } else if let lastAudioTime = self.previousAudioEndTime, let lastVideoTime = self.previousVideoEndTime { + let endTime = min(lastAudioTime, lastVideoTime) + lastFrameTime = endTime + print("MovieOutput start endSession, last audio end time is:\(lastAudioTime.seconds), last video end time is:\(lastVideoTime.seconds), end time is:\(endTime.seconds)") + self.assetWriter.endSession(atSourceTime: endTime) + } } - if ((self.assetWriter.status == .writing) && (!self.audioEncodingIsFinished)) { - self.audioEncodingIsFinished = true - self.assetWriterAudioInput?.markAsFinished() + + if let lastFrame = lastFrameTime, let startFrame = self.startFrameTime { + self.recordedDuration = lastFrame - startFrame } - - // Why can't I use ?? here for the callback? 
- if let callback = completionCallback { - self.assetWriter.finishWriting(completionHandler: callback) - } else { - self.assetWriter.finishWriting{} - + print("MovieOutput did start finishing writing. Total frames appended video::\(self.totalVideoFramesAppended) audio:\(self.totalAudioFramesAppended)") + // Calling "finishWriting(AVAssetWriter A)" then "startWriting(AVAssetWriter B)" at the same time, + // will cause NSInternalInconsistencyException with error code 0. + // So we need to make sure these two methods will not run at the same time. + let dispatchGroup = DispatchGroup() + dispatchGroup.enter() + self.assetWriter.finishWriting { + print("MovieOutput did finish writing") + dispatchGroup.leave() + completionCallback?() } + dispatchGroup.wait() + } + if sync { + Self.movieProcessingContext.runOperationSynchronously(block) + } else { + Self.movieProcessingContext.runOperationAsynchronously(block) } } - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { - defer { - framebuffer.unlock() + public func cancelRecording(sync: Bool = false, _ completionCallback:(() -> Void)? = nil) { + let block = { + self.audioEncodingIsFinished = true + self.videoEncodingIsFinished = true + print("MovieOutput cancel writing, state:\(self.assetWriter.status.rawValue)") + if self.assetWriter.status == .writing { + self.pixelBufferPoolSemaphore.wait() + self.assetWriter.cancelWriting() + self.pixelBufferPoolSemaphore.signal() + } + completionCallback?() } - guard isRecording else { return } - // Ignore still images and other non-video updates (do I still need this?) 
- guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } - // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case - guard (frameTime != previousFrameTime) else { return } + if sync { + Self.movieProcessingContext.runOperationSynchronously(block) + } else { + Self.movieProcessingContext.runOperationAsynchronously(block) + } + } + + public func cancelRecodingImmediately() { + self.audioEncodingIsFinished = true + self.videoEncodingIsFinished = true + } + + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { + glFinish() - if (startTime == nil) { - if (assetWriter.status != .writing) { - assetWriter.startWriting() - } - - assetWriter.startSession(atSourceTime: frameTime) - startTime = frameTime + if previousVideoStartTime == nil { + debugPrint("MovieOutput starting process new framebuffer when previousFrameTime == nil") } - // TODO: Run the following on an internal movie recording dispatch queue, context - guard (assetWriterVideoInput.isReadyForMoreMediaData || (!encodingLiveVideo)) else { - debugPrint("Had to drop a frame at time \(frameTime)") - return + let work = { [weak self] in + _ = self?._processFramebuffer(framebuffer) + sharedImageProcessingContext.runOperationAsynchronously { + framebuffer.unlock() + } } + if encodingLiveVideo { + // This is done asynchronously to reduce the amount of work done on the sharedImageProcessingContext queue, + // so we can decrease the risk of frames being dropped by the camera. I believe it is unlikely a backlog of framebuffers will occur + // since the framebuffers come in much slower than during synchronized encoding. + sharedImageProcessingContext.runOperationAsynchronously(work) + } else { + // This is done synchronously to prevent framebuffers from piling up during synchronized encoding. 
+ // If we don't force the sharedImageProcessingContext queue to wait for this frame to finish processing it will + // keep sending frames whenever isReadyForMoreMediaData = true but the movieProcessingContext queue would run when the system wants it to. + sharedImageProcessingContext.runOperationSynchronously(work) + } + } + + func _processFramebuffer(_ framebuffer: Framebuffer) -> Bool { + guard assetWriter.status == .writing, !videoEncodingIsFinished else { + print("MovieOutput Guard fell through, dropping video frame. writer.state:\(self.assetWriter.status.rawValue) videoEncodingIsFinished:\(self.videoEncodingIsFinished)") + return false + } + + framebuffer.lock() + frameBufferCache.append(framebuffer) + hasVideoBuffer = true - if !sharedImageProcessingContext.supportsTextureCaches() { - let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, assetWriterPixelBufferInput.pixelBufferPool!, &pixelBuffer) - guard ((pixelBuffer != nil) && (pixelBufferStatus == kCVReturnSuccess)) else { return } + guard _canStartWritingVideo() else { + return true } - renderIntoPixelBuffer(pixelBuffer!, framebuffer:framebuffer) + if needAlignAV && startFrameTime == nil { + _decideStartTime() + } - if (!assetWriterPixelBufferInput.append(pixelBuffer!, withPresentationTime:frameTime)) { - debugPrint("Problem appending pixel buffer at time: \(frameTime)") + #if DEBUG + let startTime = CACurrentMediaTime() + let bufferCount = frameBufferCache.count + defer { + debugRenderInfo = """ +{ + MovieOutput: { + input: \(framebuffer.debugRenderInfo), input_count: \(bufferCount), + output: { size: \(size.debugRenderInfo), type: AVAssetWriter }, + time: \((CACurrentMediaTime() - startTime) * 1000.0)ms + } +}, +""" } + #endif - CVPixelBufferUnlockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - if !sharedImageProcessingContext.supportsTextureCaches() { - pixelBuffer = nil + var processedBufferCount = 0 + for framebuffer in frameBufferCache { + defer { 
framebuffer.unlock() } + do { + // Ignore still images and other non-video updates (do I still need this?) + guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { + print("MovieOutput Cannot get timestamp from framebuffer, dropping frame") + continue + } + + if previousVideoStartTime == nil && !needAlignAV { + // This resolves black frames at the beginning. Any samples received before this time will be edited out. + assetWriter.startSession(atSourceTime: frameTime) + startFrameTime = frameTime + print("MovieOutput did start writing at:\(frameTime.seconds)") + delegate?.movieOutputDidStartWriting(self, at: frameTime) + } + previousVideoStartTime = frameTime + + pixelBuffer = nil + pixelBufferPoolSemaphore.wait() + defer { + pixelBufferPoolSemaphore.signal() + } + guard assetWriterPixelBufferInput.pixelBufferPool != nil else { + print("MovieOutput WARNING: PixelBufferInput pool is nil") + continue + } + let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, assetWriterPixelBufferInput.pixelBufferPool!, &pixelBuffer) + guard pixelBuffer != nil && pixelBufferStatus == kCVReturnSuccess else { + print("MovieOutput WARNING: Unable to create pixel buffer, dropping frame") + continue + } + try renderIntoPixelBuffer(pixelBuffer!, framebuffer: framebuffer) + guard assetWriterVideoInput.isReadyForMoreMediaData || shouldWaitForEncoding else { + print("MovieOutput WARNING: Had to drop a frame at time \(frameTime)") + continue + } + while !assetWriterVideoInput.isReadyForMoreMediaData && shouldWaitForEncoding && !videoEncodingIsFinished { + synchronizedEncodingDebugPrint("MovieOutput Video waiting...") + // Better to poll isReadyForMoreMediaData often since when it does become true + // we don't want to risk letting framebuffers pile up in between poll intervals.
+ usleep(100000) // 0.1 seconds + if markIsFinishedAfterProcessing { + synchronizedEncodingDebugPrint("MovieOutput set videoEncodingIsFinished to true after processing") + markIsFinishedAfterProcessing = false + videoEncodingIsFinished = true + } + } + + // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. + guard !_checkSampleTimeDuplicated(frameTime) else { + processedBufferCount += 1 + continue + } + + let bufferInput = assetWriterPixelBufferInput + var appendResult = false + synchronizedEncodingDebugPrint("MovieOutput appending video framebuffer at:\(frameTime.seconds)") + // NOTE: when NSException was triggered within NSObject.catchException, the object inside the block seems cannot be released correctly, so be careful not to trigger error, or directly use "self." + try NSObject.catchException { + appendResult = bufferInput.append(self.pixelBuffer!, withPresentationTime: frameTime) + } + if !appendResult { + print("MovieOutput WARNING: Trouble appending pixel buffer at time: \(frameTime) \(String(describing: self.assetWriter.error))") + continue + } + totalVideoFramesAppended += 1 + processedBufferCount += 1 + previousVideoEndTime = frameTime + _videoFrameDuration() + if videoEncodingIsFinished { + assetWriterVideoInput.markAsFinished() + } + } catch { + print("MovieOutput WARNING: Trouble appending pixel buffer \(error)") + } } + frameBufferCache.removeFirst(processedBufferCount) + return true } - func renderIntoPixelBuffer(_ pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) { - if !sharedImageProcessingContext.supportsTextureCaches() { - renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:GLSize(self.size)) - renderFramebuffer.lock() + func _checkSampleTimeDuplicated(_ sampleTime: CMTime) -> Bool { + let sampleTimeInSeconds = sampleTime.seconds + if writtenSampleTimes.contains(sampleTimeInSeconds) { + 
print("MovieOutput WARNING: sampleTime:\(sampleTime) is duplicated, dropped!") + return true } + // Avoid too large collection + if writtenSampleTimes.count > 100 { + writtenSampleTimes.removeAll() + } + writtenSampleTimes.insert(sampleTimeInSeconds) + return false + } + + func renderIntoPixelBuffer(_ pixelBuffer: CVPixelBuffer, framebuffer: Framebuffer) throws { + // Is this the first pixel buffer we have recieved? + // NOTE: this will cause strange frame brightness blinking for the first few seconds, be careful about using this. + if renderFramebuffer == nil && !disablePixelBufferAttachments { + CVBufferSetAttachment(pixelBuffer, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, .shouldPropagate) + CVBufferSetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, .shouldPropagate) + CVBufferSetAttachment(pixelBuffer, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, .shouldPropagate) + } + + let bufferSize = GLSize(self.size) + var cachedTextureRef: CVOpenGLESTexture? + let ret = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) + if ret != kCVReturnSuccess { + print("MovieOutput ret error: \(ret), pixelBuffer: \(pixelBuffer)") + return + } + let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) 
+ + renderFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: bufferSize, textureOnly: false, overriddenTexture: cachedTexture) renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.black) - CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - renderQuadWithShader(colorSwizzlingShader, uniformSettings:ShaderUniformSettings(), vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) + CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + renderQuadWithShader(colorSwizzlingShader, uniformSettings: ShaderUniformSettings(), vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [framebuffer.texturePropertiesForOutputRotation(.noRotation)], context: sharedImageProcessingContext) if sharedImageProcessingContext.supportsTextureCaches() { glFinish() } else { glReadPixels(0, 0, renderFramebuffer.size.width, renderFramebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(pixelBuffer)) - renderFramebuffer.unlock() } + CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + } + + // MARK: Append buffer directly from CMSampleBuffer + public func processVideoBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) { + let work = { [weak self] in + _ = self?._processVideoSampleBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: shouldInvalidateSampleWhenDone) + } + + if encodingLiveVideo { + Self.movieProcessingContext.runOperationSynchronously(work) + } else { + work() + } + } + + func _processVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) -> Bool { + defer { + if shouldInvalidateSampleWhenDone { + CMSampleBufferInvalidate(sampleBuffer) + } + } + + guard 
assetWriter.status == .writing, !videoEncodingIsFinished else { + print("MovieOutput Guard fell through, dropping video frame. writer.state:\(self.assetWriter.status.rawValue) videoEncodingIsFinished:\(self.videoEncodingIsFinished)") + return false + } + + hasVideoBuffer = true + videoSampleBufferCache.append(sampleBuffer) + + guard _canStartWritingVideo() else { + print("MovieOutput Audio not started yet") + return true + } + + if needAlignAV && startFrameTime == nil { + _decideStartTime() + } + + var processedBufferCount = 0 + for sampleBuffer in videoSampleBufferCache { + let frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + + if previousVideoStartTime == nil && !needAlignAV { + // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. + assetWriter.startSession(atSourceTime: frameTime) + startFrameTime = frameTime + print("MovieOutput did start writing at:\(frameTime.seconds)") + delegate?.movieOutputDidStartWriting(self, at: frameTime) + } + + previousVideoStartTime = frameTime + + guard assetWriterVideoInput.isReadyForMoreMediaData || self.shouldWaitForEncoding else { + print("MovieOutput Had to drop a frame at time \(frameTime)") + continue + } + + while !assetWriterVideoInput.isReadyForMoreMediaData && shouldWaitForEncoding && !videoEncodingIsFinished { + self.synchronizedEncodingDebugPrint("MovieOutput Video waiting...") + // Better to poll isReadyForMoreMediaData often since when it does become true + // we don't want to risk letting framebuffers pile up in between poll intervals. 
+ usleep(100000) // 0.1 seconds + } + synchronizedEncodingDebugPrint("MovieOutput appending video sample buffer at:\(frameTime.seconds)") + guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { + print("MovieOutput WARNING: Cannot get pixel buffer from sampleBuffer:\(sampleBuffer)") + continue + } + if !assetWriterVideoInput.isReadyForMoreMediaData { + print("MovieOutput WARNING: video input is not ready at time: \(frameTime))") + continue + } + + // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. + guard !_checkSampleTimeDuplicated(frameTime) else { + processedBufferCount += 1 + continue + } + + if let ciFilter = ciFilter { + let originalImage = CIImage(cvPixelBuffer: buffer) + if let outputImage = ciFilter.applyFilter(on: originalImage), let ciContext = cpuCIContext { + ciContext.render(outputImage, to: buffer) + } + } + let bufferInput = assetWriterPixelBufferInput + do { + var appendResult = false + try NSObject.catchException { + appendResult = bufferInput.append(buffer, withPresentationTime: frameTime) + } + if !appendResult { + print("MovieOutput WARNING: Trouble appending pixel buffer at time: \(frameTime) \(String(describing: assetWriter.error))") + continue + } + totalVideoFramesAppended += 1 + processedBufferCount += 1 + previousVideoEndTime = frameTime + _videoFrameDuration() + } catch { + print("MovieOutput WARNING: Trouble appending video sample buffer at time: \(frameTime) \(error)") + } + } + videoSampleBufferCache.removeFirst(processedBufferCount) + return true } // MARK: - // MARK: Audio support - - public func activateAudioTrack() { - // TODO: Add ability to set custom output settings - assetWriterAudioInput = AVAssetWriterInput(mediaType:AVMediaTypeAudio, outputSettings:nil) - assetWriter.add(assetWriterAudioInput!) 
+ + public func activateAudioTrack() throws { + guard assetWriter.status != .writing && assetWriter.status != .completed else { + throw MovieOutputError.activeAudioTrackError + } + assetWriterAudioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: self.audioSettings, sourceFormatHint: self.audioSourceFormatHint) + let assetWriter = self.assetWriter + let audioInpupt = self.assetWriterAudioInput! + try NSObject.catchException { + assetWriter.add(audioInpupt) + } assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo } - public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) { - guard let assetWriterAudioInput = assetWriterAudioInput else { return } + public func processAudioBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) { + let work = { [weak self] in + _ = self?._processAudioSampleBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: shouldInvalidateSampleWhenDone) + } + if encodingLiveVideo { + Self.movieProcessingContext.runOperationAsynchronously(work) + } else { + work() + } + } + + func _processAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) -> Bool { + guard assetWriter.status == .writing, !audioEncodingIsFinished, let audioInput = assetWriterAudioInput else { + print("MovieOutput Guard fell through, dropping audio sample, writer.state:\(assetWriter.status.rawValue) audioEncodingIsFinished:\(audioEncodingIsFinished)") + return false + } + + // Always accept audio buffer and cache it at first, since video frame might delay a bit + hasAuidoBuffer = true + audioSampleBufferCache.append(sampleBuffer) - sharedImageProcessingContext.runOperationSynchronously{ - let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) - if (self.startTime == nil) { - if (self.assetWriter.status != .writing) { - self.assetWriter.startWriting() - } - - self.assetWriter.startSession(atSourceTime: currentSampleTime) - self.startTime = currentSampleTime + guard 
_canStartWritingAuido() else { + print("MovieOutput Process audio sample but first video frame is not ready yet. Time:\(CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer).seconds)") + return true + } + + if startFrameTime == nil && needAlignAV { + _decideStartTime() + } + + var processedBufferCount = 0 + for audioBuffer in audioSampleBufferCache { + let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer) + previousAudioStartTime = currentSampleTime + guard audioInput.isReadyForMoreMediaData || shouldWaitForEncoding else { + print("MovieOutput Had to delay a audio sample at time \(currentSampleTime)") + continue } - guard (assetWriterAudioInput.isReadyForMoreMediaData || (!self.encodingLiveVideo)) else { - return + while !audioInput.isReadyForMoreMediaData && shouldWaitForEncoding && !audioEncodingIsFinished { + print("MovieOutput Audio waiting...") + usleep(100000) + if !audioInput.isReadyForMoreMediaData { + synchronizedEncodingDebugPrint("MovieOutput Audio still not ready, skip this runloop...") + continue + } } - if (!assetWriterAudioInput.append(sampleBuffer)) { - print("Trouble appending audio sample buffer") + synchronizedEncodingDebugPrint("Process audio sample output. 
Time:\(currentSampleTime.seconds)") + do { + var appendResult = false + try NSObject.catchException { + appendResult = audioInput.append(audioBuffer) + } + if !appendResult { + print("MovieOutput WARNING: Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") + continue + } + previousAudioEndTime = currentSampleTime + CMSampleBufferGetDuration(sampleBuffer) + totalAudioFramesAppended += 1 + if shouldInvalidateSampleWhenDone { + CMSampleBufferInvalidate(audioBuffer) + } + processedBufferCount += 1 + } catch { + print("MovieOutput WARNING: Trouble appending audio sample buffer: \(error)") + continue } } + audioSampleBufferCache.removeFirst(processedBufferCount) + return true + } + + func _videoFrameDuration() -> CMTime { + CMTime(seconds: 1 / fps, preferredTimescale: CMTimeScale(NSEC_PER_SEC)) + } + + func _canStartWritingVideo() -> Bool { + !needAlignAV || (needAlignAV && hasAuidoBuffer && hasVideoBuffer) + } + + func _canStartWritingAuido() -> Bool { + (!needAlignAV && previousVideoStartTime != nil) || (needAlignAV && hasAuidoBuffer && hasVideoBuffer) + } + + func _decideStartTime() { + guard let audioBuffer = audioSampleBufferCache.first else { + print("MovieOutput ERROR: empty audio buffer cache, cannot start session") + return + } + let videoTime: CMTime? = { + if let videoBuffer = videoSampleBufferCache.first { + return CMSampleBufferGetOutputPresentationTimeStamp(videoBuffer) + } else if let frameBuffer = frameBufferCache.first { + return frameBuffer.timingStyle.timestamp?.asCMTime + } else { + return nil + } + }() + guard videoTime != nil else { + print("MovieOutput ERROR: empty video time, cannot start session") + return + } + let audioTime = CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer) + let startFrameTime = max(audioTime, videoTime!) 
+ assetWriter.startSession(atSourceTime: startFrameTime) + self.startFrameTime = startFrameTime + delegate?.movieOutputDidStartWriting(self, at: startFrameTime) + } + + public func flushPendingAudioBuffers(shouldInvalidateSampleWhenDone: Bool) { + guard let lastBuffer = audioSampleBufferCache.popLast() else { return } + _ = _processAudioSampleBuffer(lastBuffer, shouldInvalidateSampleWhenDone: shouldInvalidateSampleWhenDone) + } + + // Note: This is not used for synchronized encoding, only live video. + public func readyForNextAudioBuffer() -> Bool { + return true + } + + func synchronizedEncodingDebugPrint(_ string: String) { + if synchronizedEncodingDebug && !encodingLiveVideo { print(string) } } } - public extension Timestamp { - public init(_ time:CMTime) { + init(_ time: CMTime) { self.value = time.value self.timescale = time.timescale - self.flags = TimestampFlags(rawValue:time.flags.rawValue) + self.flags = TimestampFlags(rawValue: time.flags.rawValue) self.epoch = time.epoch } - public var asCMTime:CMTime { + var asCMTime: CMTime { get { - return CMTimeMakeWithEpoch(value, timescale, epoch) + return CMTimeMakeWithEpoch(value: value, timescale: timescale, epoch: epoch) } } } diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift new file mode 100644 index 00000000..8118a756 --- /dev/null +++ b/framework/Source/iOS/MoviePlayer.swift @@ -0,0 +1,779 @@ +// +// MoviePlayer.swift +// DayCam +// +// Created by 陈品霖 on 2019/1/30. +// Copyright © 2019 rocry. All rights reserved. 
+// +import AVFoundation + +public protocol MoviePlayerDelegate: class { + func moviePlayerDidReadPixelBuffer(_ pixelBuffer: CVPixelBuffer, time: CMTime) +} + +public typealias MoviePlayerTimeObserverCallback = (CMTime) -> Void + +public struct MoviePlayerTimeObserver { + let targetTime: CMTime + let callback: MoviePlayerTimeObserverCallback + let observerID: String + init(targetTime: CMTime, callback: @escaping MoviePlayerTimeObserverCallback) { + self.targetTime = targetTime + self.callback = callback + observerID = UUID.init().uuidString + } +} + +public class MoviePlayer: AVQueuePlayer, ImageSource { + static var looperDict = [MoviePlayer: AVPlayerLooper]() + public let targets = TargetContainer() + public var runBenchmark = false + public var logEnabled = false + public weak var delegate: MoviePlayerDelegate? + public var startTime: CMTime? + public var actualStartTime: CMTime { startTime ?? .zero } + public var endTime: CMTime? + public var actualEndTime: CMTime { endTime ?? CMTimeSubtract(assetDuration, actualStartTime) } + public var actualDuration: CMTime { actualEndTime - actualStartTime } + /// Whether to loop play. + public var loop = false + private var previousPlayerActionAtItemEnd: AVPlayer.ActionAtItemEnd? + public var asset: AVAsset? { return playableItem?.asset } + public private(set) var isPlaying = false + public var lastPlayerItem: AVPlayerItem? + public var playableItem: AVPlayerItem? { currentItem ?? lastPlayerItem } + public var processSteps: [PictureInputProcessStep]? + + var displayLink: CADisplayLink? + + lazy var framebufferGenerator = FramebufferGenerator() + + var totalTimeObservers = [MoviePlayerTimeObserver]() + var timeObserversQueue = [MoviePlayerTimeObserver]() + + var timebaseInfo = mach_timebase_info_data_t() + var totalFramesSent = 0 + var totalFrameTime: Double = 0.0 + public var dropFrameBeforeTime: CMTime? 
+ public var playrate: Float = 1.0 + private lazy var assetDurationMap = [AVAsset: CMTime]() + public var assetDuration: CMTime { + if let currentAsset = asset { + if let cachedDuration = assetDurationMap[currentAsset] { + return cachedDuration + } else if currentAsset.statusOfValue(forKey: "duration", error: nil) == .loaded { + let duration = currentAsset.duration + assetDurationMap[currentAsset] = duration + return duration + } else { + return .zero + } + } else { + return .zero + } + } + public var isReadyToPlay: Bool { + return status == .readyToPlay + } + public var videoOrientation: ImageOrientation { + guard let asset = asset else { return .portrait } + return asset.originalOrientation ?? .portrait + } + // NOTE: be careful, this property might block your thread since it needs to access currentTime + public var didPlayToEnd: Bool { + return currentItem == nil || (currentItem?.currentTime() ?? .zero >= assetDuration) + } + public var hasTarget: Bool { targets.count > 0 } + + var framebufferUserInfo: [AnyHashable: Any]? + var observations = [NSKeyValueObservation]() + + #if DEBUG + public var debugRenderInfo: String = "" + #endif + + struct SeekingInfo: Equatable { + let time: CMTime + let toleranceBefore: CMTime + let toleranceAfter: CMTime + let shouldPlayAfterSeeking: Bool + + public static func == (lhs: MoviePlayer.SeekingInfo, rhs: MoviePlayer.SeekingInfo) -> Bool { + return lhs.time.seconds == rhs.time.seconds + && lhs.toleranceBefore.seconds == rhs.toleranceBefore.seconds + && lhs.toleranceAfter.seconds == rhs.toleranceAfter.seconds + && lhs.shouldPlayAfterSeeking == rhs.shouldPlayAfterSeeking + } + } + var nextSeeking: SeekingInfo? + public var isSeeking = false + public var enableVideoOutput = false + public private(set) var isProcessing = false + private var needAddItemAfterDidEndNotify = false + private lazy var pendingNewItems = [AVPlayerItem]() + private var pendingSeekInfo: SeekingInfo? 
+ private var shouldUseLooper: Bool { + // NOTE: if video duration too short, it will cause OOM. So it is better to use "actionItemAtEnd=.none + playToEnd + seek" solution. + return false + } + private var didTriggerEndTimeObserver = false + private var didRegisterPlayerNotification = false + private var didNotifyEndedItem: AVPlayerItem? + private var retryPlaying = false + /// Return the current item. If currentItem was played to end, will return next one + public var actualCurrentItem: AVPlayerItem? { + let playerItems = items() + guard playerItems.count > 0 else { return nil } + if didPlayToEnd { + if playerItems.count == 1 { + if actionAtItemEnd == .advance { + return nil + } else { + return playerItems[0] + } + } else { + return playerItems[1] + } + } else { + return playerItems[0] + } + } + + public override init() { + print("[MoviePlayer] init") + // Make sure player it intialized on the main thread, or it might cause KVO crash + assert(Thread.isMainThread) + super.init() + } + + deinit { + print("[MoviePlayer] deinit \(String(describing: asset))") + assert(observations.isEmpty, "observers must be removed before deinit") + pause() + displayLink?.invalidate() + if hasTarget { + sharedImageProcessingContext.framebufferCache.purgeAllUnassignedFramebuffers() + } + } + + // MARK: Data Source + public func replaceCurrentItem(with url: URL) { + replaceCurrentItem(with: url, enableVideoOutput: enableVideoOutput) + } + + public func replaceCurrentItem(with url: URL, enableVideoOutput: Bool) { + let inputAsset = AVURLAsset(url: url) + let playerItem = AVPlayerItem(asset: inputAsset, automaticallyLoadedAssetKeys: [AVURLAssetPreferPreciseDurationAndTimingKey]) + replaceCurrentItem(with: playerItem, enableVideoOutput: enableVideoOutput) + } + + override public func insert(_ item: AVPlayerItem, after afterItem: AVPlayerItem?) 
{ + insert(item, after: afterItem, enableVideoOutput: enableVideoOutput) + } + + public func insert(_ item: AVPlayerItem, after afterItem: AVPlayerItem?, enableVideoOutput: Bool) { + if enableVideoOutput { + _setupPlayerItemVideoOutput(for: item) + } + if item.audioTimePitchAlgorithm == .lowQualityZeroLatency { + item.audioTimePitchAlgorithm = _audioPitchAlgorithm() + } + lastPlayerItem = item + self.enableVideoOutput = enableVideoOutput + _setupPlayerObservers(playerItem: item) + if shouldDelayAddPlayerItem && didNotifyEndedItem != nil && didNotifyEndedItem != item && didNotifyEndedItem != items().last { + needAddItemAfterDidEndNotify = true + pendingNewItems.append(item) + print("[MoviePlayer] pending insert. pendingNewItems:\(pendingNewItems)") + } else { + // Append previous pending items at first + if needAddItemAfterDidEndNotify { + needAddItemAfterDidEndNotify = false + pendingNewItems.forEach { insert($0, after: nil) } + pendingNewItems.removeAll() + } + remove(item) + super.insert(item, after: afterItem) + print("[MoviePlayer] insert new item(\(item.duration.seconds)s):\(item) afterItem:\(String(describing: afterItem)) enableVideoOutput:\(enableVideoOutput) itemsAfter:\(items().count)") + } + didNotifyEndedItem = nil + } + + public func seekItem(_ item: AVPlayerItem, to time: CMTime, toleranceBefore: CMTime = .zero, toleranceAfter: CMTime = .zero, completionHandler: ((Bool) -> Void)? 
= nil) { + print("[MoviePlayer] [player] seek item:\(item) to time:\(time.seconds) toleranceBefore:\(toleranceBefore.seconds) toleranceAfter:\(toleranceAfter.seconds)") + let seekCurrentItem = item == currentItem + guard !seekCurrentItem || !isSeeking else { return } + if seekCurrentItem { + isSeeking = true + dropFrameBeforeTime = time + _setupDisplayLinkIfNeeded() + } + item.seek(to: time, toleranceBefore: toleranceBefore, toleranceAfter: toleranceAfter) { [weak self] success in + if seekCurrentItem { + self?.isSeeking = false + } + completionHandler?(success) + } + didNotifyEndedItem = nil + } + + override public func replaceCurrentItem(with item: AVPlayerItem?) { + replaceCurrentItem(with: item, enableVideoOutput: enableVideoOutput) + } + + public func replaceCurrentItem(with item: AVPlayerItem?, enableVideoOutput: Bool) { + didNotifyEndedItem = nil + dropFrameBeforeTime = nil + lastPlayerItem = item + // Stop looping before replacing + if shouldUseLooper && MoviePlayer.looperDict[self] != nil { + removeAllItems() + } + if let item = item { + if enableVideoOutput { + _setupPlayerItemVideoOutput(for: item) + } + if item.audioTimePitchAlgorithm == .lowQualityZeroLatency { + item.audioTimePitchAlgorithm = _audioPitchAlgorithm() + } + _setupPlayerObservers(playerItem: item) + } else { + _removePlayerObservers() + } + self.enableVideoOutput = enableVideoOutput + if shouldDelayAddPlayerItem && item != nil { + needAddItemAfterDidEndNotify = true + pendingNewItems.append(item!) + print("[MoviePlayer] pending replace. pendingNewItems:\(pendingNewItems)") + } else { + super.replaceCurrentItem(with: item) + } + print("[MoviePlayer] replace current item with newItem(\(item?.duration.seconds ?? 
0)s)):\(String(describing: item)) enableVideoOutput:\(enableVideoOutput) itemsAfter:\(items().count) ") + } + + public func replayLastItem() { + guard let playerItem = lastPlayerItem else { return } + replaceCurrentItem(with: playerItem) + if playerItem.currentTime() != actualStartTime { + seekToTime(actualStartTime, shouldPlayAfterSeeking: true) + } else { + play() + } + print("[MoviePlayer] replay last item:\(playerItem)") + } + + override public func remove(_ item: AVPlayerItem) { + super.remove(item) + pendingNewItems.removeAll { $0 == item } + print("[MoviePlayer] remove item:\(item)") + } + + override public func removeAllItems() { + _stopLoopingIfNeeded() + super.removeAllItems() + pendingNewItems.removeAll() + print("[MoviePlayer] remove all items") + } + + override public func advanceToNextItem() { + super.advanceToNextItem() + print("[MoviePlayer] advance to next item") + } + + // MARK: - + // MARK: Playback control + + override public func play() { + if displayLink == nil || didPlayToEnd { + start() + } else { + resume() + } + } + + override public func playImmediately(atRate rate: Float) { + playrate = rate + start() + } + + public func start() { + if actionAtItemEnd == .advance, currentItem == nil, let playerItem = lastPlayerItem { + insert(playerItem, after: nil) + } + + guard currentItem != nil else { + // Sometime the player.items() seems still 0 even if insert was called, but it won't result in crash, just print a error log for information. + print("[MoviePlayer] ERROR! 
player currentItem is nil") + return + } + isPlaying = true + isProcessing = false +// print("[MoviePlayer] start duration:\(String(describing: asset?.duration.seconds)) items:\(items())") + _setupDisplayLinkIfNeeded() + _resetTimeObservers() + didNotifyEndedItem = nil + if shouldUseLooper { + if let playerItem = lastPlayerItem { + MoviePlayer.looperDict[self]?.disableLooping() + let looper = AVPlayerLooper(player: self, templateItem: playerItem, timeRange: CMTimeRange(start: actualStartTime, end: actualEndTime)) + MoviePlayer.looperDict[self] = looper + } + rate = playrate + } else { + if loop { + actionAtItemEnd = .none + } + if currentTime() != actualStartTime { + seekToTime(actualStartTime, shouldPlayAfterSeeking: true) + } else { + rate = playrate + } + } + } + + public func resume() { + isPlaying = true + rate = playrate + print("movie player resume \(String(describing: asset))") + } + + override public func pause() { + isPlaying = false + guard rate != 0 else { return } + print("movie player pause \(String(describing: asset))") + super.pause() + } + + public func stop() { + pause() + print("movie player stop \(String(describing: asset))") + _timeObserversUpdate { [weak self] in + self?.timeObserversQueue.removeAll() + } + displayLink?.invalidate() + displayLink = nil + isSeeking = false + nextSeeking = nil + dropFrameBeforeTime = nil + assetDurationMap.removeAll() + MoviePlayer.looperDict[self]?.disableLooping() + MoviePlayer.looperDict[self] = nil + } + + public func seekToTime(_ time: TimeInterval, shouldPlayAfterSeeking: Bool) { + seekToTime(CMTime(seconds: time, preferredTimescale: 48000), shouldPlayAfterSeeking: shouldPlayAfterSeeking) + } + + public func seekToTime(_ targetTime: CMTime, shouldPlayAfterSeeking: Bool) { + if shouldPlayAfterSeeking { + // 0.1s has 3 frames tolerance for 30 FPS video, it should be enough if there is no sticky video + let toleranceTime = CMTime(seconds: 0.1, preferredTimescale: 600) + isPlaying = true + nextSeeking = 
SeekingInfo(time: targetTime, toleranceBefore: toleranceTime, toleranceAfter: toleranceTime, shouldPlayAfterSeeking: shouldPlayAfterSeeking) + } else { + nextSeeking = SeekingInfo(time: targetTime, toleranceBefore: .zero, toleranceAfter: .zero, shouldPlayAfterSeeking: shouldPlayAfterSeeking) + } + _setupDisplayLinkIfNeeded() + if assetDuration <= .zero { + print("[MoviePlayer] cannot seek since assetDuration is 0. currentItem:\(String(describing: currentItem))") + } else { + actuallySeekToTime() + } + } + + /// Cleanup all player resource and observers. This must be called before deinit, or it might crash on iOS 10 due to observation assertion. + public func cleanup() { + pendingNewItems.removeAll() + stop() + _removePlayerObservers() + } + + func actuallySeekToTime() { + // Avoid seeking choppy when fast seeking + // https://developer.apple.com/library/archive/qa/qa1820/_index.html#//apple_ref/doc/uid/DTS40016828 + guard !isSeeking, let seekingInfo = nextSeeking, isReadyToPlay else { return } + isSeeking = true + seek(to: seekingInfo.time, toleranceBefore: seekingInfo.toleranceBefore, toleranceAfter: seekingInfo.toleranceAfter) { [weak self] _ in +// debugPrint("movie player did seek to time:\(seekingInfo.time.seconds) success:\(success) shouldPlayAfterSeeking:\(seekingInfo.shouldPlayAfterSeeking)") + guard let self = self else { return } + if seekingInfo.shouldPlayAfterSeeking && self.isPlaying { + self._resetTimeObservers() + self.rate = self.playrate + } + + self.isSeeking = false + + if seekingInfo != self.nextSeeking { + self.actuallySeekToTime() + } else { + self.nextSeeking = nil + } + } + } + + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { + // Not needed for movie inputs + } + + public func addTimeObserver(at time: CMTime, callback: @escaping MoviePlayerTimeObserverCallback) -> MoviePlayerTimeObserver { + let timeObserver = MoviePlayerTimeObserver(targetTime: time, callback: callback) + _timeObserversUpdate { [weak self] in 
+ guard let self = self else { return } + self.totalTimeObservers.append(timeObserver) + self.totalTimeObservers = self.totalTimeObservers.sorted { lhs, rhs in + return lhs.targetTime > rhs.targetTime + } + if self.isPlaying { + if let lastIndex = self.timeObserversQueue.firstIndex(where: { $0.targetTime >= time }) { + self.timeObserversQueue.insert(timeObserver, at: lastIndex) + } else { + self.timeObserversQueue.append(timeObserver) + } + } + } + return timeObserver + } + + public func removeTimeObserver(timeObserver: MoviePlayerTimeObserver) { + _timeObserversUpdate { [weak self] in + self?.totalTimeObservers.removeAll { $0.observerID == timeObserver.observerID } + self?.timeObserversQueue.removeAll { $0.observerID == timeObserver.observerID } + } + } + + public func removeAllTimeObservers() { + _timeObserversUpdate { [weak self] in + self?.timeObserversQueue.removeAll() + self?.totalTimeObservers.removeAll() + } + } + + public func setLoopEnabled(_ enabled: Bool, timeRange: CMTimeRange) { + print("MoviePlayer set loop enable: \(enabled) time range: \(timeRange)") + if enabled { + if previousPlayerActionAtItemEnd == nil { + previousPlayerActionAtItemEnd = actionAtItemEnd + } + actionAtItemEnd = .none + startTime = timeRange.start + endTime = timeRange.end + assert(timeRange.start >= .zero || timeRange.end > .zero && CMTimeSubtract(timeRange.end, assetDuration) < .zero, "timerange is invalid. timerange:\(timeRange) assetDuration:\(assetDuration)") + } else { + actionAtItemEnd = previousPlayerActionAtItemEnd ?? 
.advance + startTime = nil + endTime = nil + } + _resetTimeObservers() + loop = enabled + } + + public func changePlayRate(to rate: Float) { + let ct = currentTime() + playrate = rate + items().forEach { + if $0.audioTimePitchAlgorithm == .lowQualityZeroLatency { + $0.audioTimePitchAlgorithm = _audioPitchAlgorithm() + } + } + let toleranceTime = CMTime(seconds: 0.1, preferredTimescale: 600) + nextSeeking = SeekingInfo(time: ct, toleranceBefore: toleranceTime, toleranceAfter: toleranceTime, shouldPlayAfterSeeking: true) + resume() + } +} + +private extension MoviePlayer { + func _setupDisplayLinkIfNeeded() { + if displayLink == nil { + displayLink = CADisplayLink(target: self, selector: #selector(displayLinkCallback)) + displayLink?.add(to: RunLoop.main, forMode: .common) + } + } + + func _stopLoopingIfNeeded() { + if loop, let looper = MoviePlayer.looperDict[self] { + looper.disableLooping() + MoviePlayer.looperDict[self] = nil + print("[MoviePlayer] stop looping item)") + } + } + + func _setupPlayerItemVideoOutput(for item: AVPlayerItem) { + guard !item.outputs.contains(where: { $0 is AVPlayerItemVideoOutput }) else { return } + let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey): kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] + let videoOutput = AVPlayerItemVideoOutput(outputSettings: outputSettings) + videoOutput.suppressesPlayerRendering = true + item.add(videoOutput) + } + + func _setupPlayerObservers(playerItem: AVPlayerItem?) 
{ + _removePlayerObservers(removeNotificationCenter: !didRegisterPlayerNotification) + if !didRegisterPlayerNotification { + NotificationCenter.default.addObserver(self, selector: #selector(playerDidPlayToEnd), name: .AVPlayerItemDidPlayToEndTime, object: nil) + NotificationCenter.default.addObserver(self, selector: #selector(playerStalled), name: .AVPlayerItemPlaybackStalled, object: nil) + didRegisterPlayerNotification = true + } + observations.append(observe(\.status) { [weak self] _, _ in + self?.playerStatusDidChange() + }) + observations.append(observe(\.rate) { [weak self] _, _ in + self?.playerRateDidChange() + }) + if let item = playerItem { + observations.append(item.observe(\AVPlayerItem.status) { [weak self] _, _ in + self?.playerItemStatusDidChange(item) + }) + } + } + + func _removePlayerObservers(removeNotificationCenter: Bool = true) { + if removeNotificationCenter { + NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: nil) + NotificationCenter.default.removeObserver(self, name: .AVPlayerItemPlaybackStalled, object: nil) + didRegisterPlayerNotification = false + } + observations.forEach { $0.invalidate() } + observations.removeAll() + } + + /// NOTE: all time observer operations will be executed in main queue + func _timeObserversUpdate(_ block: @escaping () -> Void) { + if Thread.isMainThread { + block() + } else { + DispatchQueue.main.async { + block() + } + } + } + + func _resetTimeObservers() { + didTriggerEndTimeObserver = false + _timeObserversUpdate { [weak self] in + guard let self = self else { return } + self.timeObserversQueue.removeAll() + for observer in self.totalTimeObservers { + guard observer.targetTime >= self.actualStartTime && observer.targetTime <= self.actualEndTime else { + continue + } + self.timeObserversQueue.append(observer) + } + } + } + + // Both algorithm has the highest quality + // Except .spectral has pitch correction, which is suitable for fast/slow play rate + func 
_audioPitchAlgorithm() -> AVAudioTimePitchAlgorithm { + return abs(playrate - 1.0) < .ulpOfOne ? .varispeed : .spectral + } + + func onCurrentItemPlayToEnd() { + if loop && isPlaying { + start() + } + } + + func playerRateDidChange() { +// debugPrint("rate change to:\(player.rate) asset:\(asset) status:\(player.status.rawValue)") + resumeIfNeeded() + } + + func playerStatusDidChange() { + debugPrint("[MoviePlayer] Player status change to:\(status.rawValue) asset:\(String(describing: asset))") + resumeIfNeeded() + } + + func playerItemStatusDidChange(_ playerItem: AVPlayerItem) { + debugPrint("[MoviePlayer] PlayerItem status change to:\(playerItem.status.rawValue) asset:\(playerItem.asset), error: \(playerItem.error)") + if playerItem == currentItem && playerItem.status == .readyToPlay { + resumeIfNeeded() + } + } + + func resumeIfNeeded() { + guard isReadyToPlay && isPlaying == true else { return } + if nextSeeking != nil { + actuallySeekToTime() + } else if rate != playrate { + rate = playrate + } + } + + // MARK: - + // MARK: Internal processing functions + + func _process(_ pixelBuffer: CVPixelBuffer, at timeForDisplay: CMTime) { + // Out of range when looping, skip process. So that it won't show unexpected frames. + if loop && isPlaying && (timeForDisplay < actualStartTime || timeForDisplay >= actualEndTime) { + print("[MoviePlayer] Skipped frame at time:\(timeForDisplay.seconds) is larger than range: [\(actualStartTime.seconds), \(actualEndTime.seconds)]") + return + } + + // There are still some previous frames coming after seeking. 
So we drop these frames + if isPlaying, let dropFrameBeforeTime = dropFrameBeforeTime, CMTimeCompare(timeForDisplay, dropFrameBeforeTime) <= 0 { + print("[MoviePlayer] drop frame at time:\(timeForDisplay.seconds), dropFrameBeforeTime:\(dropFrameBeforeTime.seconds)") + return + } + dropFrameBeforeTime = nil + +// print("[MoviePlayer] read frame at time:\(timeForDisplay.seconds)") + + delegate?.moviePlayerDidReadPixelBuffer(pixelBuffer, time: timeForDisplay) + + let startTime = CACurrentMediaTime() + if runBenchmark || logEnabled { + totalFramesSent += 1 + } + defer { + if runBenchmark { + let currentFrameTime = (CACurrentMediaTime() - startTime) + totalFrameTime += currentFrameTime + print("[MoviePlayer] Average frame time :\(1000.0 * totalFrameTime / Double(totalFramesSent)) ms") + print("[MoviePlayer] Current frame time :\(1000.0 * currentFrameTime) ms") + } + } + + guard hasTarget else { return } + let newFramebuffer: Framebuffer? + if let processSteps = processSteps, !processSteps.isEmpty { + newFramebuffer = framebufferGenerator.processAndGenerateFromBuffer(pixelBuffer, frameTime: timeForDisplay, processSteps: processSteps, videoOrientation: asset?.originalOrientation ?? 
.portrait) + } else { + newFramebuffer = framebufferGenerator.generateFromYUVBuffer(pixelBuffer, frameTime: timeForDisplay, videoOrientation: videoOrientation) + } + guard let framebuffer = newFramebuffer else { return } + framebuffer.userInfo = framebufferUserInfo + + #if DEBUG + debugRenderInfo = """ +{ + MoviePlayer: { + input: \(CVPixelBufferGetWidth(pixelBuffer))x\(CVPixelBufferGetHeight(pixelBuffer)), input_type: CVPixelBuffer, + output: \(framebuffer.debugRenderInfo), + time: \((CACurrentMediaTime() - startTime) * 1000.0)ms + } +}, +""" + #endif + + updateTargetsWithFramebuffer(framebuffer) + } + + @objc func displayLinkCallback(displayLink: CADisplayLink) { + if !items().isEmpty { + if retryPlaying { + retryPlaying = false + print("[MoviePlayer] Resume playing succeed") + } + } + guard currentItem?.status == .readyToPlay else { return } + let playTime = currentTime() + guard playTime.seconds > 0 else { return } + + guard let videoOutput = videoOutput else { + _notifyTimeObserver(with: playTime) + return + } + guard !isProcessing, videoOutput.hasNewPixelBuffer(forItemTime: playTime) == true else { return } + isProcessing = true + var timeForDisplay: CMTime = .zero + guard let pixelBuffer = videoOutput.copyPixelBuffer(forItemTime: playTime, itemTimeForDisplay: &timeForDisplay) else { + print("[MoviePlayer] Failed to copy pixel buffer at time:\(playTime)") + isProcessing = false + return + } + sharedImageProcessingContext.runOperationAsynchronously { [weak self] in + defer { + self?.isProcessing = false + } + self?._process(pixelBuffer, at: playTime) + self?._notifyTimeObserver(with: playTime) + } + } + + var videoOutput: AVPlayerItemVideoOutput? { + return currentItem?.outputs.first(where: { $0 is AVPlayerItemVideoOutput }) as? AVPlayerItemVideoOutput + } + + /// Wait for didPlayToEnd notification and add a new playerItem. 
+ var shouldDelayAddPlayerItem: Bool { + // NOTE: AVQueuePlayer will remove new added item immediately after inserting if last item has already played to end. + // The workaround solution is to add new item after playerDidPlayToEnd notification. + return didPlayToEnd && items().count == 1 && !shouldUseLooper + } + + @objc func playerDidPlayToEnd(notification: Notification) { + print("[MoviePlayer] did play to end. currentTime:\(currentTime().seconds) notification:\(notification) items:\(items())") + guard (notification.object as? AVPlayerItem) == currentItem else { return } + didNotifyEndedItem = currentItem + if needAddItemAfterDidEndNotify { + DispatchQueue.main.async { [weak self] in + guard let self = self else { return } + self.needAddItemAfterDidEndNotify = false + self.pendingNewItems.forEach { self.insert($0, after: nil) } + self.pendingNewItems.removeAll() + if self.isPlaying { + self.play() + } + } + } else { + DispatchQueue.main.async { [weak self] in + self?.onCurrentItemPlayToEnd() + } + } + } + + @objc func playerStalled(notification: Notification) { + print("[MoviePlayer] player was stalled. currentTime:\(currentTime().seconds) notification:\(notification)") + guard (notification.object as? AVPlayerItem) == currentItem else { return } + } + + func _notifyTimeObserver(with sampleTime: CMTime) { + // Directly callback time play to end observer since it needs to be callbacked more timely, ex. seeking to start + if sampleTime > actualEndTime && !shouldUseLooper && endTime != nil && !didTriggerEndTimeObserver { + didTriggerEndTimeObserver = true + onCurrentItemPlayToEnd() + } + + // Other observers might has delay since it needs to wait for main thread + _timeObserversUpdate { [weak self] in + while let lastObserver = self?.timeObserversQueue.last, lastObserver.targetTime <= sampleTime { + self?.timeObserversQueue.removeLast() + lastObserver.callback(sampleTime) + } + } + } +} + +public extension AVAsset { + var imageOrientation: ImageOrientation? 
{ + guard let videoTrack = tracks(withMediaType: AVMediaType.video).first else { + return nil + } + let trackTransform = videoTrack.preferredTransform + switch (trackTransform.a, trackTransform.b, trackTransform.c, trackTransform.d) { + case (1, 0, 0, 1): return .portrait + case (1, 0, 0, -1), (-1, 0, 0, -1): return .portraitUpsideDown + case (0, 1, -1, 0): return .landscapeLeft + case (0, -1, 1, 0): return .landscapeRight + default: + print("ERROR: unsupport transform!\(trackTransform)") + return .portrait + } + } + + // For original orientation is different with preferred image orientation when it is landscape + var originalOrientation: ImageOrientation? { + guard let videoTrack = tracks(withMediaType: AVMediaType.video).first else { + return nil + } + let trackTransform = videoTrack.preferredTransform + switch (trackTransform.a, trackTransform.b, trackTransform.c, trackTransform.d) { + case (1, 0, 0, 1): return .portrait + case (1, 0, 0, -1), (-1, 0, 0, -1): return .portraitUpsideDown + case (0, 1, -1, 0): return .landscapeRight + case (0, -1, 1, 0): return .landscapeLeft + default: + print("ERROR: unsupport transform!\(trackTransform)") + return .portrait + } + } +} diff --git a/framework/Source/iOS/OpenGLContext.swift b/framework/Source/iOS/OpenGLContext.swift index fc5c81d8..010db2b4 100755 --- a/framework/Source/iOS/OpenGLContext.swift +++ b/framework/Source/iOS/OpenGLContext.swift @@ -2,43 +2,50 @@ import OpenGLES import UIKit // TODO: Find a way to warn people if they set this after the context has been created -var imageProcessingShareGroup:EAGLSharegroup? = nil +var imageProcessingShareGroup: EAGLSharegroup? 
+ +var dispatchQueKeyValueCounter = 81 public class OpenGLContext: SerialDispatch { - lazy var framebufferCache:FramebufferCache = { - return FramebufferCache(context:self) + public private(set) lazy var framebufferCache: FramebufferCache = { + return FramebufferCache(context: self) }() - var shaderCache:[String:ShaderProgram] = [:] - public let standardImageVBO:GLuint - var textureVBOs:[Rotation:GLuint] = [:] + var shaderCache: [String: ShaderProgram] = [:] + public let standardImageVBO: GLuint + var textureVBOs: [Rotation: GLuint] = [:] - let context:EAGLContext + public let context: EAGLContext - lazy var passthroughShader:ShaderProgram = { - return crashOnShaderCompileFailure("OpenGLContext"){return try self.programForVertexShader(OneInputVertexShader, fragmentShader:PassthroughFragmentShader)} + public private(set) lazy var passthroughShader: ShaderProgram = { + return crashOnShaderCompileFailure("OpenGLContext") { return try self.programForVertexShader(OneInputVertexShader, fragmentShader: PassthroughFragmentShader) } }() - lazy var coreVideoTextureCache:CVOpenGLESTextureCache = { - var newTextureCache:CVOpenGLESTextureCache? = nil + public private(set) lazy var coreVideoTextureCache: CVOpenGLESTextureCache = { + var newTextureCache: CVOpenGLESTextureCache? let err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, nil, self.context, nil, &newTextureCache) return newTextureCache! }() - - public let serialDispatchQueue:DispatchQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.processingQueue", attributes: []) + public let serialDispatchQueue: DispatchQueue public let dispatchQueueKey = DispatchSpecificKey() + public let dispatchQueueKeyValue: Int + public var executeStartTime: TimeInterval? + public lazy var _debugPipelineOnePassRenderInfo = [String]() // MARK: - // MARK: Initialization and teardown - init() { - serialDispatchQueue.setSpecific(key:dispatchQueueKey, value:81) + init(queueLabel: String? 
= nil) { + serialDispatchQueue = DispatchQueue(label: (queueLabel ?? "com.sunsetlakesoftware.GPUImage.processingQueue"), qos: .userInitiated) + dispatchQueueKeyValue = dispatchQueKeyValueCounter + serialDispatchQueue.setSpecific(key: dispatchQueueKey, value: dispatchQueueKeyValue) + dispatchQueKeyValueCounter += 1 - let generatedContext:EAGLContext? + let generatedContext: EAGLContext? if let shareGroup = imageProcessingShareGroup { - generatedContext = EAGLContext(api:.openGLES2, sharegroup:shareGroup) + generatedContext = EAGLContext(api: .openGLES2, sharegroup: shareGroup) } else { - generatedContext = EAGLContext(api:.openGLES2) + generatedContext = EAGLContext(api: .openGLES2) } guard let concreteGeneratedContext = generatedContext else { @@ -48,7 +55,7 @@ public class OpenGLContext: SerialDispatch { self.context = concreteGeneratedContext EAGLContext.setCurrent(concreteGeneratedContext) - standardImageVBO = generateVBO(for:standardImageVertices) + standardImageVBO = generateVBO(for: standardImageVertices) generateTextureVBOs() glDisable(GLenum(GL_DEPTH_TEST)) @@ -59,8 +66,7 @@ public class OpenGLContext: SerialDispatch { // MARK: Rendering public func makeCurrentContext() { - if (EAGLContext.current() != self.context) - { + if EAGLContext.current() != self.context { EAGLContext.setCurrent(self.context) } } @@ -69,37 +75,44 @@ public class OpenGLContext: SerialDispatch { self.context.presentRenderbuffer(Int(GL_RENDERBUFFER)) } - // MARK: - // MARK: Device capabilities - func supportsTextureCaches() -> Bool { -#if (arch(i386) || arch(x86_64)) && os(iOS) + public func supportsTextureCaches() -> Bool { +#if targetEnvironment(simulator) return false // Simulator glitches out on use of texture caches #else return true // Every iOS version and device that can run Swift can handle texture caches #endif } - public var maximumTextureSizeForThisDevice:GLint {get { return _maximumTextureSizeForThisDevice } } - private lazy var _maximumTextureSizeForThisDevice:GLint = { + 
public var maximumTextureSizeForThisDevice: GLint { get { return _maximumTextureSizeForThisDevice } } + private lazy var _maximumTextureSizeForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_TEXTURE_SIZE) }() - public var maximumTextureUnitsForThisDevice:GLint {get { return _maximumTextureUnitsForThisDevice } } - private lazy var _maximumTextureUnitsForThisDevice:GLint = { + public var maximumTextureUnitsForThisDevice: GLint { get { return _maximumTextureUnitsForThisDevice } } + private lazy var _maximumTextureUnitsForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_TEXTURE_IMAGE_UNITS) }() - public var maximumVaryingVectorsForThisDevice:GLint {get { return _maximumVaryingVectorsForThisDevice } } - private lazy var _maximumVaryingVectorsForThisDevice:GLint = { + public var maximumVaryingVectorsForThisDevice: GLint { get { return _maximumVaryingVectorsForThisDevice } } + private lazy var _maximumVaryingVectorsForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_VARYING_VECTORS) }() - lazy var extensionString:String = { - return self.runOperationSynchronously{ + lazy var extensionString: String = { + return self.runOperationSynchronously { self.makeCurrentContext() - return String(cString:unsafeBitCast(glGetString(GLenum(GL_EXTENSIONS)), to:UnsafePointer.self)) + return String(cString: unsafeBitCast(glGetString(GLenum(GL_EXTENSIONS)), to: UnsafePointer.self)) } }() + + public func debugResetOnePassRenderingInfo() { + _debugPipelineOnePassRenderInfo.removeAll() + } + + public func debugAppendRenderingInfo(_ info: String) { + _debugPipelineOnePassRenderInfo.append(info) + } } diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 3c6aedf9..5636dc45 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -1,32 +1,84 @@ import OpenGLES import UIKit +public enum PictureInputError: Error, CustomStringConvertible { + case 
zeroSizedImageError + case dataProviderNilError + case noSuchImageError(imageName: String) + case createImageError + + public var errorDescription: String { + switch self { + case .zeroSizedImageError: + return "Tried to pass in a zero-sized image" + case .dataProviderNilError: + return "Unable to retrieve image dataProvider" + case .noSuchImageError(let imageName): + return "No such image named: \(imageName) in your application bundle" + case .createImageError: + return "Fail to create image" + } + } + + public var description: String { + return "<\(type(of: self)): errorDescription = \(self.errorDescription)>" + } +} + public class PictureInput: ImageSource { public let targets = TargetContainer() - var imageFramebuffer:Framebuffer! - var hasProcessedImage:Bool = false + public private(set) var imageFramebuffer: Framebuffer? + public var framebufferUserInfo: [AnyHashable: Any]? + public let imageName: String + var hasProcessedImage = false + #if DEBUG + public var printDebugRenderInfos = true + public var debugRenderInfo: String = "" + #endif - public init(image:CGImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { - // TODO: Dispatch this whole thing asynchronously to move image loading off main thread + public init( + image: CGImage, + imageName: String? = nil, + smoothlyScaleOutput: Bool = false, + orientation: ImageOrientation = .portrait, + preprocessRenderInfo: String = "") throws { + #if DEBUG + let startTime = CACurrentMediaTime() + defer { + debugRenderInfo = """ +\(preprocessRenderInfo) +{ + PictureInput: { + input: \(image.width)x\(image.height), input_type: CGImage, + output: { size: \(imageFramebuffer?.debugRenderInfo ?? "") }, + time: \((CACurrentMediaTime() - startTime) * 1000.0)ms + } +}, +""" + } + #endif + + self.imageName = imageName ?? 
"CGImage" + let widthOfImage = GLint(image.width) let heightOfImage = GLint(image.height) // If passed an empty image reference, CGContextDrawImage will fail in future versions of the SDK. - guard((widthOfImage > 0) && (heightOfImage > 0)) else { fatalError("Tried to pass in a zero-sized image") } - + guard (widthOfImage > 0) && (heightOfImage > 0) else { throw PictureInputError.zeroSizedImageError } + var widthToUseForTexture = widthOfImage var heightToUseForTexture = heightOfImage var shouldRedrawUsingCoreGraphics = false // For now, deal with images larger than the maximum texture size by resizing to be within that limit - let scaledImageSizeToFitOnGPU = GLSize(sharedImageProcessingContext.sizeThatFitsWithinATextureForSize(Size(width:Float(widthOfImage), height:Float(heightOfImage)))) - if ((scaledImageSizeToFitOnGPU.width != widthOfImage) && (scaledImageSizeToFitOnGPU.height != heightOfImage)) { + let scaledImageSizeToFitOnGPU = GLSize(sharedImageProcessingContext.sizeThatFitsWithinATextureForSize(Size(width: Float(widthOfImage), height: Float(heightOfImage)))) + if (scaledImageSizeToFitOnGPU.width != widthOfImage) && (scaledImageSizeToFitOnGPU.height != heightOfImage) { widthToUseForTexture = scaledImageSizeToFitOnGPU.width heightToUseForTexture = scaledImageSizeToFitOnGPU.height shouldRedrawUsingCoreGraphics = true } - if (smoothlyScaleOutput) { + if smoothlyScaleOutput { // In order to use mipmaps, you need to provide power-of-two textures, so convert to the next largest power of two and stretch to fill let powerClosestToWidth = ceil(log2(Float(widthToUseForTexture))) let powerClosestToHeight = ceil(log2(Float(heightToUseForTexture))) @@ -36,34 +88,33 @@ public class PictureInput: ImageSource { shouldRedrawUsingCoreGraphics = true } - var imageData:UnsafeMutablePointer! - var dataFromImageDataProvider:CFData! + var imageData: UnsafeMutablePointer! + var dataFromImageDataProvider: CFData! 
var format = GL_BGRA - if (!shouldRedrawUsingCoreGraphics) { + if !shouldRedrawUsingCoreGraphics { /* Check that the memory layout is compatible with GL, as we cannot use glPixelStore to - * tell GL about the memory layout with GLES. - */ - if ((image.bytesPerRow != image.width * 4) || (image.bitsPerPixel != 32) || (image.bitsPerComponent != 8)) - { + * tell GL about the memory layout with GLES. + */ + if (image.bytesPerRow != image.width * 4) || (image.bitsPerPixel != 32) || (image.bitsPerComponent != 8) { shouldRedrawUsingCoreGraphics = true } else { /* Check that the bitmap pixel format is compatible with GL */ let bitmapInfo = image.bitmapInfo - if (bitmapInfo.contains(.floatComponents)) { + if bitmapInfo.contains(.floatComponents) { /* We don't support float components for use directly in GL */ shouldRedrawUsingCoreGraphics = true } else { - let alphaInfo = CGImageAlphaInfo(rawValue:bitmapInfo.rawValue & CGBitmapInfo.alphaInfoMask.rawValue) - if (bitmapInfo.contains(.byteOrder32Little)) { + let alphaInfo = CGImageAlphaInfo(rawValue: bitmapInfo.rawValue & CGBitmapInfo.alphaInfoMask.rawValue) + if bitmapInfo.contains(.byteOrder32Little) { /* Little endian, for alpha-first we can use this bitmap directly in GL */ - if ((alphaInfo != CGImageAlphaInfo.premultipliedFirst) && (alphaInfo != CGImageAlphaInfo.first) && (alphaInfo != CGImageAlphaInfo.noneSkipFirst)) { - shouldRedrawUsingCoreGraphics = true + if (alphaInfo != CGImageAlphaInfo.premultipliedFirst) && (alphaInfo != CGImageAlphaInfo.first) && (alphaInfo != CGImageAlphaInfo.noneSkipFirst) { + shouldRedrawUsingCoreGraphics = true } - } else if ((bitmapInfo.contains(CGBitmapInfo())) || (bitmapInfo.contains(.byteOrder32Big))) { + } else if (bitmapInfo.contains(CGBitmapInfo())) || (bitmapInfo.contains(.byteOrder32Big)) { /* Big endian, for alpha-last we can use this bitmap directly in GL */ - if ((alphaInfo != CGImageAlphaInfo.premultipliedLast) && (alphaInfo != CGImageAlphaInfo.last) && (alphaInfo != 
CGImageAlphaInfo.noneSkipLast)) { - shouldRedrawUsingCoreGraphics = true + if (alphaInfo != CGImageAlphaInfo.premultipliedLast) && (alphaInfo != CGImageAlphaInfo.last) && (alphaInfo != CGImageAlphaInfo.noneSkipLast) { + shouldRedrawUsingCoreGraphics = true } else { /* Can access directly using GL_RGBA pixel format */ format = GL_RGBA @@ -73,76 +124,172 @@ public class PictureInput: ImageSource { } } - // CFAbsoluteTime elapsedTime, startTime = CFAbsoluteTimeGetCurrent(); - - if (shouldRedrawUsingCoreGraphics) { - // For resized or incompatible image: redraw - imageData = UnsafeMutablePointer.allocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) - - let genericRGBColorspace = CGColorSpaceCreateDeviceRGB() - - let imageContext = CGContext(data: imageData, width: Int(widthToUseForTexture), height: Int(heightToUseForTexture), bitsPerComponent: 8, bytesPerRow: Int(widthToUseForTexture) * 4, space: genericRGBColorspace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) - // CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html - imageContext?.draw(image, in:CGRect(x:0.0, y:0.0, width:CGFloat(widthToUseForTexture), height:CGFloat(heightToUseForTexture))) - } else { - // Access the raw image bytes directly - dataFromImageDataProvider = image.dataProvider?.data - imageData = UnsafeMutablePointer(mutating:CFDataGetBytePtr(dataFromImageDataProvider)) - } - - sharedImageProcessingContext.runOperationSynchronously{ - do { - // TODO: Alter orientation based on metadata from photo - self.imageFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:orientation, size:GLSize(width:widthToUseForTexture, height:heightToUseForTexture), textureOnly:true) - } catch { - fatalError("ERROR: Unable to initialize framebuffer of size (\(widthToUseForTexture), \(heightToUseForTexture)) with error: \(error)") + 
try sharedImageProcessingContext.runOperationSynchronously { + if shouldRedrawUsingCoreGraphics { + // For resized or incompatible image: redraw + imageData = UnsafeMutablePointer.allocate(capacity: Int(widthToUseForTexture * heightToUseForTexture) * 4) + + let genericRGBColorspace = CGColorSpaceCreateDeviceRGB() + + let imageContext = CGContext(data: imageData, width: Int(widthToUseForTexture), height: Int(heightToUseForTexture), bitsPerComponent: 8, bytesPerRow: Int(widthToUseForTexture) * 4, space: genericRGBColorspace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) + // CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html + imageContext?.draw(image, in: CGRect(x: 0.0, y: 0.0, width: CGFloat(widthToUseForTexture), height: CGFloat(heightToUseForTexture))) + } else { + // Access the raw image bytes directly + guard let data = image.dataProvider?.data else { throw PictureInputError.dataProviderNilError } + dataFromImageDataProvider = data + imageData = UnsafeMutablePointer(mutating: CFDataGetBytePtr(dataFromImageDataProvider)) } - glBindTexture(GLenum(GL_TEXTURE_2D), self.imageFramebuffer.texture) - if (smoothlyScaleOutput) { + // TODO: Alter orientation based on metadata from photo + self.imageFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: orientation, size: GLSize(width: widthToUseForTexture, height: heightToUseForTexture), textureOnly: true) + self.imageFramebuffer!.lock() + + glBindTexture(GLenum(GL_TEXTURE_2D), self.imageFramebuffer!.texture) + if smoothlyScaleOutput { glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR_MIPMAP_LINEAR) } glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_RGBA, widthToUseForTexture, heightToUseForTexture, 0, GLenum(format), GLenum(GL_UNSIGNED_BYTE), imageData) - if (smoothlyScaleOutput) { + if smoothlyScaleOutput { 
glGenerateMipmap(GLenum(GL_TEXTURE_2D)) } glBindTexture(GLenum(GL_TEXTURE_2D), 0) } - if (shouldRedrawUsingCoreGraphics) { - imageData.deallocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) + if shouldRedrawUsingCoreGraphics { + imageData.deallocate() } + } - public convenience init(image:UIImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { - self.init(image:image.cgImage!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) + public convenience init(image: UIImage, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil) throws { + try self.init(image: image.cgImage!, imageName: "UIImage", smoothlyScaleOutput: smoothlyScaleOutput, orientation: orientation ?? image.imageOrientation.gpuOrientation) } - - public convenience init(imageName:String, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { - guard let image = UIImage(named:imageName) else { fatalError("No such image named: \(imageName) in your application bundle") } - self.init(image:image.cgImage!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) + + public convenience init(imageName: String, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil) throws { + guard let image = UIImage(named: imageName) else { throw PictureInputError.noSuchImageError(imageName: imageName) } + try self.init(image: image.cgImage!, imageName: imageName, smoothlyScaleOutput: smoothlyScaleOutput, orientation: orientation ?? image.imageOrientation.gpuOrientation) } - - public func processImage(synchronously:Bool = false) { + + public convenience init(image: UIImage, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil, processSteps: [PictureInputProcessStep]? = nil) throws { + #if DEBUG + let startTime = CACurrentMediaTime() + #endif + var targetOrientation = orientation ?? image.imageOrientation.gpuOrientation + var croppedCGImage: CGImage? 
+ if let processSteps = processSteps, !processSteps.isEmpty { + try autoreleasepool { + // Get CIImage with orientation + let ciImage: CIImage? + if let associatedCIImage = image.ciImage { + ciImage = associatedCIImage + } else { + ciImage = CIImage(image: image, options: [ + .applyOrientationProperty: true, + .properties: [ + kCGImagePropertyOrientation: image.imageOrientation.cgImageOrientation.rawValue + ] + ]) + } + guard let newCgImage = ciImage?.processed(with: processSteps).renderToCGImage(onGPU: false) else { + throw PictureInputError.createImageError + } + croppedCGImage = newCgImage + targetOrientation = orientation ?? .portrait + } + } else if image.imageOrientation != .up, + let ciImage = CIImage(image: image, + options: [.applyOrientationProperty: true, + .properties: [ kCGImagePropertyOrientation: image.imageOrientation.cgImageOrientation.rawValue ]]), + let rotatedImage = ciImage.renderToCGImage(onGPU: false) { + // Rotated correct orientation + croppedCGImage = rotatedImage + } else { + croppedCGImage = image.cgImage! 
+ } + guard let cgImage = croppedCGImage else { + throw PictureInputError.createImageError + } + + let preprocessRenderInfo: String + #if DEBUG + preprocessRenderInfo = """ +{ + PictureInput_pre_process : { + input: { + size: \(image.size.debugRenderInfo), type: UIImage, processSteps: \(String(describing: processSteps)) + }, + output: { size: \(cgImage.width)x\(cgImage.height), type: CGImage }, + time: \((CACurrentMediaTime() - startTime) * 1000.0)ms +}, +""" + #else + preprocessRenderInfo = "" + #endif + + try self.init(image: cgImage, imageName: "UIImage", smoothlyScaleOutput: smoothlyScaleOutput, orientation: targetOrientation, preprocessRenderInfo: preprocessRenderInfo) + } + + deinit { + // debugPrint("Deallocating operation: \(self)") + + self.imageFramebuffer?.unlock() + } + + public func processImage(synchronously: Bool = false) { + self.imageFramebuffer?.userInfo = self.framebufferUserInfo + if synchronously { - sharedImageProcessingContext.runOperationSynchronously{ - self.updateTargetsWithFramebuffer(self.imageFramebuffer) - self.hasProcessedImage = true + sharedImageProcessingContext.runOperationSynchronously { + if let framebuffer = self.imageFramebuffer { + self.updateTargetsWithFramebuffer(framebuffer) + self.hasProcessedImage = true + } + #if DEBUG + if self.printDebugRenderInfos { + debugPrint(self.debugGetOnePassRenderInfos()) + } + #endif } } else { - sharedImageProcessingContext.runOperationAsynchronously{ - self.updateTargetsWithFramebuffer(self.imageFramebuffer) - self.hasProcessedImage = true + sharedImageProcessingContext.runOperationAsynchronously { + if let framebuffer = self.imageFramebuffer { + self.updateTargetsWithFramebuffer(framebuffer) + self.hasProcessedImage = true + } + #if DEBUG + if self.printDebugRenderInfos { + debugPrint(self.debugGetOnePassRenderInfos()) + } + #endif } } } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { - if hasProcessedImage { + public func transmitPreviousImage(to target: 
ImageConsumer, atIndex: UInt) { + // This gets called after the pipline gets adjusted and needs an image it + // Disabled so we can adjust/prepare the pipline freely without worrying an old framebuffer will get pushed through it + // If after changing the pipline you need the prior frame buffer to be reprocessed, call processImage() again. + /*if hasProcessedImage { imageFramebuffer.lock() target.newFramebufferAvailable(imageFramebuffer, fromSourceIndex:atIndex) + }*/ + } +} + +public extension CGSize { + func rotatedByOrientation(_ imageOrientation: ImageOrientation) -> CGSize { + switch imageOrientation { + case .portrait, .portraitUpsideDown: + return self + case .landscapeLeft, .landscapeRight: + return CGSize(width: height, height: width) } } + + #if DEBUG + var debugRenderInfo: String { "\(width)x\(height)" } + #endif } diff --git a/framework/Source/iOS/PictureOutput.swift b/framework/Source/iOS/PictureOutput.swift index 6e434bf6..4974219f 100644 --- a/framework/Source/iOS/PictureOutput.swift +++ b/framework/Source/iOS/PictureOutput.swift @@ -7,30 +7,39 @@ public enum PictureFileFormat { } public class PictureOutput: ImageConsumer { - public var encodedImageAvailableCallback:((Data) -> ())? - public var encodedImageFormat:PictureFileFormat = .png - public var imageAvailableCallback:((UIImage) -> ())? - public var onlyCaptureNextFrame:Bool = true - public var keepImageAroundForSynchronousCapture:Bool = false - var storedFramebuffer:Framebuffer? + public var encodedImageAvailableCallback: ((Data) -> Void)? + public var encodedImageFormat: PictureFileFormat = .png + public var encodedJPEGImageCompressionQuality: CGFloat = 0.8 + public var imageAvailableCallback: ((UIImage) -> Void)? + public var cgImageAvailableCallback: ((CGImage) -> Void)? + public var onlyCaptureNextFrame = true + public var keepImageAroundForSynchronousCapture = false + public var exportWithAlpha = false + var storedFramebuffer: Framebuffer? 
    /// Arms this output to encode the next rendered frame in `format` and write it to `url`.
    ///
    /// - Parameters:
    ///   - url: Destination file URL; the write is atomic.
    ///   - format: `.png` or `.jpeg` encoding for the captured frame.
    public func saveNextFrameToURL(_ url: URL, format: PictureFileFormat) {
        onlyCaptureNextFrame = true
        encodedImageFormat = format
        self.url = url
        // Create an intentional short-term retain cycle to prevent deallocation before next frame is captured
        // (the closure captures `self` strongly; it is cleared after the single capture because
        // `onlyCaptureNextFrame` is true, breaking the cycle).
        encodedImageAvailableCallback = {imageData in
            do {
                try imageData.write(to: self.url, options: .atomic)
            } catch {
                // TODO: Handle this better
                print("WARNING: Couldn't save image with error:\(error)")
            }
        }
    }
sharedImageProcessingContext.standardImageVBO, inputTextures: [framebuffer.texturePropertiesForOutputRotation(.noRotation)]) framebuffer.unlock() let imageByteSize = Int(framebuffer.size.width * framebuffer.size.height * 4) let data = UnsafeMutablePointer.allocate(capacity: imageByteSize) glReadPixels(0, 0, framebuffer.size.width, framebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), data) renderFramebuffer.unlock() - guard let dataProvider = CGDataProvider(dataInfo:nil, data:data, size:imageByteSize, releaseData: dataProviderReleaseCallback) else {fatalError("Could not allocate a CGDataProvider")} + guard let dataProvider = CGDataProvider(dataInfo: nil, data: data, size: imageByteSize, releaseData: dataProviderReleaseCallback) else { fatalError("Could not allocate a CGDataProvider") } let defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB() - return CGImage(width:Int(framebuffer.size.width), height:Int(framebuffer.size.height), bitsPerComponent:8, bitsPerPixel:32, bytesPerRow:4 * Int(framebuffer.size.width), space:defaultRGBColorSpace, bitmapInfo:CGBitmapInfo() /*| CGImageAlphaInfo.Last*/, provider:dataProvider, decode:nil, shouldInterpolate:false, intent:.defaultIntent)! + let bitmapInfo = exportWithAlpha ? CGBitmapInfo(rawValue: CGImageAlphaInfo.last.rawValue) : CGBitmapInfo() + return CGImage(width: Int(framebuffer.size.width), height: Int(framebuffer.size.height), bitsPerComponent: 8, bitsPerPixel: 32, bytesPerRow: 4 * Int(framebuffer.size.width), space: defaultRGBColorSpace, bitmapInfo: bitmapInfo, provider: dataProvider, decode: nil, shouldInterpolate: false, intent: .defaultIntent)! 
} - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { + #if DEBUG + let startTime = CACurrentMediaTime() + defer { + debugRenderInfo = """ +{ + PictureOutput: { + input: \(framebuffer.debugRenderInfo), + output: { type: ImageOutput, time: \((CACurrentMediaTime() - startTime) * 1000.0)ms } + } +}, +""" + } + #endif + if keepImageAroundForSynchronousCapture { storedFramebuffer?.unlock() storedFramebuffer = framebuffer } + if let imageCallback = cgImageAvailableCallback { + let cgImageFromBytes = cgImageFromFramebuffer(framebuffer) + + imageCallback(cgImageFromBytes) + + if onlyCaptureNextFrame { + cgImageAvailableCallback = nil + } + } + if let imageCallback = imageAvailableCallback { let cgImageFromBytes = cgImageFromFramebuffer(framebuffer) // TODO: Let people specify orientations - let image = UIImage(cgImage:cgImageFromBytes, scale:1.0, orientation:.up) + let image = UIImage(cgImage: cgImageFromBytes, scale: 1.0, orientation: .up) imageCallback(image) @@ -77,11 +111,11 @@ public class PictureOutput: ImageConsumer { if let imageCallback = encodedImageAvailableCallback { let cgImageFromBytes = cgImageFromFramebuffer(framebuffer) - let image = UIImage(cgImage:cgImageFromBytes, scale:1.0, orientation:.up) - let imageData:Data + let image = UIImage(cgImage: cgImageFromBytes, scale: 1.0, orientation: .up) + let imageData: Data switch encodedImageFormat { - case .png: imageData = UIImagePNGRepresentation(image)! // TODO: Better error handling here - case .jpeg: imageData = UIImageJPEGRepresentation(image, 0.8)! // TODO: Be able to set image quality + case .png: imageData = image.pngData()! // TODO: Better error handling here + case .jpeg: imageData = image.jpegData(compressionQuality: encodedJPEGImageCompressionQuality)! 
} imageCallback(imageData) @@ -93,12 +127,12 @@ public class PictureOutput: ImageConsumer { } public func synchronousImageCapture() -> UIImage { - var outputImage:UIImage! - sharedImageProcessingContext.runOperationSynchronously{ + var outputImage: UIImage! + sharedImageProcessingContext.runOperationSynchronously { guard let currentFramebuffer = storedFramebuffer else { fatalError("Synchronous access requires keepImageAroundForSynchronousCapture to be set to true") } let cgImageFromBytes = cgImageFromFramebuffer(currentFramebuffer) - outputImage = UIImage(cgImage:cgImageFromBytes, scale:1.0, orientation:.up) + outputImage = UIImage(cgImage: cgImageFromBytes, scale: 1.0, orientation: .up) } return outputImage @@ -106,35 +140,35 @@ public class PictureOutput: ImageConsumer { } public extension ImageSource { - public func saveNextFrameToURL(_ url:URL, format:PictureFileFormat) { + func saveNextFrameToURL(_ url: URL, format: PictureFileFormat) { let pictureOutput = PictureOutput() - pictureOutput.saveNextFrameToURL(url, format:format) + pictureOutput.saveNextFrameToURL(url, format: format) self --> pictureOutput } } public extension UIImage { - public func filterWithOperation(_ operation:T) -> UIImage { - return filterWithPipeline{input, output in + func filterWithOperation(_ operation: T) throws -> UIImage { + return try filterWithPipeline {input, output in input --> operation --> output } } - public func filterWithPipeline(_ pipeline:(PictureInput, PictureOutput) -> ()) -> UIImage { - let picture = PictureInput(image:self) - var outputImage:UIImage? + func filterWithPipeline(_ pipeline: (PictureInput, PictureOutput) -> Void) throws -> UIImage { + let picture = try PictureInput(image: self) + var outputImage: UIImage? 
/// Operation on an input image; each step is translated into a CIImage operation.
public enum PictureInputProcessStep {
    public enum AnchorPoint {
        // Default anchor point for CIImage (its coordinate origin)
        case originPoint
        // CIImage.extent.center as anchor point
        case extentCenter
        // Custom anchor point
        case custom(point: CGPoint)
    }
    /// Scale by (x, y) around the given anchor point.
    case scale(x: CGFloat, y: CGFloat, anchorPoint: AnchorPoint)
    /// Crop to rect. Rect values are normalized to [0, 1] and are relative to the latest extent
    /// rect of the image after the previous steps.
    /// **isViewCoordinate** true means the zero point is the top-left corner (UIKit style);
    /// false means the zero point is the bottom-left corner (Core Image style).
    case crop(rect: CGRect, isViewCoordinate: Bool)
    /// Rotate image by angle (unit: radians) around the given anchor point.
    case rotation(angle: CGFloat, anchorPoint: AnchorPoint)
    /// Remember the original extent rect, rotate image by angle (radians), scale by ratio,
    /// then crop back to the original extent rect.
    case rotateScaleAndKeepRect(angle: CGFloat, scale: CGFloat, anchorPoint: AnchorPoint)
    /// Scale and crop to match the target size's aspect ratio (fill or fit);
    /// upscaling only happens when `allowUpScale` is true.
    case resizeAspectRatio(size: CGSize, isFill: Bool, allowUpScale: Bool)
}
(1.0 - rect.maxY) : rect.minY + let rasterizedRect = CGRect(x: rect.minX * newImage.accurateExtent.size.width + newImage.accurateExtent.minX, + y: adjustedY * newImage.accurateExtent.size.height + newImage.accurateExtent.minY, + width: rect.size.width * newImage.accurateExtent.size.width, + height: rect.size.height * newImage.accurateExtent.size.height).rounded() + newImage = newImage.accurateCropped(to: rasterizedRect) + case let .rotation(angle, anchorPoint): + guard angle != 0 else { continue } + newImage = newImage.processedWithAnchorPoint(anchorPoint) { + let transform = CGAffineTransform(rotationAngle: angle) + return $0.accurateTransformed(by: transform) + } + case let .rotateScaleAndKeepRect(angle, scale, anchorPoint): + guard angle != 0 || scale != 0 else { continue } + let originExtent = newImage.accurateExtent + newImage = newImage.processedWithAnchorPoint(anchorPoint) { + let transform = CGAffineTransform(rotationAngle: angle).scaledBy(x: scale, y: scale) + return $0.accurateTransformed(by: transform) + } + newImage = newImage.accurateCropped(to: originExtent) + case let .resizeAspectRatio(size, isFill, allowUpScale): + guard size != newImage.accurateExtent.size && size != .zero else { continue } + // Crop to target size ratio, always use center point as anchor point when cropping + let targetRect = CGRect(x: newImage.accurateExtent.midX - size.width / 2, y: newImage.accurateExtent.midY - size.height / 2, width: size.width, height: size.height) + var roundedCroppedUnscaleFrame: CGRect + // NOTE: this operation needs reverse thinking. Fill: target rect fits original rect. Fit: target rect fill original rect. 
+ if isFill { + roundedCroppedUnscaleFrame = targetRect.fitRect(inside: newImage.accurateExtent).rounded() + } else { + roundedCroppedUnscaleFrame = targetRect.aspectToFill(insideRect: newImage.accurateExtent).rounded() + } + newImage = newImage.accurateCropped(to: roundedCroppedUnscaleFrame) + // Scale to target size if needed + let scaleRatio = size.width / roundedCroppedUnscaleFrame.width + if scaleRatio < 1 || allowUpScale { + newImage = newImage.accurateTransformed(by: .init(scaleX: scaleRatio, y: scaleRatio)) + } + } + } + return newImage + } + + func processedWithAnchorPoint(_ anchorPoint: PictureInputProcessStep.AnchorPoint, processes: (CIImage) -> CIImage) -> CIImage { + switch anchorPoint { + case .originPoint: + // Do nothing since it is how CIImage works + return self + case .extentCenter: + let center = CGPoint(x: accurateExtent.midX, y: accurateExtent.midY) + let anchoredImage = accurateTransformed(by: CGAffineTransform(translationX: -center.x, y: -center.y)) + let processedImage = processes(anchoredImage) + let anchoreResetImage = processedImage.accurateTransformed(by: CGAffineTransform(translationX: center.x, y: center.y)) + return anchoreResetImage + case let .custom(point): + let anchoredImage = accurateTransformed(by: CGAffineTransform(translationX: -point.x, y: -point.y)) + let processedImage = processes(anchoredImage) + let anchoreResetImage = processedImage.accurateTransformed(by: CGAffineTransform(translationX: point.x, y: point.y)) + return anchoreResetImage + } + } + + func accurateTransformed(by transform: CGAffineTransform, rounded: Bool = true) -> CIImage { + let transformedRect = accurateExtent.applying(transform) + let transformedImage: CIImage + if rounded && transformedRect.rounded() != transformedRect { + let sizeRoundedTransform = transform.scaledBy(x: transformedRect.rounded().width / transformedRect.width, y: transformedRect.rounded().height / transformedRect.height) + let sizeRoundedRect = 
accurateExtent.applying(sizeRoundedTransform) + let positionRoundedRect = sizeRoundedRect.rounded(.towardZero) + let positionRoundedTransform = sizeRoundedTransform.translatedBy(x: positionRoundedRect.minX - sizeRoundedRect.minX, + y: positionRoundedRect.minY - sizeRoundedRect.minY) + transformedImage = transformed(by: positionRoundedTransform) + transformedImage.accurateExtent = accurateExtent.applying(positionRoundedTransform) + } else { + transformedImage = transformed(by: transform) + transformedImage.accurateExtent = transformedRect + } + return transformedImage + } + + func accurateCropped(to rect: CGRect) -> CIImage { + let croppedImage = cropped(to: rect) + croppedImage.accurateExtent = croppedImage.extent + return croppedImage + } + + func renderToCGImage(onGPU: Bool) -> CGImage? { + return (onGPU ? Self.ciGPUContext : Self.ciCPUContext).createCGImage(self, from: accurateExtent.rounded(.towardZero)) + } + + private static var _accurateExtentKey = 0 + + // NOTE: CIImage.extend will sometimes return an integral rect, so if we want the accurate rect after transforming, we need to apply transform on the original rect + var accurateExtent: CGRect { + get { (objc_getAssociatedObject(self, &Self._accurateExtentKey) as? NSValue)?.cgRectValue ?? 
extension CGRect {
    /// Scales `self` (preserving aspect ratio) to the largest size that fits entirely
    /// inside `rect`, centered within `rect`.
    func fitRect(inside rect: CGRect) -> CGRect {
        let ratio = min(rect.width / width, rect.height / height)
        let fittedSize = CGSize(width: width * ratio, height: height * ratio)
        let origin = CGPoint(x: rect.minX + (rect.width - fittedSize.width) / 2,
                             y: rect.minY + (rect.height - fittedSize.height) / 2)
        return CGRect(origin: origin, size: fittedSize)
    }

    /// Scales `self` (preserving aspect ratio) to the smallest size that completely covers
    /// `boundingRect`, centered on `boundingRect`'s midpoint.
    func aspectToFill(insideRect boundingRect: CGRect) -> CGRect {
        let ratio = max(boundingRect.width / width, boundingRect.height / height)
        let filledSize = CGSize(width: width * ratio, height: height * ratio)
        return CGRect(x: boundingRect.midX - filledSize.width / 2,
                      y: boundingRect.midY - filledSize.height / 2,
                      width: filledSize.width,
                      height: filledSize.height)
    }

    /// Rounds origin and size components independently using `rule`
    /// (default: round half away from zero).
    func rounded(_ rule: FloatingPointRoundingRule = .toNearestOrAwayFromZero) -> CGRect {
        CGRect(x: minX.rounded(rule),
               y: minY.rounded(rule),
               width: size.width.rounded(rule),
               height: size.height.rounded(rule))
    }
}
do layout in willDisplayFramebuffer before the framebuffer actually gets displayed + // Typically should only be used for one frame otherwise will cause serious playback issues + // When true the above delegate methods will be called from the main thread instead of the sharedImageProcessing que + // Default is false + func shouldDisplayNextFramebufferAfterMainThreadLoop() -> Bool +} + // TODO: Add support for transparency -// TODO: Deal with view resizing -public class RenderView:UIView, ImageConsumer { +public class RenderView: UIView, ImageConsumer { + public weak var delegate: RenderViewDelegate? + public var backgroundRenderColor = Color.black public var fillMode = FillMode.preserveAspectRatio - public var orientation:ImageOrientation = .portrait - public var sizeInPixels:Size { get { return Size(width:Float(frame.size.width * contentScaleFactor), height:Float(frame.size.height * contentScaleFactor))}} + public var orientation: ImageOrientation = .portrait + public var cropFrame: CGRect? + public var sizeInPixels: Size { Size(width: Float(frame.size.width * contentScaleFactor), height: Float(frame.size.height * contentScaleFactor)) } public let sources = SourceContainer() - public let maximumInputs:UInt = 1 - var displayFramebuffer:GLuint? - var displayRenderbuffer:GLuint? - var backingSize = GLSize(width:0, height:0) + public let maximumInputs: UInt = 1 + var displayFramebuffer: GLuint? + var displayRenderbuffer: GLuint? + var backingSize = GLSize(width: 0, height: 0) + var renderSize = CGSize.zero + private var isAppForeground = true - private lazy var displayShader:ShaderProgram = { + private lazy var displayShader: ShaderProgram = { return sharedImageProcessingContext.passthroughShader }() - - // TODO: Need to set viewport to appropriate size, resize viewport on view reshape - required public init?(coder:NSCoder) { - super.init(coder:coder) + private var internalLayer: CAEAGLLayer! 
+ #if DEBUG + public var debugRenderInfo: String = "" + #endif + + required public init?(coder: NSCoder) { + super.init(coder: coder) self.commonInit() } - - public override init(frame:CGRect) { - super.init(frame:frame) + + public override init(frame: CGRect) { + super.init(frame: frame) self.commonInit() } - - override public class var layerClass:Swift.AnyClass { + + override public class var layerClass: Swift.AnyClass { get { return CAEAGLLayer.self } } + override public var bounds: CGRect { + didSet { + // Check if the size changed + updateAsSizeChange(oldSize: oldValue.size, newSize: self.bounds.size) + } + } + + override public var frame: CGRect { + didSet { + // Check if the size changed + updateAsSizeChange(oldSize: oldValue.size, newSize: self.frame.size) + } + } + func commonInit() { self.contentScaleFactor = UIScreen.main.scale let eaglLayer = self.layer as! CAEAGLLayer eaglLayer.isOpaque = true - eaglLayer.drawableProperties = [NSNumber(value:false): kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8: kEAGLDrawablePropertyColorFormat] + eaglLayer.drawableProperties = [kEAGLDrawablePropertyRetainedBacking: NSNumber(value: false), kEAGLDrawablePropertyColorFormat: kEAGLColorFormatRGBA8] + eaglLayer.contentsGravity = CALayerContentsGravity.resizeAspectFill // Just for safety to prevent distortion + + NotificationCenter.default.addObserver(forName: UIApplication.didBecomeActiveNotification, object: nil, queue: .main) { [weak self] _ in + self?.isAppForeground = true + } + NotificationCenter.default.addObserver(forName: UIApplication.didEnterBackgroundNotification, object: nil, queue: .main) { [weak self] _ in + self?.isAppForeground = false + } + + self.internalLayer = eaglLayer + + self.renderSize = bounds.size } deinit { - destroyDisplayFramebuffer() + debugPrint("RenderView deinit") + let strongDisplayFramebuffer = displayFramebuffer + let strongDisplayRenderbuffer = displayRenderbuffer + sharedImageProcessingContext.runOperationAsynchronously { 
+ if let displayFramebuffer = strongDisplayFramebuffer { + var temporaryFramebuffer = displayFramebuffer + glDeleteFramebuffers(1, &temporaryFramebuffer) + } + if let displayRenderbuffer = strongDisplayRenderbuffer { + var temporaryRenderbuffer = displayRenderbuffer + glDeleteRenderbuffers(1, &temporaryRenderbuffer) + } + } } - func createDisplayFramebuffer() { - var newDisplayFramebuffer:GLuint = 0 + func createDisplayFramebuffer() -> Bool { + // Fix crash when calling OpenGL when app is not foreground + guard isAppForeground else { return false } + + var newDisplayFramebuffer: GLuint = 0 glGenFramebuffers(1, &newDisplayFramebuffer) displayFramebuffer = newDisplayFramebuffer glBindFramebuffer(GLenum(GL_FRAMEBUFFER), displayFramebuffer!) - - var newDisplayRenderbuffer:GLuint = 0 + + var newDisplayRenderbuffer: GLuint = 0 glGenRenderbuffers(1, &newDisplayRenderbuffer) displayRenderbuffer = newDisplayRenderbuffer glBindRenderbuffer(GLenum(GL_RENDERBUFFER), displayRenderbuffer!) - - sharedImageProcessingContext.context.renderbufferStorage(Int(GL_RENDERBUFFER), from:self.layer as! CAEAGLLayer) - - var backingWidth:GLint = 0 - var backingHeight:GLint = 0 + + // Without the flush you will occasionally get a warning from UIKit and when that happens the RenderView just stays black. + // "CoreAnimation: [EAGLContext renderbufferStorage:fromDrawable:] was called from a non-main thread in an implicit transaction! + // Note that this may be unsafe without an explicit CATransaction or a call to [CATransaction flush]." + // I tried a transaction and that doesn't work and this is probably why --> http://danielkbx.com/post/108060601989/catransaction-flush + // Using flush is important because it guarantees the view is layed out at the correct size before it is drawn to since this is being done on a background thread. 
+ // Its possible the size of the view was changed right before we got here and would result in us drawing to the view at the old size + // and then the view size would change to the new size at the next layout pass and distort our already drawn image. + // Since we do not call this function often we do not need to worry about the performance impact of calling flush. + CATransaction.flush() + sharedImageProcessingContext.context.renderbufferStorage(Int(GL_RENDERBUFFER), from: self.internalLayer) + + var backingWidth: GLint = 0 + var backingHeight: GLint = 0 glGetRenderbufferParameteriv(GLenum(GL_RENDERBUFFER), GLenum(GL_RENDERBUFFER_WIDTH), &backingWidth) glGetRenderbufferParameteriv(GLenum(GL_RENDERBUFFER), GLenum(GL_RENDERBUFFER_HEIGHT), &backingHeight) - backingSize = GLSize(width:backingWidth, height:backingHeight) + backingSize = GLSize(width: backingWidth, height: backingHeight) - guard ((backingWidth > 0) && (backingHeight > 0)) else { - fatalError("View had a zero size") + guard backingWidth > 0 && backingHeight > 0 else { + print("WARNING: View had a zero size") + + if self.internalLayer.bounds.width > 0 && self.internalLayer.bounds.height > 0 { + print("WARNING: View size \(self.internalLayer.bounds) may be too large ") + } + return false } - + glFramebufferRenderbuffer(GLenum(GL_FRAMEBUFFER), GLenum(GL_COLOR_ATTACHMENT0), GLenum(GL_RENDERBUFFER), displayRenderbuffer!) 
    /// Reacts to a bounds/frame size change: records the new logical size and tears down
    /// the GL display framebuffer so it is lazily recreated at the new size on the next frame.
    func updateAsSizeChange(oldSize: CGSize, newSize: CGSize) {
        if oldSize == newSize { return }

        sharedImageProcessingContext.runOperationAsynchronously {
            self.updateRenderSize(newSize: newSize)
            self.destroyDisplayFramebuffer()
        }
    }

    /// Stores the view's current logical (point) size; used to normalize `cropFrame`.
    func updateRenderSize(newSize: CGSize) {
        self.renderSize = newSize
    }

    /// Deletes the GL framebuffer/renderbuffer pair, if present, and clears the handles
    /// so the next frame triggers recreation.
    /// NOTE(review): appears to assume it runs on the sharedImageProcessingContext queue
    /// (callers here dispatch onto it) — confirm before adding new call sites.
    func destroyDisplayFramebuffer() {
        if let displayFramebuffer = self.displayFramebuffer {
            var temporaryFramebuffer = displayFramebuffer
            glDeleteFramebuffers(1, &temporaryFramebuffer)
            self.displayFramebuffer = nil
        }
        if let displayRenderbuffer = self.displayRenderbuffer {
            var temporaryRenderbuffer = displayRenderbuffer
            glDeleteRenderbuffers(1, &temporaryRenderbuffer)
            self.displayRenderbuffer = nil
        }
    }
cleanup: () -> Void = { [weak self] in + guard let self = self else { return } + + if self.delegate?.shouldDisplayNextFramebufferAfterMainThreadLoop() ?? false { + DispatchQueue.main.async { + self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) + framebuffer.unlock() + } + } else { + self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) + framebuffer.unlock() + } } - self.activateDisplayFramebuffer() - clearFramebufferWithColor(backgroundRenderColor) - - let scaledVertices = fillMode.transformVertices(verticallyInvertedImageVertices, fromInputSize:framebuffer.sizeForTargetOrientation(self.orientation), toFitSize:backingSize) - renderQuadWithShader(self.displayShader, vertices:scaledVertices, inputTextures:[framebuffer.texturePropertiesForTargetOrientation(self.orientation)]) - framebuffer.unlock() + let work: () -> Void = { [weak self] in + guard let self = self else { return } + + // Fix crash when calling OpenGL when app is not foreground + guard self.isAppForeground else { return } + + if self.displayFramebuffer == nil && !self.createDisplayFramebuffer() { + cleanup() + // Bail if we couldn't successfully create the displayFramebuffer + return + } + + #if DEBUG + let startTime = CACurrentMediaTime() + #endif + + self.activateDisplayFramebuffer() + + clearFramebufferWithColor(self.backgroundRenderColor) + + let inputTexture: InputTextureProperties + // RenderView will discard content outside cropFrame + // e.g.: renderView.bounds is (0, 0, 414, 805), the actual content size to be rendered is (420, 805) and will be rendered center aligned + // Instead of changing renderView.frame to (-3, 0, 420, 805), we can set cropFrame to (3, 0, 414, 805) + if let cropFrame = self.cropFrame, cropFrame != CGRect(origin: .zero, size: self.renderSize) { + let x: Float = max(0, Float(cropFrame.minX / self.renderSize.width)) + let y: Float = max(0, Float(cropFrame.minY / self.renderSize.height)) + let width: Float = max(0, 
min(Float(cropFrame.width / self.renderSize.width), 1)) + let height: Float = max(0, min(Float(cropFrame.height / self.renderSize.height), 1)) + inputTexture = InputTextureProperties(textureCoordinates: Rotation.noRotation.croppedTextureCoordinates(offsetFromOrigin: .init(x, y), cropSize: .init(width: width, height: height)), texture: framebuffer.texture) + } else { + inputTexture = framebuffer.texturePropertiesForTargetOrientation(self.orientation) + } + + let scaledVertices = self.fillMode.transformVertices(verticallyInvertedImageVertices, fromInputSize: framebuffer.sizeForTargetOrientation(self.orientation), toFitSize: self.backingSize) + renderQuadWithShader(self.displayShader, vertices: scaledVertices, inputTextures: [inputTexture]) + + glBindRenderbuffer(GLenum(GL_RENDERBUFFER), self.displayRenderbuffer!) + + sharedImageProcessingContext.presentBufferForDisplay() + + cleanup() + + #if DEBUG + self.debugRenderInfo = """ +{ + RenderView: { + input: \(framebuffer.debugRenderInfo), + output: { size: \(self.backingSize.debugRenderInfo), time: \((CACurrentMediaTime() - startTime) * 1000.0)ms } + } +}, +""" + #endif + } - glBindRenderbuffer(GLenum(GL_RENDERBUFFER), displayRenderbuffer!) - sharedImageProcessingContext.presentBufferForDisplay() + if self.delegate?.shouldDisplayNextFramebufferAfterMainThreadLoop() ?? 
false {
            // CAUTION: Never call sync from the sharedImageProcessingContext, it will cause cyclic thread deadlocks
            // If you are curious, change this to sync, then try trimming/scrubbing a video
            // Before that happens you will get a deadlock when someone calls runOperationSynchronously since the main thread is blocked
            // There is a way to get around this but then the first thing mentioned will happen
            DispatchQueue.main.async {
                self.delegate?.willDisplayFramebuffer(renderView: self, framebuffer: framebuffer)

                sharedImageProcessingContext.runOperationAsynchronously(work)
            }
        } else {
            self.delegate?.willDisplayFramebuffer(renderView: self, framebuffer: framebuffer)

            work()
        }
    }
}

extension RenderView: DebugPipelineNameable {
    public var debugNameForPipeline: String {
        return "RenderView"
    }
}

// ---- framework/Source/iOS/SpeakerOutput.swift (new file) ----
//
// SpeakerOutput.swift
// GPUImage
//
// Rewritten by Josh Bernfeld on 3/1/18
// and originally created by Uzi Refaeli on 3/9/13.
// Copyright (c) 2018 Brad Larson. All rights reserved.
//

import Foundation
import AudioToolbox
import AVFoundation

/// Plays decoded linear-PCM audio (delivered as `CMSampleBuffer`s via the
/// `AudioEncodingTarget` protocol) through the device speaker.
///
/// Architecture: an `AUGraph` of spatial mixer -> RemoteIO output. Incoming
/// samples are staged in a `TPCircularBuffer`; the graph's render callback
/// (`playbackCallback` below) drains that buffer on the audio thread. When the
/// circular buffer overflows, the one sample that did not fit is parked in a
/// single-slot "rescue buffer" and replayed once the consumer catches up.
public class SpeakerOutput: AudioEncodingTarget {
    /// When true, `activateAudioTrack()` reconfigures the shared AVAudioSession
    /// (category `.ambient`, active). Set false if the host app manages its own session.
    public var changesAudioSession = true

    public private(set) var isPlaying = false

    /// When muted the render callback still consumes samples (keeping timing
    /// intact) but leaves the output zeroed.
    public var isMuted = false

    // True once at least one sample has been committed to the circular buffer.
    var hasBuffer = false
    var isReadyForMoreMediaData = true {
        willSet {
            guard newValue else { return }

            // When we are ready to begin accepting new data check if we had something
            // in the rescue buffer. If we did then move it to the main buffer.
            self.copyRescueBufferContentsToCircularBuffer()
        }
    }

    var processingGraph: AUGraph?
    var mixerUnit: AudioUnit?

    var firstBufferReached = false

    let outputBus: AudioUnitElement = 0
    let inputBus: AudioUnitElement = 1

    // 16-bit samples: matches the mBitsPerChannel = 16 stream format set below.
    let unitSize = UInt32(MemoryLayout<Int16>.size)
    let bufferUnit: UInt32 = 655360

    var circularBuffer = TPCircularBuffer()
    let circularBufferSize: UInt32

    // Lazily-malloc'd single-sample overflow slot; see writeToRescueBuffer(_:_:).
    var rescueBuffer: UnsafeMutableRawPointer?
    let rescueBufferSize: Int
    var rescueBufferContentsSize: UInt32 = 0

    public init() {
        circularBufferSize = bufferUnit * unitSize
        // NOTE(review): rescue capacity is bufferUnit / 2 bytes (not units) —
        // confirm this is the intended sizing relative to circularBufferSize.
        rescueBufferSize = Int(bufferUnit / 2)
    }

    deinit {
        // Stop playback and reset state BEFORE tearing anything down:
        // cancel() touches both processingGraph and circularBuffer, so it must
        // not run after DisposeAUGraph/TPCircularBufferCleanup.
        self.cancel()

        if let processingGraph = processingGraph {
            DisposeAUGraph(processingGraph)
        }
        if let rescueBuffer = rescueBuffer {
            free(rescueBuffer)
        }
        TPCircularBufferCleanup(&circularBuffer)
    }

    // MARK: -
    // MARK: Playback control

    /// Starts the AUGraph; no-op until activateAudioTrack() has built the graph.
    public func start() {
        if isPlaying || processingGraph == nil { return }

        AUGraphStart(processingGraph!)

        isPlaying = true
    }

    /// Stops the AUGraph and discards all buffered audio so a subsequent
    /// start() begins from silence.
    public func cancel() {
        if !isPlaying || processingGraph == nil { return }

        AUGraphStop(processingGraph!)

        isPlaying = false

        rescueBufferContentsSize = 0
        TPCircularBufferClear(&circularBuffer)
        hasBuffer = false
        isReadyForMoreMediaData = true
    }

    // MARK: -
    // MARK: AudioEncodingTarget protocol

    /// Builds the mixer -> RemoteIO AUGraph, installs the render callback, and
    /// initializes the circular buffer. Must be called before start().
    public func activateAudioTrack() throws {
        if changesAudioSession {
            do {
                try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.ambient)
                try AVAudioSession.sharedInstance().setActive(true)
            } catch {
                print("ERROR: Unable to set audio session: \(error)")
            }
        }

        // Create a new AUGraph
        NewAUGraph(&processingGraph)

        // AUNodes represent AudioUnits on the AUGraph and provide an
        // easy means for connecting audioUnits together.
        var outputNode = AUNode()
        var mixerNode = AUNode()

        // Create AudioComponentDescriptions for the AUs we want in the graph mixer component
        var mixerDesc = AudioComponentDescription()
        mixerDesc.componentType = kAudioUnitType_Mixer
        mixerDesc.componentSubType = kAudioUnitSubType_SpatialMixer
        mixerDesc.componentFlags = 0
        mixerDesc.componentFlagsMask = 0
        mixerDesc.componentManufacturer = kAudioUnitManufacturer_Apple

        // Output component
        var outputDesc = AudioComponentDescription()
        outputDesc.componentType = kAudioUnitType_Output
        outputDesc.componentSubType = kAudioUnitSubType_RemoteIO
        outputDesc.componentFlags = 0
        outputDesc.componentFlagsMask = 0
        outputDesc.componentManufacturer = kAudioUnitManufacturer_Apple

        // Add nodes to the graph to hold our AudioUnits,
        // You pass in a reference to the AudioComponentDescription
        // and get back an AudioUnit
        AUGraphAddNode(processingGraph!, &mixerDesc, &mixerNode)
        AUGraphAddNode(processingGraph!, &outputDesc, &outputNode)

        // Now we can manage connections using nodes in the graph.
        // Connect the mixer node's output to the output node's input
        AUGraphConnectNodeInput(processingGraph!, mixerNode, 0, outputNode, 0)

        // Upon return from this function call, the audio units belonging to the graph
        // are open but not initialized. Specifically, no resource allocation occurs.
        AUGraphOpen(processingGraph!)

        // Get a link to the mixer AU so we can talk to it later
        AUGraphNodeInfo(processingGraph!, mixerNode, nil, &mixerUnit)

        var elementCount: UInt32 = 1
        AudioUnitSetProperty(mixerUnit!, kAudioUnitProperty_ElementCount, kAudioUnitScope_Input, 0, &elementCount, UInt32(MemoryLayout<UInt32>.size))

        // Set output callback, this is how audio sample data will be retrieved
        var callbackStruct = AURenderCallbackStruct()
        callbackStruct.inputProc = playbackCallback
        callbackStruct.inputProcRefCon = bridgeObject(self)
        AUGraphSetNodeInputCallback(processingGraph!, mixerNode, 0, &callbackStruct)

        // Describe the format, this will get adjusted when the first sample comes in.
        var audioFormat = AudioStreamBasicDescription()
        audioFormat.mFormatID = kAudioFormatLinearPCM
        audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked
        audioFormat.mSampleRate = 44100.0
        audioFormat.mReserved = 0

        audioFormat.mBytesPerPacket = 2
        audioFormat.mFramesPerPacket = 1
        audioFormat.mBytesPerFrame = 2
        audioFormat.mChannelsPerFrame = 1
        audioFormat.mBitsPerChannel = 16

        // Apply the format
        AudioUnitSetProperty(mixerUnit!, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, outputBus, &audioFormat, UInt32(MemoryLayout<AudioStreamBasicDescription>.size))

        // Initialize the processing graph
        AUGraphInitialize(processingGraph!)

        circularBuffer = TPCircularBuffer()

        // Initialize the circular buffer
        _TPCircularBufferInit(&circularBuffer, circularBufferSize, MemoryLayout<TPCircularBuffer>.size)

        hasBuffer = false
    }

    /// Accepts one decoded audio sample buffer and appends its bytes to the
    /// circular buffer. On the first sample, adopts the sample's actual stream
    /// format (sample rate, channel layout) in place of the 44.1kHz mono default.
    public func processAudioBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) {
        defer {
            if shouldInvalidateSampleWhenDone {
                CMSampleBufferInvalidate(sampleBuffer)
            }
        }

        if !isReadyForMoreMediaData || !isPlaying { return }

        if !firstBufferReached {
            firstBufferReached = true
            // Get the format information of the sample
            let desc = CMSampleBufferGetFormatDescription(sampleBuffer)!
            let basicDesc = CMAudioFormatDescriptionGetStreamBasicDescription(desc)!

            var oSize = UInt32(MemoryLayout<AudioStreamBasicDescription>.size)
            // Retrieve the existing set audio format
            var audioFormat = AudioStreamBasicDescription()
            AudioUnitGetProperty(mixerUnit!, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, outputBus, &audioFormat, &oSize)

            // Update the audio format with the information we have from the sample
            audioFormat.mSampleRate = basicDesc.pointee.mSampleRate

            audioFormat.mBytesPerPacket = basicDesc.pointee.mBytesPerPacket
            audioFormat.mFramesPerPacket = basicDesc.pointee.mFramesPerPacket
            audioFormat.mBytesPerFrame = basicDesc.pointee.mBytesPerFrame
            audioFormat.mChannelsPerFrame = basicDesc.pointee.mChannelsPerFrame
            audioFormat.mBitsPerChannel = basicDesc.pointee.mBitsPerChannel

            // Apply the format
            AudioUnitSetProperty(mixerUnit!, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, outputBus, &audioFormat, UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
            AUGraphUpdate(processingGraph!, nil)
        }

        // Populate an AudioBufferList with the sample
        var audioBufferList = AudioBufferList()
        var blockBuffer: CMBlockBuffer?
        CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, bufferListSizeNeededOut: nil, bufferListOut: &audioBufferList, bufferListSize: MemoryLayout<AudioBufferList>.size, blockBufferAllocator: nil, blockBufferMemoryAllocator: nil, flags: kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, blockBufferOut: &blockBuffer)

        // This is actually doing audioBufferList.mBuffers[0]
        // Since the struct has an array of length of 1 the compiler is interpreting
        // it as a single item array and not letting us use the above line.
        // Since the array pointer points to the first item of the c array
        // and all we want is the first item this is equally fine.
        let audioBuffer = audioBufferList.mBuffers

        // Place the AudioBufferList in the circular buffer
        let sampleSize = UInt32(CMSampleBufferGetTotalSampleSize(sampleBuffer))
        let didCopyBytes = TPCircularBufferProduceBytes(&circularBuffer, audioBuffer.mData, sampleSize)

        // The circular buffer has not been processed quickly enough and has filled up.
        // Disable reading any further samples and save this last buffer so we don't lose it.
        if !didCopyBytes {
            // print("TPCircularBuffer limit reached: \(sampleSize) Bytes")

            isReadyForMoreMediaData = false

            self.writeToRescueBuffer(audioBuffer.mData, sampleSize)
        } else {
            hasBuffer = true
        }
    }

    public func readyForNextAudioBuffer() -> Bool {
        return isReadyForMoreMediaData
    }

    // MARK: -
    // MARK: Rescue buffer

    /// Parks one overflowed sample (`size` bytes at `src`) in the rescue buffer,
    /// allocating it on first use. Samples larger than the slot are dropped.
    func writeToRescueBuffer(_ src: UnsafeRawPointer!, _ size: UInt32) {
        if rescueBufferContentsSize > 0 {
            print("WARNING: Writing to rescue buffer with contents already inside")
        }

        if Int(size) > rescueBufferSize {
            print("WARNING: Unable to allocate enough space for rescue buffer, dropping audio sample")
        } else {
            if rescueBuffer == nil {
                rescueBuffer = malloc(rescueBufferSize)
            }

            rescueBufferContentsSize = size
            memcpy(rescueBuffer!, src, Int(size))
        }
    }

    /// Moves the parked sample (if any) back into the circular buffer; called
    /// from the isReadyForMoreMediaData willSet when the consumer catches up.
    func copyRescueBufferContentsToCircularBuffer() {
        if rescueBufferContentsSize > 0 {
            let didCopyBytes = TPCircularBufferProduceBytes(&circularBuffer, rescueBuffer, rescueBufferContentsSize)
            if !didCopyBytes {
                print("WARNING: Unable to copy rescue buffer into main buffer, dropping audio sample")
            }
            rescueBufferContentsSize = 0
        }
    }
}

/// AURender callback (runs on the audio I/O thread): zero-fills the output,
/// then copies up to the requested byte count out of the SpeakerOutput's
/// circular buffer (skipping the copy when muted) and consumes what was read.
func playbackCallback(
    inRefCon: UnsafeMutableRawPointer,
    ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
    inTimeStamp: UnsafePointer<AudioTimeStamp>,
    inBusNumber: UInt32,
    inNumberFrames: UInt32,
    ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus {
    let audioBuffer = ioData!.pointee.mBuffers
    let numberOfChannels = audioBuffer.mNumberChannels
    let outSamples = audioBuffer.mData

    // Zero-out all of the output samples first
    memset(outSamples, 0, Int(audioBuffer.mDataByteSize))

    let p = bridgeRawPointer(inRefCon) as! SpeakerOutput

    if p.hasBuffer && p.isPlaying {
        var availableBytes: UInt32 = 0
        let bufferTail = TPCircularBufferTail(&p.circularBuffer, &availableBytes)

        let requestedBytesSize = inNumberFrames * p.unitSize * numberOfChannels

        let bytesToRead = min(availableBytes, requestedBytesSize)
        if !p.isMuted {
            // Copy the bytes from the circular buffer into the outSample
            memcpy(outSamples, bufferTail, Int(bytesToRead))
        }
        // Clear what we just read out of the circular buffer
        TPCircularBufferConsume(&p.circularBuffer, bytesToRead)

        // Re-open the intake once the backlog drops below two render quanta.
        if availableBytes <= requestedBytesSize * 2 {
            p.isReadyForMoreMediaData = true
        }

        if availableBytes <= requestedBytesSize {
            p.hasBuffer = false
        }
    }

    return noErr
}

/// Bridges an object reference to a raw pointer (unretained) for use as an
/// AURenderCallback refCon. The caller must keep the object alive.
func bridgeObject(_ obj: AnyObject) -> UnsafeMutableRawPointer {
    return UnsafeMutableRawPointer(Unmanaged.passUnretained(obj).toOpaque())
}

/// Inverse of bridgeObject(_:): recovers the object from a refCon pointer
/// without affecting its retain count.
func bridgeRawPointer(_ ptr: UnsafeMutableRawPointer) -> AnyObject {
    return Unmanaged<AnyObject>.fromOpaque(ptr).takeUnretainedValue()
}

// ---- framework/Tests/Pipeline_Tests.swift ----
import XCTest
// @testable import GPUImage

class FakeOperation: ImageProcessingOperation {
    let targets = TargetContainer()
    let sources = SourceContainer()
    var maximumInputs: UInt { get { return 1 } } // Computed property, so it can be overridden
    let name: String

    init(name: String) {
        self.name =
name } - func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { } - func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { } } class FakeRenderView: ImageConsumer { let sources = SourceContainer() - let maximumInputs:UInt = 1 + let maximumInputs: UInt = 1 - func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { } } class FakeCamera: ImageSource { let targets = TargetContainer() - func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { } func newCameraFrame() { @@ -46,19 +46,18 @@ class FakeCamera: ImageSource { } class Pipeline_Tests: XCTestCase { - func testTargetContainer() { let targetContainer = TargetContainer() // All operations have been added and should have a strong reference - var operation1:FakeOperation? = FakeOperation(name:"Operation 1") - targetContainer.append(operation1!, indexAtTarget:0) - var operation2:FakeOperation? = FakeOperation(name:"Operation 2") - targetContainer.append(operation2!, indexAtTarget:0) - var operation3:FakeOperation? = FakeOperation(name:"Operation 3") - targetContainer.append(operation3!, indexAtTarget:0) - var operation4:FakeOperation? = FakeOperation(name:"Operation 4") - targetContainer.append(operation4!, indexAtTarget:0) + var operation1: FakeOperation? = FakeOperation(name: "Operation 1") + targetContainer.append(operation1!, indexAtTarget: 0) + var operation2: FakeOperation? = FakeOperation(name: "Operation 2") + targetContainer.append(operation2!, indexAtTarget: 0) + var operation3: FakeOperation? = FakeOperation(name: "Operation 3") + targetContainer.append(operation3!, indexAtTarget: 0) + var operation4: FakeOperation? 
= FakeOperation(name: "Operation 4") + targetContainer.append(operation4!, indexAtTarget: 0) for (index, (target, _)) in targetContainer.enumerated() { let operation = target as! FakeOperation @@ -101,7 +100,6 @@ class Pipeline_Tests: XCTestCase { } func testSourceContainer() { - } func testChaining() { diff --git a/framework/Tests/ShaderProgram_Tests.swift b/framework/Tests/ShaderProgram_Tests.swift index 23e45408..34533da7 100755 --- a/framework/Tests/ShaderProgram_Tests.swift +++ b/framework/Tests/ShaderProgram_Tests.swift @@ -6,14 +6,12 @@ public let TestBrokenVertexShader = "attribute vec4 position;\n attribute vec4 i public let TestBrokenFragmentShader = "varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void ma)\n {\n gl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n }\n " public let TestMismatchedFragmentShader = "varying vec2 textureCoordinateF;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n gl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n }\n " - class ShaderProgram_Tests: XCTestCase { - func testExample() { sharedImageProcessingContext.makeCurrentContext() do { - let shaderProgram = try ShaderProgram(vertexShader:TestVertexShader, fragmentShader:TestFragmentShader) + let shaderProgram = try ShaderProgram(vertexShader: TestVertexShader, fragmentShader: TestFragmentShader) let temporaryPosition = shaderProgram.attributeIndex("position") XCTAssert(temporaryPosition != nil, "Could not find position attribute") XCTAssert(temporaryPosition == shaderProgram.attributeIndex("position"), "Could not retrieve the same position attribute") @@ -30,15 +28,15 @@ class ShaderProgram_Tests: XCTestCase { XCTFail("Should not have thrown error during shader compilation: \(error)") } - if ((try? ShaderProgram(vertexShader:TestBrokenVertexShader, fragmentShader:TestFragmentShader)) != nil) { + if (try? 
ShaderProgram(vertexShader: TestBrokenVertexShader, fragmentShader: TestFragmentShader)) != nil { XCTFail("Program should not have compiled correctly") } - if ((try? ShaderProgram(vertexShader:TestVertexShader, fragmentShader:TestBrokenFragmentShader)) != nil) { + if (try? ShaderProgram(vertexShader: TestVertexShader, fragmentShader: TestBrokenFragmentShader)) != nil { XCTFail("Program should not have compiled correctly") } - if ((try? ShaderProgram(vertexShader:TestVertexShader, fragmentShader:TestMismatchedFragmentShader)) != nil) { + if (try? ShaderProgram(vertexShader: TestVertexShader, fragmentShader: TestMismatchedFragmentShader)) != nil { XCTFail("Program should not have compiled correctly") } }